Compare commits
9 Commits (3afcfd5856 ... c53f511fd3)

| Author | SHA1 | Date |
|---|---|---|
| | c53f511fd3 | |
| | 6b28a7b25c | |
| | 6eb00c8f6c | |
| | 2b16387189 | |
| | 53c0000207 | |
| | 0724a9c2e1 | |
| | a4f8026d5d | |
| | 4e9009a77a | |
| | 00d08a36cc | |
@@ -1,9 +1,10 @@
 # Standard library imports
 from __future__ import annotations
 
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from enum import auto, Enum
 import functools
+import threading
 from typing import NamedTuple
 
 # Third party imports
@@ -34,12 +35,18 @@ def singleton(cls):
     """
     Make a class a Singleton class (see
     https://realpython.com/primer-on-python-decorators/#creating-singletons)
+
+    Added locking.
     """
 
+    lock = threading.Lock()
+
     @functools.wraps(cls)
     def wrapper_singleton(*args, **kwargs):
-        if not wrapper_singleton.instance:
-            wrapper_singleton.instance = cls(*args, **kwargs)
+        if wrapper_singleton.instance is None:
+            with lock:
+                if wrapper_singleton.instance is None:  # Check still None
+                    wrapper_singleton.instance = cls(*args, **kwargs)
         return wrapper_singleton.instance
 
     wrapper_singleton.instance = None
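The locking added to `singleton` above is the classic double-checked pattern: the fast path skips the lock once an instance exists, and the second check inside the lock stops two racing threads from both constructing one. A self-contained sketch of how a decorated class behaves; the decorator body is copied from the new side of the hunk (plus the closing `return wrapper_singleton`, which falls outside the hunk), while the `Registry` class and thread count are illustrative only:

```python
import functools
import threading


def singleton(cls):
    """Double-checked locking singleton decorator, as in the hunk above."""
    lock = threading.Lock()

    @functools.wraps(cls)
    def wrapper_singleton(*args, **kwargs):
        if wrapper_singleton.instance is None:
            with lock:
                if wrapper_singleton.instance is None:  # Check still None
                    wrapper_singleton.instance = cls(*args, **kwargs)
        return wrapper_singleton.instance

    wrapper_singleton.instance = None
    return wrapper_singleton


@singleton
class Registry:
    """Illustrative class: constructed exactly once, however many callers race."""


instances = []
threads = [threading.Thread(target=lambda: instances.append(Registry())) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert all(obj is instances[0] for obj in instances)  # every thread got the same object
```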
@@ -2,7 +2,6 @@
 import datetime as dt
 import logging
 import os
-from typing import Optional
 
 # PyQt imports
 
@@ -35,8 +34,6 @@ class Config(object):
     COLOUR_UNREADABLE = "#dc3545"
     COLOUR_WARNING_TIMER = "#ffc107"
     DBFS_SILENCE = -50
-    DEBUG_FUNCTIONS: list[Optional[str]] = []
-    DEBUG_MODULES: list[Optional[str]] = []
     DEFAULT_COLUMN_WIDTH = 200
     DISPLAY_SQL = False
     DO_NOT_IMPORT = "Do not import"
@@ -83,6 +80,7 @@ class Config(object):
     MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
     MAIL_USE_TLS = os.environ.get("MAIL_USE_TLS") is not None
     MAX_IMPORT_MATCHES = 5
+    MAX_IMPORT_THREADS = 3
     MAX_INFO_TABS = 5
     MAX_MISSING_FILES_TO_REPORT = 10
     MILLISECOND_SIGFIGS = 0
@@ -147,12 +147,15 @@ class TracksTable(Model):
     title: Mapped[str] = mapped_column(String(256), index=True)
 
     playlistrows: Mapped[list[PlaylistRowsTable]] = relationship(
-        "PlaylistRowsTable", back_populates="track"
+        "PlaylistRowsTable",
+        back_populates="track",
+        cascade="all, delete-orphan",
     )
     playlists = association_proxy("playlistrows", "playlist")
     playdates: Mapped[list[PlaydatesTable]] = relationship(
         "PlaydatesTable",
         back_populates="track",
+        cascade="all, delete-orphan",
         lazy="joined",
     )
 
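`cascade="all, delete-orphan"` on both relationships tells the ORM to delete the dependent playlist-row and playdate records when their parent track is deleted (or detached), rather than leaving them behind. A minimal, self-contained sketch of that behaviour using hypothetical `Parent`/`Child` models and an in-memory SQLite database, not the project's tables:

```python
from sqlalchemy import ForeignKey, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parent"
    id: Mapped[int] = mapped_column(primary_key=True)
    # delete-orphan: children are removed when the parent is deleted
    # (or when they are detached from parent.children).
    children: Mapped[list["Child"]] = relationship(
        back_populates="parent", cascade="all, delete-orphan"
    )


class Child(Base):
    __tablename__ = "child"
    id: Mapped[int] = mapped_column(primary_key=True)
    parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))
    parent: Mapped[Parent] = relationship(back_populates="children")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Parent(children=[Child(), Child()]))
    session.commit()

    parent = session.scalars(select(Parent)).one()
    session.delete(parent)
    session.commit()

    # Both Child rows were deleted along with the Parent.
    assert session.scalars(select(Child)).all() == []
```

With the default cascade, deleting the parent would instead try to null out `child.parent_id` and leave the child rows in place.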
@@ -3,6 +3,7 @@ from __future__ import annotations
 from dataclasses import dataclass, field
 from fuzzywuzzy import fuzz # type: ignore
 import os.path
+import threading
 from typing import Optional, Sequence
 import os
 import shutil
@@ -10,7 +11,6 @@ import shutil
 # PyQt imports
 from PyQt6.QtCore import (
     pyqtSignal,
-    QObject,
     QThread,
 )
 from PyQt6.QtWidgets import (
@@ -30,6 +30,7 @@ from PyQt6.QtWidgets import (
 from classes import (
     ApplicationError,
     MusicMusterSignals,
+    singleton,
     Tags,
 )
 from config import Config
@@ -53,7 +54,6 @@ class ThreadData:
 
     base_model: PlaylistModel
     row_number: int
-    worker: Optional[DoTrackImport] = None
 
 
 @dataclass
@@ -62,9 +62,10 @@ class TrackFileData:
     Data structure to hold details of file to be imported
     """
 
+    source_path: str
     tags: Tags = Tags()
     destination_path: str = ""
-    import_this_file: bool = True
+    import_this_file: bool = False
     error: str = ""
     file_path_to_remove: Optional[str] = None
     track_id: int = 0
@@ -85,6 +86,7 @@ class TrackMatchData:
     track_id: int
 
 
+@singleton
 class FileImporter:
     """
     Class to manage the import of new tracks. Sanity checks are carried
@@ -97,11 +99,16 @@ class FileImporter:
     The actual import is handled by the DoTrackImport class.
     """
 
+    # Place to keep a reference to importer workers. This is an instance
+    # variable to allow tests access. As this is a singleton, a class
+    # variable or an instance variable are effectively the same thing.
+    workers: dict[str, DoTrackImport] = {}
+
     def __init__(
         self, base_model: PlaylistModel, row_number: Optional[int] = None
     ) -> None:
         """
-        Set up class
+        Initialise the FileImporter singleton instance.
         """
 
         # Create ModelData
@@ -109,23 +116,13 @@ class FileImporter:
             row_number = base_model.rowCount()
         self.model_data = ThreadData(base_model=base_model, row_number=row_number)
 
-        # Populate self.import_files_data
-        for infile in [
-            os.path.join(Config.REPLACE_FILES_DEFAULT_SOURCE, f)
-            for f in os.listdir(Config.REPLACE_FILES_DEFAULT_SOURCE)
-            if f.endswith((".mp3", ".flac"))
-        ]:
-            self.import_files_data[infile] = TrackFileData()
-
-        # Place to keep a reference to importer threads
-        self.threads: list[QThread] = []
-
         # Data structure to track files to import
-        self.import_files_data: dict[str, TrackFileData] = {}
+        self.import_files_data: list[TrackFileData] = []
 
         # Dictionary of exsting tracks indexed by track.id
        self.existing_tracks = self._get_existing_tracks()
 
+        # Get signals
         self.signals = MusicMusterSignals()
 
     def _get_existing_tracks(self) -> Sequence[Tracks]:
@@ -136,19 +133,15 @@ class FileImporter:
         with db.Session() as session:
             return Tracks.get_all(session)
 
-    def do_import(self) -> None:
+    def start(self) -> None:
         """
-        Populate self.import_files_data, which is a TrackFileData object for each entry.
-        - Validate files to be imported
-        - Find matches and similar files
-        - Get user choices for each import file
-        - Validate self.import_files_data integrity
-        - Tell the user which files won't be imported and why
-        - Import the files, one by one.
+        Build a TrackFileData object for each new file to import, add it
+        to self.import_files_data, and trigger importing.
         """
 
-        if not self.import_files_data:
+        new_files: list[str] = []
 
+        if not os.listdir(Config.REPLACE_FILES_DEFAULT_SOURCE):
             show_OK(
                 "File import",
                 f"No files in {Config.REPLACE_FILES_DEFAULT_SOURCE} to import",
@@ -156,78 +149,107 @@ class FileImporter:
             )
             return
 
-        for path in self.import_files_data.keys():
-            self.validate_file(path)
-            if self.import_files_data[path].import_this_file:
-                self.find_similar(path)
-                if len(self.import_files_data[path].track_match_data) > 1:
-                    self.sort_track_match_data(path)
-                selection = self.get_user_choices(path)
-                self.process_selection(path, selection)
-            if self.import_files_data[path].import_this_file:
-                self.validate_file_data(path)
+        for infile in [
+            os.path.join(Config.REPLACE_FILES_DEFAULT_SOURCE, f)
+            for f in os.listdir(Config.REPLACE_FILES_DEFAULT_SOURCE)
+            if f.endswith((".mp3", ".flac"))
+        ]:
+            if infile in [a.source_path for a in self.import_files_data]:
+                log.debug(f"file_importer.start skipping {infile=}, already queued")
+            else:
+                new_files.append(infile)
+                self.import_files_data.append(self.populate_trackfiledata(infile))
 
         # Tell user which files won't be imported and why
-        self.inform_user()
-        # Start the import of all other files
-        self.import_next_file()
+        self.inform_user(
+            [
+                a
+                for a in self.import_files_data
+                if a.source_path in new_files and a.import_this_file is False
+            ]
+        )
 
-    def validate_file(self, path: str) -> None:
+        # Remove do-not-import entries from queue
+        self.import_files_data[:] = [
+            a for a in self.import_files_data if a.import_this_file is not False
+        ]
+
+        # Start the import if necessary
+        log.debug(f"Import files prepared: {[a.source_path for a in self.import_files_data]}")
+        self._import_next_file()
+
+    def populate_trackfiledata(self, path: str) -> TrackFileData:
         """
-        - check all files are readable
-        - check all files have tags
-        - Mark failures not to be imported and populate error text.
+        Populate TrackFileData object for path:
 
-        On return, the following TrackFileData fields should be set:
-
-        tags: Yes
-        destination_path: No
-        import_this_file: Yes (set by default)
-        error: No (only set if an error is detected)
-        file_path_to_remove: No
-        track_id: No
-        track_match_data: No
+        - Validate file to be imported
+        - Find matches and similar files
+        - Get user choices for each import file
+        - Validate self.import_files_data integrity
+        - Tell the user which files won't be imported and why
+        - Import the files, one by one.
         """
 
-        for path in self.import_files_data.keys():
-            if file_is_unreadable(path):
-                self.import_files_data[path].import_this_file = False
-                self.import_files_data[path].error = f"{path} is unreadable"
-                continue
+        tfd = TrackFileData(source_path=path)
 
-            try:
-                self.import_files_data[path].tags = get_tags(path)
-            except ApplicationError as e:
-                self.import_files_data[path].import_this_file = False
-                self.import_files_data[path].error = f"Tag errors ({str(e)})"
-                continue
+        if self.check_file_readable(tfd):
+            if self.check_file_tags(tfd):
+                self.find_similar(tfd)
+                if len(tfd.track_match_data) > 1:
+                    self.sort_track_match_data(tfd)
+                selection = self.get_user_choices(tfd)
+                if self.process_selection(tfd, selection):
+                    if self.validate_file_data(tfd):
+                        tfd.import_this_file = True
 
-    def find_similar(self, path: str) -> None:
+        return tfd
+
+    def check_file_readable(self, tfd: TrackFileData) -> bool:
+        """
+        Check file is readable.
+        Return True if it is.
+        Populate error and return False if not.
+        """
+
+        if file_is_unreadable(tfd.source_path):
+            tfd.import_this_file = False
+            tfd.error = f"{tfd.source_path} is unreadable"
+            return False
+
+        return True
+
+    def check_file_tags(self, tfd: TrackFileData) -> bool:
+        """
+        Add tags to tfd
+        Return True if successful.
+        Populate error and return False if not.
+        """
+
+        try:
+            tfd.tags = get_tags(tfd.source_path)
+        except ApplicationError as e:
+            tfd.import_this_file = False
+            tfd.error = f"of tag errors ({str(e)})"
+            return False
+
+        return True
+
+    def find_similar(self, tfd: TrackFileData) -> None:
         """
         - Search title in existing tracks
         - if score >= Config.FUZZYMATCH_MINIMUM_LIST:
             - get artist score
             - add TrackMatchData to self.import_files_data[path].track_match_data
 
-        On return, the following TrackFileData fields should be set:
-
-        tags: Yes
-        destination_path: No
-        import_this_file: Yes (set by default)
-        error: No (only set if an error is detected)
-        file_path_to_remove: No
-        track_id: No
-        track_match_data: YES, IN THIS FUNCTION
         """
 
-        title = self.import_files_data[path].tags.title
-        artist = self.import_files_data[path].tags.artist
+        title = tfd.tags.title
+        artist = tfd.tags.artist
 
         for existing_track in self.existing_tracks:
             title_score = self._get_match_score(title, existing_track.title)
             if title_score >= Config.FUZZYMATCH_MINIMUM_LIST:
                 artist_score = self._get_match_score(artist, existing_track.artist)
-                self.import_files_data[path].track_match_data.append(
+                tfd.track_match_data.append(
                     TrackMatchData(
                         artist=existing_track.artist,
                         artist_match=artist_score,
@@ -237,14 +259,12 @@ class FileImporter:
                     )
                 )
 
-    def sort_track_match_data(self, path: str) -> None:
+    def sort_track_match_data(self, tfd: TrackFileData) -> None:
         """
         Sort matched tracks in artist-similarity order
         """
 
-        self.import_files_data[path].track_match_data.sort(
-            key=lambda x: x.artist_match, reverse=True
-        )
+        tfd.track_match_data.sort(key=lambda x: x.artist_match, reverse=True)
 
     def _get_match_score(self, str1: str, str2: str) -> float:
         """
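The scores that drive `find_similar` and `sort_track_match_data` come from fuzzywuzzy. A small illustrative sketch of that kind of title-then-artist scoring; the threshold value and the plain `fuzz.ratio` call are stand-ins for `Config.FUZZYMATCH_MINIMUM_LIST` and the project's `_get_match_score`, whose exact formula is outside these hunks:

```python
from fuzzywuzzy import fuzz  # type: ignore

FUZZYMATCH_MINIMUM_LIST = 75  # illustrative threshold

existing = [
    ("Blue Monday", "New Order"),
    ("Blue Moon", "Billie Holiday"),
    ("Yellow", "Coldplay"),
]

title, artist = "Blue Monday '88", "New Order"

matches = []
for existing_title, existing_artist in existing:
    title_score = fuzz.ratio(title.lower(), existing_title.lower())
    if title_score >= FUZZYMATCH_MINIMUM_LIST:
        artist_score = fuzz.ratio(artist.lower(), existing_artist.lower())
        matches.append((existing_title, existing_artist, artist_score))

# Best artist match first, mirroring sort_track_match_data
matches.sort(key=lambda m: m[2], reverse=True)
print(matches)
```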
@@ -266,7 +286,7 @@ class FileImporter:
 
         return combined_score
 
-    def get_user_choices(self, path: str) -> int:
+    def get_user_choices(self, tfd: TrackFileData) -> int:
         """
         Find out whether user wants to import this as a new track,
         overwrite an existing track or not import it at all.
@@ -282,15 +302,12 @@ class FileImporter:
         choices.append((Config.IMPORT_AS_NEW, 0, ""))
 
         # New track details
-        new_track_description = (
-            f"{self.import_files_data[path].tags.title} "
-            f"({self.import_files_data[path].tags.artist})"
-        )
+        new_track_description = f"{tfd.tags.title} ({tfd.tags.artist})"
 
         # Select 'import as new' as default unless the top match is good
         # enough
         default = 1
-        track_match_data = self.import_files_data[path].track_match_data
+        track_match_data = tfd.track_match_data
         if track_match_data:
             if (
                 track_match_data[0].artist_match
@@ -323,48 +340,39 @@ class FileImporter:
         else:
             return -1
 
-    def process_selection(self, path: str, selection: int) -> None:
+    def process_selection(self, tfd: TrackFileData, selection: int) -> bool:
         """
         Process selection from PickMatch
         """
 
         if selection < 0:
             # User cancelled
-            self.import_files_data[path].import_this_file = False
-            self.import_files_data[path].error = "you asked not to import this file"
+            tfd.import_this_file = False
+            tfd.error = "you asked not to import this file"
+            return False
 
         elif selection > 0:
             # Import and replace track
-            self.replace_file(path=path, track_id=selection)
+            self.replace_file(tfd, track_id=selection)
 
         else:
             # Import as new
-            self.import_as_new(path=path)
+            self.import_as_new(tfd)
 
-    def replace_file(self, path: str, track_id: int) -> None:
+        return True
+
+    def replace_file(self, tfd: TrackFileData, track_id: int) -> None:
         """
         Set up to replace an existing file.
-
-        On return, the following TrackFileData fields should be set:
-
-        tags: Yes
-        destination_path: YES, IN THIS FUNCTION
-        import_this_file: Yes (set by default)
-        error: No (only set if an error is detected)
-        file_path_to_remove: YES, IN THIS FUNCTION
-        track_id: YES, IN THIS FUNCTION
-        track_match_data: Yes
         """
 
-        ifd = self.import_files_data[path]
-
         if track_id < 1:
-            raise ApplicationError(f"No track ID: replace_file({path=}, {track_id=})")
+            raise ApplicationError(f"No track ID: replace_file({tfd=}, {track_id=})")
 
-        ifd.track_id = track_id
+        tfd.track_id = track_id
 
         existing_track_path = self._get_existing_track(track_id).path
-        ifd.file_path_to_remove = existing_track_path
+        tfd.file_path_to_remove = existing_track_path
 
         # If the existing file in the Config.IMPORT_DESTINATION
         # directory, replace it with the imported file name; otherwise,
@@ -372,11 +380,11 @@ class FileImporter:
         # names from CDs, etc.
 
         if os.path.dirname(existing_track_path) == Config.IMPORT_DESTINATION:
-            ifd.destination_path = os.path.join(
-                Config.IMPORT_DESTINATION, os.path.basename(path)
+            tfd.destination_path = os.path.join(
+                Config.IMPORT_DESTINATION, os.path.basename(tfd.source_path)
             )
         else:
-            ifd.destination_path = existing_track_path
+            tfd.destination_path = existing_track_path
 
     def _get_existing_track(self, track_id: int) -> Tracks:
         """
@@ -391,58 +399,45 @@ class FileImporter:
 
         return existing_track_records[0]
 
-    def import_as_new(self, path: str) -> None:
+    def import_as_new(self, tfd: TrackFileData) -> None:
         """
         Set up to import as a new file.
-
-        On return, the following TrackFileData fields should be set:
-
-        tags: Yes
-        destination_path: YES, IN THIS FUNCTION
-        import_this_file: Yes (set by default)
-        error: No (only set if an error is detected)
-        file_path_to_remove: No (not needed now)
-        track_id: Yes
-        track_match_data: Yes
         """
 
-        ifd = self.import_files_data[path]
-        ifd.destination_path = os.path.join(
-            Config.IMPORT_DESTINATION, os.path.basename(path)
+        tfd.destination_path = os.path.join(
+            Config.IMPORT_DESTINATION, os.path.basename(tfd.source_path)
         )
 
-    def validate_file_data(self, path: str) -> None:
+    def validate_file_data(self, tfd: TrackFileData) -> bool:
         """
         Check the data structures for integrity
+        Return True if all OK
+        Populate error and return False if not.
         """
 
-        ifd = self.import_files_data[path]
-
-        # Check import_this_file
-        if not ifd.import_this_file:
-            return
-
         # Check tags
-        if not (ifd.tags.artist and ifd.tags.title):
-            raise ApplicationError(f"validate_file_data: {ifd.tags=}, {path=}")
+        if not (tfd.tags.artist and tfd.tags.title):
+            raise ApplicationError(
+                f"validate_file_data: {tfd.tags=}, {tfd.source_path=}"
+            )
 
         # Check file_path_to_remove
-        if ifd.file_path_to_remove and not os.path.exists(ifd.file_path_to_remove):
+        if tfd.file_path_to_remove and not os.path.exists(tfd.file_path_to_remove):
             # File to remove is missing, but this isn't a major error. We
             # may be importing to replace a deleted file.
-            ifd.file_path_to_remove = ""
+            tfd.file_path_to_remove = ""
 
         # Check destination_path
-        if not ifd.destination_path:
+        if not tfd.destination_path:
             raise ApplicationError(
-                f"validate_file_data: no destination path set ({path=})"
+                f"validate_file_data: no destination path set ({tfd.source_path=})"
            )
 
         # If destination path is the same as file_path_to_remove, that's
         # OK, otherwise if this is a new import then check check
         # destination path doesn't already exists
-        if ifd.track_id == 0 and ifd.destination_path != ifd.file_path_to_remove:
-            while os.path.exists(ifd.destination_path):
+        if tfd.track_id == 0 and tfd.destination_path != tfd.file_path_to_remove:
+            while os.path.exists(tfd.destination_path):
                 msg = (
                     "New import requested but default destination path ({ifd.destination_path}) "
                     "already exists. Click OK and choose where to save this track"
@@ -455,92 +450,104 @@ class FileImporter:
                     directory=Config.IMPORT_DESTINATION,
                 )
                 if pathspec:
-                    ifd.destination_path = pathspec[0]
+                    if pathspec == '':
+                        # User cancelled
+                        tfd.error = "You did not select a location to save this track"
+                        return False
+                    tfd.destination_path = pathspec[0]
                 else:
-                    ifd.import_this_file = False
-                    ifd.error = "destination file already exists"
-                    return
+                    tfd.error = "destination file already exists"
+                    return False
 
         # Check track_id
-        if ifd.track_id < 0:
-            raise ApplicationError(f"validate_file_data: track_id < 0, {path=}")
+        if tfd.track_id < 0:
+            raise ApplicationError(
+                f"validate_file_data: track_id < 0, {tfd.source_path=}"
+            )
 
-    def inform_user(self) -> None:
+        return True
+
+    def inform_user(self, tfds: list[TrackFileData]) -> None:
         """
         Tell user about files that won't be imported
         """
 
         msgs: list[str] = []
-        for path, entry in self.import_files_data.items():
-            if entry.import_this_file is False:
-                msgs.append(
-                    f"{os.path.basename(path)} will not be imported because {entry.error}"
-                )
+        for tfd in tfds:
+            msgs.append(
+                f"{os.path.basename(tfd.source_path)} will not be imported because {tfd.error}"
+            )
         if msgs:
             show_OK("File not imported", "\r\r".join(msgs))
+            log.debug("\r\r".join(msgs))
 
-    def import_next_file(self) -> None:
+    def _import_next_file(self) -> None:
         """
         Import the next file sequentially.
+
+        This is called when an import completes so will be called asynchronously.
+        Protect with a lock.
         """
 
-        while True:
-            if not self.import_files_data:
-                self.signals.status_message_signal.emit("All files imported", 10000)
-                return
+        lock = threading.Lock()
 
-            # Get details for next file to import
-            path, tfd = self.import_files_data.popitem()
-            if tfd.import_this_file:
-                break
+        with lock:
+            while len(self.workers) < Config.MAX_IMPORT_THREADS:
+                try:
+                    tfd = self.import_files_data.pop()
+                    filename = os.path.basename(tfd.source_path)
+                    log.debug(f"_import_next_file: {filename}")
+                    log.debug(
+                        f"remaining files: {[a.source_path for a in self.import_files_data]}"
+                    )
+                    self.signals.status_message_signal.emit(f"Importing {filename}", 10000)
+                    self._start_import(tfd)
+                except IndexError:
+                    log.debug("import_next_file: no files remaining in queue")
+                    break
 
-        print(f"import_next_file {path=}")
+    def _start_import(self, tfd: TrackFileData) -> None:
+        """
+        Start thread to import track
+        """
 
-        # Create and start a thread for processing
-        worker = DoTrackImport(
-            import_file_path=path,
+        filename = os.path.basename(tfd.source_path)
+        log.debug(f"_start_import({filename=})")
+
+        self.workers[tfd.source_path] = DoTrackImport(
+            import_file_path=tfd.source_path,
             tags=tfd.tags,
             destination_path=tfd.destination_path,
             track_id=tfd.track_id,
         )
-        thread = QThread()
-        self.threads.append(thread)
+        log.debug(f"{self.workers[tfd.source_path]=} created")
 
-        # Move worker to thread
-        worker.moveToThread(thread)
+        self.workers[tfd.source_path].import_finished.connect(self.post_import_processing)
+        self.workers[tfd.source_path].finished.connect(lambda: self.cleanup_thread(tfd))
+        self.workers[tfd.source_path].finished.connect(self.workers[tfd.source_path].deleteLater)
 
-        # Connect signals and slots
-        thread.started.connect(worker.run)
-        thread.started.connect(lambda: print(f"Thread starting for {path=}"))
-
-        worker.import_finished.connect(self.post_import_processing)
-        worker.import_finished.connect(thread.quit)
-        worker.import_finished.connect(lambda: print(f"Worker ended for {path=}"))
-
-        # Ensure cleanup only after thread is fully stopped
-        thread.finished.connect(lambda: self.cleanup_thread(thread, worker))
-        thread.finished.connect(lambda: print(f"Thread ended for {path=}"))
-
-        # Start the thread
-        print(f"Calling thread.start() for {path=}")
-        thread.start()
+        self.workers[tfd.source_path].start()
 
-    def cleanup_thread(self, thread, worker):
+    def cleanup_thread(self, tfd: TrackFileData) -> None:
         """
         Remove references to finished threads/workers to prevent leaks.
         """
 
-        worker.deleteLater()
-        thread.deleteLater()
-        if thread in self.threads:
-            self.threads.remove(thread)
+        log.debug(f"cleanup_thread({tfd.source_path=})")
 
-    def post_import_processing(self, track_id: int) -> None:
+        if tfd.source_path in self.workers:
+            del self.workers[tfd.source_path]
+        else:
+            log.debug(f"Couldn't find entry in self.workers: {tfd.source_path=}")
+
+        log.debug(f"After cleanup_thread: {self.workers.keys()=}")
+
+    def post_import_processing(self, source_path: str, track_id: int) -> None:
         """
         If track already in playlist, refresh it else insert it
         """
 
-        log.debug(f"post_import_processing({track_id=})")
+        log.debug(f"post_import_processing({source_path=}, {track_id=})")
 
         if self.model_data:
             if self.model_data.base_model:
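`DoTrackImport` is now a `QThread` subclass that reports completion through a `pyqtSignal(str, int)`, and `FileImporter` keeps each running worker in a dict keyed by source path until its `finished` signal fires. A stripped-down, self-contained sketch of that pattern, using a hypothetical `Importer` worker and made-up file names rather than the project's classes:

```python
import sys

from PyQt6.QtCore import QCoreApplication, QThread, pyqtSignal


class Importer(QThread):
    """Worker thread: run() executes off the main thread; signals cross back safely."""

    import_finished = pyqtSignal(str, int)  # (source_path, track_id)

    def __init__(self, source_path: str) -> None:
        super().__init__()
        self.source_path = source_path

    def run(self) -> None:
        # Placeholder for the real import work.
        self.import_finished.emit(self.source_path, 42)


app = QCoreApplication(sys.argv)
workers: dict[str, Importer] = {}


def on_imported(source_path: str, track_id: int) -> None:
    print(f"imported {source_path} as track {track_id}")


def cleanup(source_path: str) -> None:
    # Mirror cleanup_thread: drop the reference once the thread reports finished.
    workers.pop(source_path, None)
    if not workers:
        app.quit()


for path in ("a.mp3", "b.flac"):
    worker = Importer(path)
    worker.import_finished.connect(on_imported)
    worker.finished.connect(lambda p=path: cleanup(p))  # default arg captures this path
    workers[path] = worker
    worker.start()

app.exec()
```

Connecting the signals before `start()` and holding a reference until `finished` keeps the thread object alive for the whole import, which is the role the `workers` dict plays in the diff.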
@@ -548,16 +555,16 @@ class FileImporter:
                     track_id, self.model_data.row_number
                 )
 
-        # Process next file
-        self.import_next_file()
+        # Process next file(s)
+        self._import_next_file()
 
 
-class DoTrackImport(QObject):
+class DoTrackImport(QThread):
     """
     Class to manage the actual import of tracks in a thread.
     """
 
-    import_finished = pyqtSignal(int)
+    import_finished = pyqtSignal(str, int)
 
     def __init__(
         self,
@@ -578,6 +585,9 @@ class DoTrackImport(QObject):
 
         self.signals = MusicMusterSignals()
 
+    def __repr__(self) -> str:
+        return f"<DoTrackImport(id={hex(id(self))}, import_file_path={self.import_file_path}"
+
     def run(self) -> None:
         """
         Either create track objects from passed files or update exising track
@@ -601,11 +611,11 @@ class DoTrackImport(QObject):
         if temp_file and os.path.exists(temp_file):
             os.unlink(temp_file)
 
-        with db.Session() as session:
-            self.signals.status_message_signal.emit(
-                f"Importing {os.path.basename(self.import_file_path)}", 5000
-            )
+        self.signals.status_message_signal.emit(
+            f"Importing {os.path.basename(self.import_file_path)}", 5000
+        )
 
+        with db.Session() as session:
             if self.track_id == 0:
                 # Import new track
                 try:
@@ -630,6 +640,9 @@ class DoTrackImport(QObject):
                         if hasattr(track, key):
                             setattr(track, key, value)
                     track.path = self.destination_track_path
+                else:
+                    log.error(f"Unable to retrieve {self.track_id=}")
+                    return
             session.commit()
 
         helpers.normalise_track(self.destination_track_path)
@@ -637,7 +650,7 @@ class DoTrackImport(QObject):
         self.signals.status_message_signal.emit(
             f"{os.path.basename(self.import_file_path)} imported", 10000
         )
-        self.import_finished.emit(track.id)
+        self.import_finished.emit(self.import_file_path, track.id)
 
 
 class PickMatch(QDialog):
@@ -200,9 +200,9 @@ def get_tags(path: str) -> Tags:
     try:
         tag = TinyTag.get(path)
     except FileNotFoundError:
-        raise ApplicationError(f"File not found: get_tags({path=})")
+        raise ApplicationError(f"File not found: {path})")
     except TinyTagException:
-        raise ApplicationError(f"Can't read tags: get_tags({path=})")
+        raise ApplicationError(f"Can't read tags in {path})")
 
     if (
         tag.title is None
@@ -210,7 +210,7 @@ def get_tags(path: str) -> Tags:
         or tag.bitrate is None
         or tag.duration is None
     ):
-        raise ApplicationError(f"Missing tags: get_tags({path=})")
+        raise ApplicationError(f"Missing tags in {path})")
 
     return Tags(
         title=tag.title,
app/log.py (36 changes; Executable file → Normal file)
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 # Standard library imports
+from collections import defaultdict
 import logging
 import logging.config
 import logging.handlers
@@ -20,15 +21,38 @@ from config import Config
 class FunctionFilter(logging.Filter):
     """Filter to allow category-based logging to stderr."""
 
-    def __init__(self, functions: set[str]):
+    def __init__(self, module_functions: dict[str, list[str]]):
         super().__init__()
-        self.functions = functions
+
+        self.modules: list[str] = []
+        self.functions: defaultdict[str, list[str]] = defaultdict(list)
+
+        for module in module_functions.keys():
+            if module_functions[module]:
+                for function in module_functions[module]:
+                    self.functions[module].append(function)
+            else:
+                self.modules.append(module)
 
     def filter(self, record: logging.LogRecord) -> bool:
-        return (
-            getattr(record, "funcName", None) in self.functions
-            and getattr(record, "levelname", None) == "DEBUG"
-        )
+        if not getattr(record, "levelname", None) == "DEBUG":
+            # Only prcess DEBUG messages
+            return False
+
+        module = getattr(record, "module", None)
+        if not module:
+            # No module in record
+            return False
+
+        # Process if this is a module we're tracking
+        if module in self.modules:
+            return True
+
+        # Process if this is a function we're tracking
+        if getattr(record, "funcName", None) in self.functions[module]:
+            return True
+
+        return False
 
 
 class LevelTagFilter(logging.Filter):
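`FunctionFilter` is an ordinary `logging.Filter`: attached to a handler, it sees every record routed there and returns False to drop it. A compact, self-contained sketch of the underlying mechanism, using a simplified allow-list keyed on function names only (the function names and logger wiring here are placeholders, not the project's configuration):

```python
import logging


class DebugFunctionFilter(logging.Filter):
    """Simplified variant: pass DEBUG records only from named functions."""

    def __init__(self, functions: set[str]) -> None:
        super().__init__()
        self.functions = functions

    def filter(self, record: logging.LogRecord) -> bool:
        return record.levelname == "DEBUG" and record.funcName in self.functions


handler = logging.StreamHandler()
handler.addFilter(DebugFunctionFilter({"fa"}))

log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
log.addHandler(handler)


def fa() -> None:
    log.debug("fa: this DEBUG record reaches stderr")


def fb() -> None:
    log.debug("fb: this one is filtered out")


fa()
fb()
```

The YAML hunk below wires the project's fuller version of this filter into the stderr handler via `logging.config`.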
@@ -4,18 +4,24 @@ disable_existing_loggers: True
 formatters:
   colored:
     (): colorlog.ColoredFormatter
-    format: "%(log_color)s[%(asctime)s] %(filename)s:%(lineno)s %(message)s"
+    format: "%(log_color)s[%(asctime)s] %(filename)s.%(funcName)s:%(lineno)s %(blue)s%(message)s"
     datefmt: "%H:%M:%S"
   syslog:
     format: "[%(name)s] %(filename)s:%(lineno)s %(leveltag)s: %(message)s"
 
 filters:
   leveltag:
-    (): newlogger.LevelTagFilter
+    (): log.LevelTagFilter
   category_filter:
-    (): newlogger.FunctionFilter
-    functions: !!set
-      fb: null
+    (): log.FunctionFilter
+    module_functions:
+      # Optionally additionally log some debug calls to stderr
+      # log all debug calls in a module:
+      # module-name: []
+      # log debug calls for some functions in a module:
+      # module-name:
+      # - function-name-1
+      # - function-name-2
 
 handlers:
   stderr:
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
-from newlogger import log
+from log import log
+
+
 # Testing
 def fa():
     log.debug("fa Debug message")
@@ -860,7 +860,7 @@ class Window(QMainWindow, Ui_MainWindow):
             self.current.base_model,
             self.current_row_or_end()
         )
-        self.importer.do_import()
+        self.importer.start()
 
     def insert_header(self) -> None:
         """Show dialog box to enter header text and add to playlist"""
@@ -1030,7 +1030,7 @@ class PlaylistModel(QAbstractTableModel):
                 log.debug(f"{self}: OBS scene changed to '{scene_name}'")
                 continue
             except obswebsocket.exceptions.ConnectionFailure:
-                log.error(f"{self}: OBS connection refused")
+                log.warning(f"{self}: OBS connection refused")
                 return
 
     def previous_track_ended(self) -> None:
@@ -1151,6 +1151,7 @@ class PlaylistModel(QAbstractTableModel):
             ]:
                 if ts:
                     ts.update_playlist_and_row(session)
+            session.commit()
 
         self.update_track_times()
 