Mirror of https://code.eliotberriot.com/funkwhale/funkwhale.git, synced 2025-10-05 04:29:23 +02:00

Merge branch '1105-scan-ui' into 'develop'

Fix #1105: Can now launch server import from the UI

Closes #1105

See merge request funkwhale/funkwhale!1192

Commit b7f1c02c6f: 17 changed files with 476 additions and 6 deletions
@@ -12,6 +12,7 @@ import watchdog.events
 import watchdog.observers

 from django.conf import settings
+from django.core.cache import cache
 from django.core.files import File
 from django.core.management import call_command
 from django.core.management.base import BaseCommand, CommandError
@@ -68,8 +69,34 @@ def batch(iterable, n=1):
         yield current


+class CacheWriter:
+    """
+    Output to cache instead of console
+    """
+
+    def __init__(self, key, stdout, buffer_size=10):
+        self.key = key
+        cache.set(self.key, [])
+        self.stdout = stdout
+        self.buffer_size = buffer_size
+        self.buffer = []
+
+    def write(self, message):
+        # we redispatch the message to the console, for debugging
+        self.stdout.write(message)
+
+        self.buffer.append(message)
+        if len(self.buffer) > self.buffer_size:
+            self.flush()
+
+    def flush(self):
+        current = cache.get(self.key)
+        cache.set(self.key, current + self.buffer)
+        self.buffer = []
+
+
 class Command(BaseCommand):
-    help = "Import audio files mathinc given glob pattern"
+    help = "Import audio files matching given glob pattern"

     def add_arguments(self, parser):
         parser.add_argument(
@@ -207,7 +234,22 @@ class Command(BaseCommand):
             help="Size of each batch, only used when crawling large collections",
         )

-    def handle(self, *args, **options):
+    def handle(self, *args, **kwargs):
+        cache.set("fs-import:status", "started")
+        if kwargs.get("update_cache", False):
+            self.stdout = CacheWriter("fs-import:logs", self.stdout)
+            self.stderr = self.stdout
+        try:
+            return self._handle(*args, **kwargs)
+        except CommandError as e:
+            self.stdout.write(str(e))
+            raise
+        finally:
+            if kwargs.get("update_cache", False):
+                cache.set("fs-import:status", "finished")
+                self.stdout.flush()
+
+    def _handle(self, *args, **options):
         # handle relative directories
         options["path"] = [os.path.abspath(path) for path in options["path"]]
         self.is_confirmed = False
@@ -312,6 +354,10 @@ class Command(BaseCommand):
         batch_duration = None
         self.stdout.write("Starting import of new files…")
         for i, entries in enumerate(batch(crawler, options["batch_size"])):
+            if options.get("update_cache", False) is True:
+                # check to see if the scan was cancelled
+                if cache.get("fs-import:status") == "canceled":
+                    raise CommandError("Import cancelled")
             total += len(entries)
             batch_start = time.time()
             time_stats = ""
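
The CacheWriter added above is what lets the web UI tail a running scan: everything the command writes is echoed to the console and mirrored into the "fs-import:logs" cache key in small batches. A minimal sketch of that buffering, assuming a configured Django cache (for example inside ./manage.py shell) and with the CacheWriter class above in scope; EchoStdout is a hypothetical stand-in for the command's real self.stdout:

# Sketch only: exercise CacheWriter's buffering outside the management command.
from django.core.cache import cache


class EchoStdout:
    # hypothetical stand-in for BaseCommand.stdout
    def write(self, message):
        print(message)


writer = CacheWriter("fs-import:logs", EchoStdout(), buffer_size=2)
writer.write("scanning /srv/music")    # echoed to the console, kept in the buffer
writer.write("found 10 files")         # still buffered (len equals buffer_size, not greater)
writer.write("imported 10 new files")  # len > buffer_size, so the whole buffer is flushed to the cache
writer.flush()                         # flush whatever is left before the command exits
print(cache.get("fs-import:logs"))     # the three messages, in order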
@@ -838,3 +838,23 @@ class AlbumCreateSerializer(serializers.Serializer):
         tag_models.set_tags(instance, *(validated_data.get("tags", []) or []))
         instance.artist.get_channel()
         return instance
+
+
+class FSImportSerializer(serializers.Serializer):
+    path = serializers.CharField(allow_blank=True)
+    library = serializers.UUIDField()
+    import_reference = serializers.CharField()
+
+    def validate_path(self, value):
+        try:
+            utils.browse_dir(settings.MUSIC_DIRECTORY_PATH, value)
+        except (NotADirectoryError, FileNotFoundError, ValueError):
+            raise serializers.ValidationError("Invalid path")
+
+        return value
+
+    def validate_library(self, value):
+        try:
+            return self.context["user"].actor.libraries.get(uuid=value)
+        except models.Library.DoesNotExist:
+            raise serializers.ValidationError("Invalid library")
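
FSImportSerializer is what the new API endpoint further down feeds the request payload into: the path must resolve inside MUSIC_DIRECTORY_PATH and the library must belong to the requesting user's actor. A rough usage sketch, assuming user is an authenticated user who owns library (the variable names are illustrative):

payload = {
    "path": "Albums",              # relative to settings.MUSIC_DIRECTORY_PATH
    "library": str(library.uuid),
    "import_reference": "ui-fs-import",
}
serializer = FSImportSerializer(data=payload, context={"user": user})
serializer.is_valid(raise_exception=True)
# validate_library returns the Library instance itself, not the raw UUID
assert serializer.validated_data["library"] == library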
@@ -3,10 +3,12 @@ import datetime
 import logging
 import os

-from django.utils import timezone
+from django.conf import settings
+from django.core.cache import cache
 from django.db import transaction
 from django.db.models import F, Q
 from django.dispatch import receiver
+from django.utils import timezone

 from musicbrainzngs import ResponseError
 from requests.exceptions import RequestException
@@ -17,6 +19,7 @@ from funkwhale_api.common import utils as common_utils
 from funkwhale_api.federation import routes
 from funkwhale_api.federation import library as lb
 from funkwhale_api.federation import utils as federation_utils
+from funkwhale_api.music.management.commands import import_files
 from funkwhale_api.tags import models as tags_models
 from funkwhale_api.tags import tasks as tags_tasks
 from funkwhale_api.taskapp import celery
@@ -938,3 +941,32 @@ def update_track_metadata(audio_metadata, track):
         common_utils.attach_file(
             track.album, "attachment_cover", new_data["album"].get("cover_data")
         )
+
+
+@celery.app.task(name="music.fs_import")
+@celery.require_instance(models.Library.objects.all(), "library")
+def fs_import(library, path, import_reference):
+    if cache.get("fs-import:status") != "pending":
+        raise ValueError("Invalid import status")
+
+    command = import_files.Command()
+
+    options = {
+        "recursive": True,
+        "library_id": str(library.uuid),
+        "path": [os.path.join(settings.MUSIC_DIRECTORY_PATH, path)],
+        "update_cache": True,
+        "in_place": True,
+        "reference": import_reference,
+        "watch": False,
+        "interactive": False,
+        "batch_size": 1000,
+        "async_": False,
+        "prune": True,
+        "replace": False,
+        "verbosity": 1,
+        "exit_on_failure": False,
+        "outbox": False,
+        "broadcast": False,
+    }
+    command.handle(**options)
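
The fs_import task above simply drives the existing import_files command with a fixed set of options, importing in place into the selected library, and it refuses to start unless the status key was set to "pending" first. A minimal sketch of queuing it by hand from a Django shell, assuming a running Celery worker and mirroring how the view below calls it (keyword names follow the task signature, with library_id resolved to a Library instance by require_instance):

from django.core.cache import cache

cache.set("fs-import:status", "pending")
cache.set("fs-import:reference", "manual-import")
fs_import.delay(
    library_id=library.pk,       # resolved to a Library instance by @celery.require_instance
    path="Albums",               # joined onto settings.MUSIC_DIRECTORY_PATH
    import_reference="manual-import",
)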
@@ -1,3 +1,5 @@
+import os
+import pathlib
 import mimetypes

 import magic
@@ -130,3 +132,21 @@ def increment_downloads_count(upload, user, wsgi_request):
     duration = max(upload.duration or 0, settings.MIN_DELAY_BETWEEN_DOWNLOADS_COUNT)

     cache.set(cache_key, 1, duration)
+
+
+def browse_dir(root, path):
+    if ".." in path:
+        raise ValueError("Relative browsing is not allowed")
+
+    root = pathlib.Path(root)
+    real_path = root / path
+
+    dirs = []
+    files = []
+    for el in sorted(os.listdir(real_path)):
+        if os.path.isdir(real_path / el):
+            dirs.append({"name": el, "dir": True})
+        else:
+            files.append({"name": el, "dir": False})
+
+    return dirs + files
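
browse_dir is shared by the serializer and the view: it lists one directory level under the music root and rejects any attempt to climb out of it. A quick sketch of the expected behaviour (the paths are illustrative):

# Directories come first, then files, each group sorted by name.
browse_dir("/srv/funkwhale/data/music", "Albums")
# e.g. [{"name": "2019", "dir": True}, {"name": "cover.jpg", "dir": False}]

browse_dir("/srv/funkwhale/data/music", "../etc")
# raises ValueError("Relative browsing is not allowed")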
@@ -3,6 +3,7 @@ import datetime
 import logging
 import urllib.parse
 from django.conf import settings
+from django.core.cache import cache
 from django.db import transaction
 from django.db.models import Count, Prefetch, Sum, F, Q
 import django.db.utils
@@ -314,6 +315,64 @@ class LibraryViewSet(
         serializer = self.get_serializer(queryset, many=True)
         return Response(serializer.data)

+    @action(
+        methods=["get", "post", "delete"],
+        detail=False,
+        url_name="fs-import",
+        url_path="fs-import",
+    )
+    @transaction.non_atomic_requests
+    def fs_import(self, request, *args, **kwargs):
+        if not request.user.is_authenticated:
+            return Response({}, status=403)
+        if not request.user.all_permissions["library"]:
+            return Response({}, status=403)
+        if request.method == "GET":
+            path = request.GET.get("path", "")
+            data = {
+                "root": settings.MUSIC_DIRECTORY_PATH,
+                "path": path,
+                "import": None,
+            }
+            status = cache.get("fs-import:status", default=None)
+            if status:
+                data["import"] = {
+                    "status": status,
+                    "reference": cache.get("fs-import:reference"),
+                    "logs": cache.get("fs-import:logs", default=[]),
+                }
+            try:
+                data["content"] = utils.browse_dir(data["root"], data["path"])
+            except (NotADirectoryError, ValueError, FileNotFoundError) as e:
+                return Response({"detail": str(e)}, status=400)
+
+            return Response(data)
+        if request.method == "POST":
+            if cache.get("fs-import:status", default=None) in [
+                "pending",
+                "started",
+            ]:
+                return Response({"detail": "An import is already running"}, status=400)
+
+            data = request.data
+            serializer = serializers.FSImportSerializer(
+                data=data, context={"user": request.user}
+            )
+            serializer.is_valid(raise_exception=True)
+            cache.set("fs-import:status", "pending")
+            cache.set(
+                "fs-import:reference", serializer.validated_data["import_reference"]
+            )
+            tasks.fs_import.delay(
+                library_id=serializer.validated_data["library"].pk,
+                path=serializer.validated_data["path"],
+                import_reference=serializer.validated_data["import_reference"],
+            )
+            return Response(status=201)
+        if request.method == "DELETE":
+            cache.set("fs-import:status", "canceled")
+            return Response(status=204)
+

 class TrackViewSet(
     HandleInvalidSearch,
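
Together with the frontend changes in this merge request, the fs-import action above is what the UI talks to. A rough sketch of the HTTP flow using the requests library, assuming the LibraryViewSet is routed under /api/v1/libraries/ and that the token belongs to a user with the "library" permission (the URL, token handling and placeholder values are illustrative):

import requests

API = "https://funkwhale.example/api/v1/libraries/fs-import/"
HEADERS = {"Authorization": "Bearer <token>"}

# GET: browse the server-side music directory and read the current import status and logs
print(requests.get(API, params={"path": "Albums"}, headers=HEADERS).json())

# POST: start a scan of that directory into one of the user's libraries
requests.post(
    API,
    json={"path": "Albums", "library": "<library uuid>", "import_reference": "ui-scan"},
    headers=HEADERS,
)

# DELETE: ask a running scan to stop at the next batch boundary
requests.delete(API, headers=HEADERS)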