Attachments

This commit is contained in:
  parent 421b441dbe
  commit c84396e669

50 changed files with 879 additions and 261 deletions
@@ -45,3 +45,20 @@ class MutationAdmin(ModelAdmin):
     search_fields = ["created_by__preferred_username"]
     list_filter = ["type", "is_approved", "is_applied"]
     actions = [apply]
+
+
+@register(models.Attachment)
+class AttachmentAdmin(ModelAdmin):
+    list_display = [
+        "uuid",
+        "actor",
+        "url",
+        "file",
+        "size",
+        "mimetype",
+        "creation_date",
+        "last_fetch_date",
+    ]
+    list_select_related = True
+    search_fields = ["actor__domain__name"]
+    list_filter = ["mimetype"]
@@ -23,3 +23,14 @@ class MutationFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
             return
         self.target = extracted
         self.save()
+
+
+@registry.register
+class AttachmentFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
+    url = factory.Faker("federation_url")
+    uuid = factory.Faker("uuid4")
+    actor = factory.SubFactory(federation_factories.ActorFactory)
+    file = factory.django.ImageField()
+
+    class Meta:
+        model = "common.Attachment"
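The new AttachmentFactory builds a complete attachment (generated image file, related federation actor, random UUID), which keeps test setup short. A minimal pytest-style sketch of how it might be exercised; the module path funkwhale_api.common.factories and the test itself are illustrative, not part of this commit:

# Hypothetical test (not in this diff); the factory module path is assumed.
import pytest

from funkwhale_api.common import factories


@pytest.mark.django_db
def test_attachment_factory_builds_usable_attachment():
    attachment = factories.AttachmentFactory()

    # The factory supplies a generated image and a related federation actor;
    # Attachment.save() then fills size and mimetype from the file.
    assert attachment.file
    assert attachment.actor is not None
    assert attachment.uuid is not None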
@@ -1,6 +1,5 @@
 import html
 import io
-import requests
 import time
 import xml.sax.saxutils
 
@@ -11,6 +10,7 @@ from django import urls
 from rest_framework import views
 
 from . import preferences
+from . import session
 from . import throttling
 from . import utils
 
@@ -76,10 +76,7 @@ def get_spa_html(spa_url):
     if cached:
         return cached
 
-    response = requests.get(
-        utils.join_url(spa_url, "index.html"),
-        verify=settings.EXTERNAL_REQUESTS_VERIFY_SSL,
-    )
+    response = session.get_session().get(utils.join_url(spa_url, "index.html"),)
     response.raise_for_status()
     content = response.text
     caches["local"].set(cache_key, content, settings.FUNKWHALE_SPA_HTML_CACHE_DURATION)
@@ -0,0 +1,35 @@
+# Generated by Django 2.2.6 on 2019-11-11 13:38
+
+import django.contrib.postgres.fields.jsonb
+import django.core.serializers.json
+from django.db import migrations, models
+import django.db.models.deletion
+import django.utils.timezone
+import funkwhale_api.common.models
+import funkwhale_api.common.validators
+import uuid
+import versatileimagefield.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('common', '0003_cit_extension'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Attachment',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('url', models.URLField(max_length=500, unique=True, null=True)),
+                ('uuid', models.UUIDField(db_index=True, default=uuid.uuid4, unique=True)),
+                ('creation_date', models.DateTimeField(default=django.utils.timezone.now)),
+                ('last_fetch_date', models.DateTimeField(blank=True, null=True)),
+                ('size', models.IntegerField(blank=True, null=True)),
+                ('mimetype', models.CharField(blank=True, max_length=200, null=True)),
+                ('file', versatileimagefield.fields.VersatileImageField(max_length=255, upload_to=funkwhale_api.common.models.get_file_path, validators=[funkwhale_api.common.validators.ImageDimensionsValidator(min_height=50, min_width=50), funkwhale_api.common.validators.FileValidator(allowed_extensions=['png', 'jpg', 'jpeg'], max_size=5242880)])),
+                ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='attachments', to='federation.Actor', null=True)),
+            ],
+        ),
+    ]
@@ -1,4 +1,6 @@
 import uuid
+import magic
+import mimetypes
 
 from django.contrib.postgres.fields import JSONField
 from django.contrib.contenttypes.fields import GenericForeignKey
@@ -9,11 +11,18 @@ from django.db import connections, models, transaction
from django.db.models import Lookup
from django.db.models.fields import Field
from django.db.models.sql.compiler import SQLCompiler
from django.dispatch import receiver
from django.utils import timezone
from django.urls import reverse

from versatileimagefield.fields import VersatileImageField
from versatileimagefield.image_warmer import VersatileImageFieldWarmer

from funkwhale_api.federation import utils as federation_utils

from . import utils
from . import validators


@Field.register_lookup
class NotEqual(Lookup):
@@ -150,3 +159,102 @@ class Mutation(models.Model):
         self.applied_date = timezone.now()
         self.save(update_fields=["is_applied", "applied_date", "previous_state"])
         return previous_state
+
+
+def get_file_path(instance, filename):
+    return utils.ChunkedPath("attachments")(instance, filename)
+
+
+class AttachmentQuerySet(models.QuerySet):
+    def attached(self, include=True):
+        related_fields = ["covered_album"]
+        query = None
+        for field in related_fields:
+            field_query = ~models.Q(**{field: None})
+            query = query | field_query if query else field_query
+
+        if include is False:
+            query = ~query
+
+        return self.filter(query)
+
+
+class Attachment(models.Model):
+    # Remote URL where the attachment can be fetched
+    url = models.URLField(max_length=500, unique=True, null=True)
+    uuid = models.UUIDField(unique=True, db_index=True, default=uuid.uuid4)
+    # Actor associated with the attachment
+    actor = models.ForeignKey(
+        "federation.Actor",
+        related_name="attachments",
+        on_delete=models.CASCADE,
+        null=True,
+    )
+    creation_date = models.DateTimeField(default=timezone.now)
+    last_fetch_date = models.DateTimeField(null=True, blank=True)
+    # File size
+    size = models.IntegerField(null=True, blank=True)
+    mimetype = models.CharField(null=True, blank=True, max_length=200)
+
+    file = VersatileImageField(
+        upload_to=get_file_path,
+        max_length=255,
+        validators=[
+            validators.ImageDimensionsValidator(min_width=50, min_height=50),
+            validators.FileValidator(
+                allowed_extensions=["png", "jpg", "jpeg"], max_size=1024 * 1024 * 5,
+            ),
+        ],
+    )
+
+    objects = AttachmentQuerySet.as_manager()
+
+    def save(self, **kwargs):
+        if self.file and not self.size:
+            self.size = self.file.size
+
+        if self.file and not self.mimetype:
+            self.mimetype = self.guess_mimetype()
+
+        return super().save()
+
+    @property
+    def is_local(self):
+        return federation_utils.is_local(self.fid)
+
+    def guess_mimetype(self):
+        f = self.file
+        b = min(1000000, f.size)
+        t = magic.from_buffer(f.read(b), mime=True)
+        if not t.startswith("image/"):
+            # failure, we try guessing by extension
+            mt, _ = mimetypes.guess_type(f.name)
+            if mt:
+                t = mt
+        return t
+
+    @property
+    def download_url_original(self):
+        if self.file:
+            return federation_utils.full_url(self.file.url)
+        proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
+        return federation_utils.full_url(proxy_url + "?next=original")
+
+    @property
+    def download_url_medium_square_crop(self):
+        if self.file:
+            return federation_utils.full_url(self.file.crop["200x200"].url)
+        proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
+        return federation_utils.full_url(proxy_url + "?next=medium_square_crop")
+
+
+@receiver(models.signals.post_save, sender=Attachment)
+def warm_attachment_thumbnails(sender, instance, **kwargs):
+    if not instance.file or not settings.CREATE_IMAGE_THUMBNAILS:
+        return
+    warmer = VersatileImageFieldWarmer(
+        instance_or_queryset=instance,
+        rendition_key_set="attachment_square",
+        image_attr="file",
+    )
+    num_created, failed_to_create = warmer.warm()
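AttachmentQuerySet.attached() ORs together ~Q(field=None) filters over the known reverse relations (only covered_album for now) and negates the result when include is False, so callers can select either referenced or orphaned attachments. A small usage sketch, assuming the module path funkwhale_api.common.models:

# Illustrative usage of the new queryset method (module path assumed).
from funkwhale_api.common.models import Attachment

# Attachments referenced by at least one related object, e.g. an album cover
referenced = Attachment.objects.attached()

# Orphaned attachments; this is what prune_unattached_attachments deletes
# once they are older than the configured grace period
orphaned = Attachment.objects.attached(include=False)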
@@ -4,11 +4,16 @@ Compute different sizes of image used for Album covers and User avatars
 from versatileimagefield.image_warmer import VersatileImageFieldWarmer
 
+from funkwhale_api.common.models import Attachment
 from funkwhale_api.music.models import Album
 from funkwhale_api.users.models import User
 
 
-MODELS = [(Album, "cover", "square"), (User, "avatar", "square")]
+MODELS = [
+    (Album, "cover", "square"),
+    (User, "avatar", "square"),
+    (Attachment, "file", "attachment_square"),
+]
 
 
 def main(command, **kwargs):
@@ -272,3 +272,38 @@ class APIMutationSerializer(serializers.ModelSerializer):
         if value not in self.context["registry"]:
             raise serializers.ValidationError("Invalid mutation type {}".format(value))
         return value
+
+
+class AttachmentSerializer(serializers.Serializer):
+    uuid = serializers.UUIDField(read_only=True)
+    size = serializers.IntegerField(read_only=True)
+    mimetype = serializers.CharField(read_only=True)
+    creation_date = serializers.DateTimeField(read_only=True)
+    file = StripExifImageField(write_only=True)
+    urls = serializers.SerializerMethodField()
+
+    def get_urls(self, o):
+        urls = {}
+        urls["source"] = o.url
+        urls["original"] = o.download_url_original
+        urls["medium_square_crop"] = o.download_url_medium_square_crop
+        return urls
+
+    def to_representation(self, o):
+        repr = super().to_representation(o)
+        # XXX: BACKWARD COMPATIBILITY
+        # having the attachment urls in a nested JSON obj is better,
+        # but we can't do this without breaking clients
+        # So we extract the urls and include these in the parent payload
+        repr.update({k: v for k, v in repr["urls"].items() if k != "source"})
+        # also, our legacy images had lots of variations (400x400, 200x200, 50x50)
+        # but we removed some of these, so we emulate these by hand (by redirecting)
+        # to actual, existing attachment variations
+        repr["square_crop"] = repr["medium_square_crop"]
+        repr["small_square_crop"] = repr["medium_square_crop"]
+        return repr
+
+    def create(self, validated_data):
+        return models.Attachment.objects.create(
+            file=validated_data["file"], actor=validated_data["actor"]
+        )
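Since to_representation() copies every entry of urls except source onto the top level and aliases the legacy square_crop / small_square_crop sizes to medium_square_crop, a serialized attachment looks roughly like the sketch below; all values are invented for illustration:

# Approximate AttachmentSerializer output (illustrative values only).
example = {
    "uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    "size": 24567,
    "mimetype": "image/jpeg",
    "creation_date": "2019-11-11T13:38:00Z",
    "urls": {
        "source": None,  # remote URL for federated attachments, if any
        "original": "https://pod.example/media/attachments/ab/cd/ef/cover.jpg",
        "medium_square_crop": "https://pod.example/media/attachments/ab/cd/ef/cover-200x200.jpg",
    },
    # flattened copies added for backward compatibility:
    "original": "https://pod.example/media/attachments/ab/cd/ef/cover.jpg",
    "medium_square_crop": "https://pod.example/media/attachments/ab/cd/ef/cover-200x200.jpg",
    "square_crop": "https://pod.example/media/attachments/ab/cd/ef/cover-200x200.jpg",
    "small_square_crop": "https://pod.example/media/attachments/ab/cd/ef/cover-200x200.jpg",
}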
@@ -4,6 +4,13 @@ from django.conf import settings
 import funkwhale_api
 
 
+class FunkwhaleSession(requests.Session):
+    def request(self, *args, **kwargs):
+        kwargs.setdefault("verify", settings.EXTERNAL_REQUESTS_VERIFY_SSL)
+        kwargs.setdefault("timeout", settings.EXTERNAL_REQUESTS_TIMEOUT)
+        return super().request(*args, **kwargs)
+
+
 def get_user_agent():
     return "python-requests (funkwhale/{}; +{})".format(
         funkwhale_api.__version__, settings.FUNKWHALE_URL
@@ -11,6 +18,6 @@ def get_user_agent():
 
 
 def get_session():
-    s = requests.Session()
+    s = FunkwhaleSession()
     s.headers["User-Agent"] = get_user_agent()
     return s
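With get_session() now returning a FunkwhaleSession, every request issued through the shared session inherits the verify and timeout defaults from settings, which is why the explicit verify= argument in get_spa_html above could be dropped. A short sketch of the effect, assuming the module path funkwhale_api.common.session; the target URL is made up:

# Illustrative only.
from funkwhale_api.common import session

s = session.get_session()

# Picks up the EXTERNAL_REQUESTS_VERIFY_SSL and EXTERNAL_REQUESTS_TIMEOUT
# defaults, plus the funkwhale User-Agent header set by get_session()
response = s.get("https://pod.example/index.html")

# Callers can still override either default per request
response = s.get("https://pod.example/index.html", timeout=5, verify=False)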
@@ -1,14 +1,23 @@
import datetime
import logging
import tempfile

from django.conf import settings
from django.core.files import File
from django.db import transaction
from django.dispatch import receiver

from django.utils import timezone

from funkwhale_api.common import channels
from funkwhale_api.taskapp import celery

from . import models
from . import serializers
from . import session
from . import signals

logger = logging.getLogger(__name__)


@celery.app.task(name="common.apply_mutation")
@transaction.atomic
@@ -57,3 +66,35 @@ def broadcast_mutation_update(mutation, old_is_approved, new_is_approved, **kwargs):
             },
         },
     )
+
+
+def fetch_remote_attachment(attachment, filename=None, save=True):
+    if attachment.file:
+        # already there, no need to fetch
+        return
+
+    s = session.get_session()
+    attachment.last_fetch_date = timezone.now()
+    with tempfile.TemporaryFile() as tf:
+        with s.get(attachment.url, timeout=5, stream=True) as r:
+            for chunk in r.iter_content():
+                tf.write(chunk)
+        tf.seek(0)
+        attachment.file.save(
+            filename or attachment.url.split("/")[-1], File(tf), save=save
+        )
+
+
+@celery.app.task(name="common.prune_unattached_attachments")
+def prune_unattached_attachments():
+    limit = timezone.now() - datetime.timedelta(
+        seconds=settings.ATTACHMENTS_UNATTACHED_PRUNE_DELAY
+    )
+    candidates = models.Attachment.objects.attached(False).filter(
+        creation_date__lte=limit
+    )
+
+    total = candidates.count()
+    logger.info("Deleting %s unattached attachments…", total)
+    result = candidates.delete()
+    logger.info("Deletion done: %s", result)
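prune_unattached_attachments is a regular Celery task, so orphaned attachments are only removed after ATTACHMENTS_UNATTACHED_PRUNE_DELAY seconds if something triggers the task periodically. A hedged sketch of a beat schedule entry; the Django-settings naming (CELERY_BEAT_SCHEDULE) and the daily interval are assumptions, since this diff does not show how the project wires up periodic tasks:

# Hypothetical periodic-task wiring (not part of this diff).
CELERY_BEAT_SCHEDULE = {
    "common.prune_unattached_attachments": {
        "task": "common.prune_unattached_attachments",
        "schedule": 60 * 60 * 24,  # run once a day, in seconds
    },
}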
@@ -11,6 +11,8 @@ from rest_framework import response
 from rest_framework import views
 from rest_framework import viewsets
 
+from funkwhale_api.users.oauth import permissions as oauth_permissions
+
 from . import filters
 from . import models
 from . import mutations
@@ -140,3 +142,40 @@ class RateLimitView(views.APIView):
             "scopes": throttling.get_status(ident, time.time()),
         }
         return response.Response(data, status=200)
+
+
+class AttachmentViewSet(
+    mixins.RetrieveModelMixin,
+    mixins.CreateModelMixin,
+    mixins.DestroyModelMixin,
+    viewsets.GenericViewSet,
+):
+    lookup_field = "uuid"
+    queryset = models.Attachment.objects.all()
+    serializer_class = serializers.AttachmentSerializer
+    permission_classes = [oauth_permissions.ScopePermission]
+    required_scope = "libraries"
+    anonymous_policy = "setting"
+
+    @action(detail=True, methods=["get"])
+    @transaction.atomic
+    def proxy(self, request, *args, **kwargs):
+        instance = self.get_object()
+
+        size = request.GET.get("next", "original").lower()
+        if size not in ["original", "medium_square_crop"]:
+            size = "original"
+
+        tasks.fetch_remote_attachment(instance)
+        data = self.serializer_class(instance).data
+        redirect = response.Response(status=302)
+        redirect["Location"] = data["urls"][size]
+        return redirect
+
+    def perform_create(self, serializer):
+        return serializer.save(actor=self.request.user.actor)
+
+    def perform_destroy(self, instance):
+        if instance.actor is None or instance.actor != self.request.user.actor:
+            raise exceptions.PermissionDenied()
+        instance.delete()
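The proxy action fetches the remote file on first access (via fetch_remote_attachment) and then answers with a 302 pointing at the requested variation, falling back to original for unknown values of ?next=. A sketch of the round trip with DRF's test client; the route name comes from the reverse() calls in the Attachment model, the UUID is a placeholder, and authentication requirements depend on the instance's anonymous-access setting:

# Illustrative request flow for the attachment proxy endpoint.
from django.urls import reverse
from rest_framework.test import APIClient

client = APIClient()
url = reverse(
    "api:v1:attachments-proxy",
    kwargs={"uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6"},  # placeholder
)

# Ask for the 200x200 crop; unknown sizes fall back to "original"
response = client.get(url, {"next": "medium_square_crop"})

assert response.status_code == 302
# Location now points at the locally cached file or its thumbnail
location = response["Location"]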