diff --git a/.flake8 b/.flake8 index 97cfb3ffb..2b8bf7611 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,5 @@ [flake8] +exclude = + anthias_app/migrations/*.py per-file-ignores = bin/migrate.py: E501 diff --git a/.github/workflows/docker-test.yaml b/.github/workflows/docker-test.yaml index 48d4a18ce..5af043f5e 100644 --- a/.github/workflows/docker-test.yaml +++ b/.github/workflows/docker-test.yaml @@ -60,11 +60,14 @@ jobs: run: | docker compose -f docker-compose.test.yml up -d - - name: Run the tests inside the container - shell: 'script -q -e -c "bash {0}"' + - name: Run the unit tests inside the container + run: | + docker compose -f docker-compose.test.yml exec anthias-test ./manage.py test --noinput --parallel --exclude-tag=integration + + - name: Run the integration tests inside the container run: | docker compose -f docker-compose.test.yml exec anthias-test bash ./bin/prepare_test_environment.sh -s - docker compose -f docker-compose.test.yml exec anthias-test nose2 -v + docker compose -f docker-compose.test.yml exec anthias-test ./manage.py test --noinput --tag=integration - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v3 diff --git a/.gitignore b/.gitignore index bad1d78af..27f991455 100644 --- a/.gitignore +++ b/.gitignore @@ -52,3 +52,7 @@ docker/Dockerfile.test docker-compose.yml balena-deploy/ +db.sqlite3 + +# Django +staticfiles/ diff --git a/anthias_app/admin.py b/anthias_app/admin.py new file mode 100644 index 000000000..42c33bc8e --- /dev/null +++ b/anthias_app/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin # noqa F401 + +# Register your models here. diff --git a/anthias_app/apps.py b/anthias_app/apps.py new file mode 100644 index 000000000..a6f7354b9 --- /dev/null +++ b/anthias_app/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class AnthiasAppConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'anthias_app' diff --git a/anthias_app/helpers.py b/anthias_app/helpers.py index d156a4e94..ff006b85b 100644 --- a/anthias_app/helpers.py +++ b/anthias_app/helpers.py @@ -1,26 +1,27 @@ import uuid import yaml -from datetime import datetime -from flask import render_template -from os import getenv, path -from lib import assets_helper, db +from django.shortcuts import render +from django.utils import timezone from lib.github import is_up_to_date from lib.utils import get_video_duration +from os import getenv, path +from anthias_app.models import Asset from settings import settings -def template(template_name, **context): +def template(request, template_name, context): + """ + This is a helper function that is used to render a template + with some global context. This is used to avoid having to + repeat code in other views. 
""" - This is a template response wrapper that shares the - same function signature as Flask's render_template() method - but also injects some global context.""" - # Add global contexts context['date_format'] = settings['date_format'] context['default_duration'] = settings['default_duration'] context['default_streaming_duration'] = ( - settings['default_streaming_duration']) + settings['default_streaming_duration'] + ) context['template_settings'] = { 'imports': ['from lib.utils import template_handle_unicode'], 'default_filters': ['template_handle_unicode'], @@ -28,7 +29,7 @@ def template(template_name, **context): context['up_to_date'] = is_up_to_date() context['use_24_hour_clock'] = settings['use_24_hour_clock'] - return render_template(template_name, context=context) + return render(request, template_name, context) def prepare_default_asset(**kwargs): @@ -46,7 +47,6 @@ def prepare_default_asset(**kwargs): 'asset_id': asset_id, 'duration': duration, 'end_date': kwargs['end_date'], - 'is_active': 1, 'is_enabled': True, 'is_processing': 0, 'mimetype': kwargs['mimetype'], @@ -62,7 +62,7 @@ def prepare_default_asset(**kwargs): def add_default_assets(): settings.load() - datetime_now = datetime.now() + datetime_now = timezone.now() default_asset_settings = { 'start_date': datetime_now, 'end_date': datetime_now.replace(year=datetime_now.year + 6), @@ -70,25 +70,28 @@ def add_default_assets(): } default_assets_yaml = path.join( - getenv('HOME'), '.screenly/default_assets.yml') + getenv('HOME'), + '.screenly/default_assets.yml', + ) with open(default_assets_yaml, 'r') as yaml_file: default_assets = yaml.safe_load(yaml_file).get('assets') - with db.conn(settings['database']) as conn: - for default_asset in default_assets: - default_asset_settings.update({ - 'name': default_asset.get('name'), - 'uri': default_asset.get('uri'), - 'mimetype': default_asset.get('mimetype') - }) - asset = prepare_default_asset(**default_asset_settings) - if asset: - assets_helper.create(conn, asset) + + for default_asset in default_assets: + default_asset_settings.update({ + 'name': default_asset.get('name'), + 'uri': default_asset.get('uri'), + 'mimetype': default_asset.get('mimetype') + }) + asset = prepare_default_asset(**default_asset_settings) + + if asset: + Asset.objects.create(**asset) def remove_default_assets(): settings.load() - with db.conn(settings['database']) as conn: - for asset in assets_helper.read(conn): - if asset['asset_id'].startswith('default_'): - assets_helper.delete(conn, asset['asset_id']) + + for asset in Asset.objects.all(): + if asset.asset_id.startswith('default_'): + asset.delete() diff --git a/anthias_app/migrations/0001_initial.py b/anthias_app/migrations/0001_initial.py new file mode 100644 index 000000000..6d3b0ce25 --- /dev/null +++ b/anthias_app/migrations/0001_initial.py @@ -0,0 +1,36 @@ +# Generated by Django 3.2.18 on 2024-08-23 18:45 + +import anthias_app.models +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Asset', + fields=[ + ('asset_id', models.TextField(default=anthias_app.models.generate_asset_id, editable=False, primary_key=True, serialize=False)), + ('name', models.TextField(blank=True, null=True)), + ('uri', models.TextField(blank=True, null=True)), + ('md5', models.TextField(blank=True, null=True)), + ('start_date', models.DateTimeField(blank=True, null=True)), + ('end_date', models.DateTimeField(blank=True, null=True)), + 
('duration', models.TextField(blank=True, null=True)), + ('mimetype', models.TextField(blank=True, null=True)), + ('is_enabled', models.IntegerField(default=0)), + ('is_processing', models.IntegerField(default=0)), + ('nocache', models.IntegerField(default=0)), + ('play_order', models.IntegerField(default=0)), + ('skip_asset_check', models.IntegerField(default=0)), + ], + options={ + 'db_table': 'assets', + }, + ), + ] diff --git a/anthias_app/migrations/0002_auto_20241015_1524.py b/anthias_app/migrations/0002_auto_20241015_1524.py new file mode 100644 index 000000000..6aafe9140 --- /dev/null +++ b/anthias_app/migrations/0002_auto_20241015_1524.py @@ -0,0 +1,38 @@ +# Generated by Django 3.2.18 on 2024-10-15 15:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('anthias_app', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='asset', + name='duration', + field=models.BigIntegerField(blank=True, null=True), + ), + migrations.AlterField( + model_name='asset', + name='is_enabled', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='asset', + name='is_processing', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='asset', + name='nocache', + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name='asset', + name='skip_asset_check', + field=models.BooleanField(default=False), + ), + ] diff --git a/anthias_app/migrations/__init__.py b/anthias_app/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/anthias_app/models.py b/anthias_app/models.py new file mode 100644 index 000000000..3d1c2849c --- /dev/null +++ b/anthias_app/models.py @@ -0,0 +1,36 @@ +import uuid +from django.db import models +from django.utils import timezone + + +def generate_asset_id(): + return uuid.uuid4().hex + + +class Asset(models.Model): + asset_id = models.TextField( + primary_key=True, default=generate_asset_id, editable=False) + name = models.TextField(blank=True, null=True) + uri = models.TextField(blank=True, null=True) + md5 = models.TextField(blank=True, null=True) + start_date = models.DateTimeField(blank=True, null=True) + end_date = models.DateTimeField(blank=True, null=True) + duration = models.BigIntegerField(blank=True, null=True) + mimetype = models.TextField(blank=True, null=True) + is_enabled = models.BooleanField(default=False) + is_processing = models.BooleanField(default=False) + nocache = models.BooleanField(default=False) + play_order = models.IntegerField(default=0) + skip_asset_check = models.BooleanField(default=False) + + class Meta: + db_table = 'assets' + + def is_active(self): + if self.is_enabled and self.start_date and self.end_date: + current_time = timezone.now() + return ( + self.start_date < current_time < self.end_date + ) + + return False diff --git a/anthias_app/tests.py b/anthias_app/tests.py new file mode 100644 index 000000000..7a896dcd1 --- /dev/null +++ b/anthias_app/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase # noqa F401 + +# Create your tests here. diff --git a/anthias_app/urls.py b/anthias_app/urls.py new file mode 100644 index 000000000..4e3834cdf --- /dev/null +++ b/anthias_app/urls.py @@ -0,0 +1,12 @@ +from django.urls import path +from . 
import views + +app_name = 'anthias_app' + +urlpatterns = [ + path('', views.index, name='index'), + path('settings', views.settings_page, name='settings'), + path('system-info', views.system_info, name='system_info'), + path('integrations', views.integrations, name='integrations'), + path('splash-page', views.splash_page, name='splash_page'), +] diff --git a/anthias_app/views.py b/anthias_app/views.py index 54a5d4377..d551c8445 100644 --- a/anthias_app/views.py +++ b/anthias_app/views.py @@ -1,18 +1,18 @@ -import ipaddress -import logging -import psutil from datetime import timedelta -from flask import Blueprint, request +from django.views.decorators.http import require_http_methods from hurry.filesize import size -from os import getenv, statvfs +from os import ( + getenv, + statvfs, +) from platform import machine -from urllib.parse import urlparse - -from anthias_app.helpers import ( - add_default_assets, - remove_default_assets, - template, +from settings import ( + CONFIGURABLE_SETTINGS, + DEFAULTS, + settings, + ZmqPublisher, ) +from urllib.parse import urlparse from lib import ( diagnostics, device_helper, @@ -20,31 +20,31 @@ from lib.auth import authorized from lib.utils import ( connect_to_redis, - get_balena_supervisor_version, get_node_ip, get_node_mac_address, is_balena_app, is_demo_node, is_docker, ) -from settings import ( - CONFIGURABLE_SETTINGS, - DEFAULTS, - settings, - ZmqPublisher, +from .helpers import ( + add_default_assets, + remove_default_assets, + template, ) +import ipaddress +import psutil + r = connect_to_redis() -anthias_app_bp = Blueprint('anthias_app', __name__) -@anthias_app_bp.route('/') @authorized -def index(): +@require_http_methods(["GET"]) +def index(request): player_name = settings['player_name'] - my_ip = urlparse(request.host_url).hostname + my_ip = urlparse(request.build_absolute_uri()).hostname is_demo = is_demo_node() - balena_uuid = getenv("BALENA_APP_UUID", None) + balena_device_uuid = getenv("BALENA_DEVICE_UUID", None) ws_addresses = [] @@ -53,30 +53,28 @@ def index(): else: ws_addresses.append('ws://' + my_ip + '/ws/') - if balena_uuid: + if balena_device_uuid: ws_addresses.append( - 'wss://{}.balena-devices.com/ws/'.format(balena_uuid)) - - return template( - 'index.html', - ws_addresses=ws_addresses, - player_name=player_name, - is_demo=is_demo, - is_balena=is_balena_app(), - ) + 'wss://{}.balena-devices.com/ws/'.format(balena_device_uuid) + ) + + return template(request, 'index.html', { + 'ws_addresses': ws_addresses, + 'player_name': player_name, + 'is_demo': is_demo, + 'is_balena': is_balena_app(), + }) -@anthias_app_bp.route('/settings', methods=["GET", "POST"]) @authorized -def settings_page(): +@require_http_methods(["GET", "POST"]) +def settings_page(request): context = {'flash': None} - if request.method == "POST": + if request.method == 'POST': try: - # Put some request variables in local variables to make them - # easier to read. 
- current_pass = request.form.get('current-password', '') - auth_backend = request.form.get('auth_backend', '') + current_pass = request.POST.get('current-password', '') + auth_backend = request.POST.get('auth_backend', '') if ( auth_backend != settings['auth_backend'] @@ -100,16 +98,19 @@ def settings_page(): .check_password(current_pass) ) next_auth_backend = settings.auth_backends[auth_backend] - next_auth_backend.update_settings(current_pass_correct) + next_auth_backend.update_settings(request, current_pass_correct) settings['auth_backend'] = auth_backend for field, default in list(CONFIGURABLE_SETTINGS.items()): - value = request.form.get(field, default) - - if not value and field in [ - 'default_duration', - 'default_streaming_duration', - ]: + value = request.POST.get(field, default) + + if ( + not value + and field in [ + 'default_duration', + 'default_streaming_duration', + ] + ): value = str(0) if isinstance(default, bool): value = value == 'on' @@ -155,14 +156,10 @@ def settings_page(): 'selected' if settings['auth_backend'] == backend.name else '' - ) + ), }) - try: - ip_addresses = get_node_ip().split() - except Exception as error: - logging.warning(f"Error getting IP addresses: {error}") - ip_addresses = ['IP_ADDRESS'] + ip_addresses = get_node_ip().split() context.update({ 'user': settings['user'], @@ -175,12 +172,12 @@ def settings_page(): 'host_user': getenv('HOST_USER') }) - return template('settings.html', **context) + return template(request, 'settings.html', context) -@anthias_app_bp.route('/system-info') @authorized -def system_info(): +@require_http_methods(["GET"]) +def system_info(request): loadavg = diagnostics.get_load_avg()['15 min'] display_power = r.get('display_power') @@ -210,49 +207,53 @@ def system_info(): if device_model is None and machine() == 'x86_64': device_model = 'Generic x86_64 Device' - version = '{}@{}'.format( + anthias_version = '{}@{}'.format( diagnostics.get_git_branch(), diagnostics.get_git_short_hash() ) - return template( - 'system-info.html', - player_name=player_name, - loadavg=loadavg, - free_space=free_space, - uptime=system_uptime, - memory=memory, - display_power=display_power, - device_model=device_model, - version=version, - mac_address=get_node_mac_address(), - is_balena=is_balena_app(), - ) + context = { + 'player_name': player_name, + 'loadavg': loadavg, + 'free_space': free_space, + 'uptime': { + 'days': system_uptime.days, + 'hours': round(system_uptime.seconds / 3600, 2), + }, + 'memory': memory, + 'display_power': display_power, + 'device_model': device_model, + 'anthias_version': anthias_version, + 'mac_address': get_node_mac_address(), + 'is_balena': is_balena_app(), + } + return template(request, 'system-info.html', context) -@anthias_app_bp.route('/integrations') -@authorized -def integrations(): +@authorized +@require_http_methods(["GET"]) +def integrations(request): context = { 'player_name': settings['player_name'], 'is_balena': is_balena_app(), } if context['is_balena']: - context['balena_device_id'] = getenv('BALENA_DEVICE_UUID') - context['balena_app_id'] = getenv('BALENA_APP_ID') - context['balena_app_name'] = getenv('BALENA_APP_NAME') - context['balena_supervisor_version'] = get_balena_supervisor_version() - context['balena_host_os_version'] = getenv('BALENA_HOST_OS_VERSION') - context['balena_device_name_at_init'] = getenv( - 'BALENA_DEVICE_NAME_AT_INIT') + context.update({ + 'balena_device_id': getenv('BALENA_DEVICE_UUID'), + 'balena_app_id': getenv('BALENA_APP_ID'), + 'balena_app_name': 
getenv('BALENA_APP_NAME'), + 'balena_supervisor_version': getenv('BALENA_SUPERVISOR_VERSION'), + 'balena_host_os_version': getenv('BALENA_HOST_OS_VERSION'), + 'balena_device_name_at_init': getenv('BALENA_DEVICE_NAME_AT_INIT'), + }) - return template('integrations.html', **context) + return template(request, 'integrations.html', context) -@anthias_app_bp.route('/splash-page') -def splash_page(): +@require_http_methods(["GET"]) +def splash_page(request): ip_addresses = [] for ip_address in get_node_ip().split(): @@ -263,4 +264,6 @@ def splash_page(): else: ip_addresses.append(f'http://{ip_address}') - return template('splash-page.html', ip_addresses=ip_addresses) + return template(request, 'splash-page.html', { + 'ip_addresses': ip_addresses + }) diff --git a/anthias_django/__init__.py b/anthias_django/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/anthias_django/asgi.py b/anthias_django/asgi.py new file mode 100644 index 000000000..77ddf2946 --- /dev/null +++ b/anthias_django/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for anthias_django project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings') + +application = get_asgi_application() diff --git a/anthias_django/settings.py b/anthias_django/settings.py new file mode 100644 index 000000000..67d441b4b --- /dev/null +++ b/anthias_django/settings.py @@ -0,0 +1,183 @@ +""" +Django settings for anthias_django project. + +Generated by 'django-admin startproject' using Django 3.2.18. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.2/ref/settings/ +""" + +import pytz +import secrets +from pathlib import Path +from os import getenv + +from settings import settings as device_settings + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ + + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = getenv('ENVIRONMENT', 'production') in ['development', 'test'] + +if not DEBUG: + if not device_settings.get('django_secret_key'): + # Modify the generated so that string interpolation + # errors can be avoided. + secret_key = secrets.token_urlsafe(50) + device_settings['django_secret_key'] = secret_key + device_settings.save() + + SECRET_KEY = device_settings.get('django_secret_key') +else: + # SECURITY WARNING: keep the secret key used in production secret! + SECRET_KEY = 'django-insecure-7rz*$)g6dk&=h-3imq2xw*iu!zuhfb&w6v482_vs!w@4_gha=j' # noqa: E501 + +# @TODO: Resolve hostnames and IP addresses dynamically. 
+ALLOWED_HOSTS = [ + '127.0.0.1', + 'localhost', + 'anthias', + 'anthias-server' +] + + +# Application definition + +INSTALLED_APPS = [ + 'anthias_app.apps.AnthiasAppConfig', + 'drf_spectacular', + 'rest_framework', + 'api.apps.ApiConfig', + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'dbbackup', +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'anthias_django.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [ + BASE_DIR / 'templates', + ], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'anthias_django.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/3.2/ref/settings/#databases +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': ( + '/data/.screenly/test.db' if getenv('ENVIRONMENT') == 'test' + else '/data/.screenly/screenly.db' + ), + }, +} + + +# Password validation +# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators +AUTH_MODULE_PREFIX = 'django.contrib.auth.password_validation' +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': f'{AUTH_MODULE_PREFIX}.UserAttributeSimilarityValidator', + }, + { + 'NAME': f'{AUTH_MODULE_PREFIX}.MinimumLengthValidator', + }, + { + 'NAME': f'{AUTH_MODULE_PREFIX}.CommonPasswordValidator', + }, + { + 'NAME': f'{AUTH_MODULE_PREFIX}.NumericPasswordValidator', + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/3.2/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + +try: + with open('/etc/timezone', 'r') as f: + TIME_ZONE = f.read().strip() + pytz.timezone(TIME_ZONE) # Checks if the timezone is valid. +except (pytz.exceptions.UnknownTimeZoneError, FileNotFoundError): + TIME_ZONE = 'UTC' + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.2/howto/static-files/ + +STATIC_URL = '/static/' +STATICFILES_DIRS = [ + BASE_DIR / 'static', +] +STATIC_ROOT = '/data/screenly/staticfiles' + +# Default primary key field type +# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + +REST_FRAMEWORK = { + 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema', + 'EXCEPTION_HANDLER': 'api.helpers.custom_exception_handler', + # The project uses custom authentication classes, + # so we need to disable the default ones. 
+ 'DEFAULT_AUTHENTICATION_CLASSES': [] +} + +SPECTACULAR_SETTINGS = { + 'TITLE': 'Anthias API', + 'VERSION': '1.2.0', +} + +# `django-dbbackup` settings +DBBACKUP_STORAGE = 'django.core.files.storage.FileSystemStorage' +DBBACKUP_STORAGE_OPTIONS = {'location': '/data/.screenly/backups'} +DBBACKUP_HOSTNAME = 'anthias' diff --git a/anthias_django/urls.py b/anthias_django/urls.py new file mode 100644 index 000000000..a3ebfbcf3 --- /dev/null +++ b/anthias_django/urls.py @@ -0,0 +1,40 @@ +"""anthias_django URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.2/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.contrib import admin +from django.urls import include, path +from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView +from lib.auth import authorized + + +class APIDocView(SpectacularRedocView): + @authorized + def get(self, request, *args, **kwargs): + return super().get(request, *args, **kwargs) + + +urlpatterns = [ + path('admin/', admin.site.urls), + path('', include('anthias_app.urls')), + path('api/', include('api.urls')), + path('api/schema/', SpectacularAPIView.as_view(), name='schema'), + path( + 'api/docs/', + APIDocView.as_view(url_name='schema'), + name='redoc' + ), +] + +# @TODO: Write custom 403 and 404 pages. diff --git a/anthias_django/wsgi.py b/anthias_django/wsgi.py new file mode 100644 index 000000000..baf942905 --- /dev/null +++ b/anthias_django/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for anthias_django project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings') + +application = get_wsgi_application() diff --git a/api/admin.py b/api/admin.py new file mode 100644 index 000000000..42c33bc8e --- /dev/null +++ b/api/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin # noqa F401 + +# Register your models here. 
diff --git a/api/apps.py b/api/apps.py new file mode 100644 index 000000000..66656fd29 --- /dev/null +++ b/api/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class ApiConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'api' diff --git a/api/helpers.py b/api/helpers.py index 528d89e40..860d1f693 100644 --- a/api/helpers.py +++ b/api/helpers.py @@ -1,383 +1,38 @@ import json -import traceback -import uuid from dateutil import parser as date_parser -from flask import escape, make_response -from functools import wraps -from os import path, rename -from past.builtins import basestring -from flask_restful_swagger_2 import Schema -from werkzeug.wrappers import Request +from rest_framework import status +from rest_framework.views import exception_handler +from rest_framework.response import Response +from anthias_app.models import Asset -from lib import assets_helper, db -from lib.utils import ( - download_video_from_youtube, - json_dump, - get_video_duration, - validate_url, -) -from settings import settings - -class AssetModel(Schema): - type = 'object' - properties = { - 'asset_id': {'type': 'string'}, - 'name': {'type': 'string'}, - 'uri': {'type': 'string'}, - 'start_date': { - 'type': 'string', - 'format': 'date-time' - }, - 'end_date': { - 'type': 'string', - 'format': 'date-time' - }, - 'duration': {'type': 'string'}, - 'mimetype': {'type': 'string'}, - 'is_active': { - 'type': 'integer', - 'format': 'int64', - }, - 'is_enabled': { - 'type': 'integer', - 'format': 'int64', - }, - 'is_processing': { - 'type': 'integer', - 'format': 'int64', - }, - 'nocache': { - 'type': 'integer', - 'format': 'int64', - }, - 'play_order': { - 'type': 'integer', - 'format': 'int64', - }, - 'skip_asset_check': { - 'type': 'integer', - 'format': 'int64', - } - } - - -class AssetRequestModel(Schema): - type = 'object' - properties = { - 'name': {'type': 'string'}, - 'uri': {'type': 'string'}, - 'start_date': { - 'type': 'string', - 'format': 'date-time' - }, - 'end_date': { - 'type': 'string', - 'format': 'date-time' - }, - 'duration': {'type': 'string'}, - 'mimetype': {'type': 'string'}, - 'is_enabled': { - 'type': 'integer', - 'format': 'int64', - }, - 'nocache': { - 'type': 'integer', - 'format': 'int64', - }, - 'play_order': { - 'type': 'integer', - 'format': 'int64', - }, - 'skip_asset_check': { - 'type': 'integer', - 'format': 'int64', - } - } - required = [ - 'name', 'uri', 'mimetype', 'is_enabled', 'start_date', 'end_date'] - - -class AssetContentModel(Schema): - type = 'object' - properties = { - 'type': {'type': 'string'}, - 'url': {'type': 'string'}, - 'filename': {'type': 'string'}, - 'mimetype': {'type': 'string'}, - 'content': { - 'type': 'string', - 'format': 'byte' - }, - } - required = ['type', 'filename'] - - -class AssetPropertiesModel(Schema): - type = 'object' - properties = { - 'name': {'type': 'string'}, - 'start_date': { - 'type': 'string', - 'format': 'date-time' - }, - 'end_date': { - 'type': 'string', - 'format': 'date-time' - }, - 'duration': {'type': 'string'}, - 'is_active': { - 'type': 'integer', - 'format': 'int64', - }, - 'is_enabled': { - 'type': 'integer', - 'format': 'int64', - }, - 'nocache': { - 'type': 'integer', - 'format': 'int64', - }, - 'play_order': { - 'type': 'integer', - 'format': 'int64', - }, - 'skip_asset_check': { - 'type': 'integer', - 'format': 'int64', - } - } - - -def api_error(error): - return make_response(json_dump({'error': error}), 500) - - -def prepare_asset(request, unique_name=False): - req = 
Request(request.environ) - data = None - - # For backward compatibility - try: - data = json.loads(req.data) - except ValueError: - data = json.loads(req.form['model']) - except TypeError: - data = json.loads(req.form['model']) - - def get(key): - val = data.get(key, '') - if isinstance(val, str): - return val.strip() - elif isinstance(val, basestring): - return val.strip().decode('utf-8') - else: - return val - - if not all([get('name'), get('uri'), get('mimetype')]): - raise Exception( - "Not enough information provided. " - "Please specify 'name', 'uri', and 'mimetype'." - ) - - name = escape(get('name')) - if unique_name: - with db.conn(settings['database']) as conn: - names = assets_helper.get_names_of_assets(conn) - if name in names: - i = 1 - while True: - new_name = '%s-%i' % (name, i) - if new_name in names: - i += 1 - else: - name = new_name - break - - asset = { - 'name': name, - 'mimetype': get('mimetype'), - 'asset_id': get('asset_id'), - 'is_enabled': get('is_enabled'), - 'is_processing': get('is_processing'), - 'nocache': get('nocache'), - } - - uri = escape(get('uri')) - - if uri.startswith('/'): - if not path.isfile(uri): - raise Exception("Invalid file path. Failed to add asset.") - else: - if not validate_url(uri): - raise Exception("Invalid URL. Failed to add asset.") - - if not asset['asset_id']: - asset['asset_id'] = uuid.uuid4().hex - if uri.startswith('/'): - rename(uri, path.join(settings['assetdir'], asset['asset_id'])) - uri = path.join(settings['assetdir'], asset['asset_id']) - - if 'youtube_asset' in asset['mimetype']: - uri, asset['name'], asset['duration'] = download_video_from_youtube( - uri, asset['asset_id']) - asset['mimetype'] = 'video' - asset['is_processing'] = 1 - - asset['uri'] = uri - - if "video" in asset['mimetype']: - if get('duration') == 'N/A' or int(get('duration')) == 0: - asset['duration'] = int(get_video_duration(uri).total_seconds()) - else: - # Crashes if it's not an int. We want that. - asset['duration'] = int(get('duration')) - - asset['skip_asset_check'] = ( - int(get('skip_asset_check')) - if int(get('skip_asset_check')) - else 0 - ) - - # parse date via python-dateutil and remove timezone info - if get('start_date'): - asset['start_date'] = date_parser.parse( - get('start_date')).replace(tzinfo=None) - else: - asset['start_date'] = "" - - if get('end_date'): - asset['end_date'] = date_parser.parse( - get('end_date')).replace(tzinfo=None) - else: - asset['end_date'] = "" - - return asset - - -def prepare_asset_v1_2(request_environ, asset_id=None, unique_name=False): - data = json.loads(request_environ.data) - - def get(key): - val = data.get(key, '') - if isinstance(val, str): - return val.strip() - elif isinstance(val, basestring): - return val.strip().decode('utf-8') - else: - return val - - if not all([get('name'), - get('uri'), - get('mimetype'), - str(get('is_enabled')), - get('start_date'), - get('end_date')]): - raise Exception( - "Not enough information provided. Please specify 'name', " - "'uri', 'mimetype', 'is_enabled', 'start_date' and 'end_date'." 
- ) - - ampfix = "&" - name = escape(get('name').replace(ampfix, '&')) - if unique_name: - with db.conn(settings['database']) as conn: - names = assets_helper.get_names_of_assets(conn) - if name in names: - i = 1 - while True: - new_name = '%s-%i' % (name, i) - if new_name in names: - i += 1 - else: - name = new_name - break - - asset = { - 'name': name, - 'mimetype': get('mimetype'), - 'is_enabled': get('is_enabled'), - 'nocache': get('nocache') - } - - uri = ( - (get('uri')) - .replace(ampfix, '&') - .replace('<', '<') - .replace('>', '>') - .replace('\'', ''') - .replace('\"', '"') - ) - - if uri.startswith('/'): - if not path.isfile(uri): - raise Exception("Invalid file path. Failed to add asset.") - else: - if not validate_url(uri): - raise Exception("Invalid URL. Failed to add asset.") - - if not asset_id: - asset['asset_id'] = uuid.uuid4().hex - - if not asset_id and uri.startswith('/'): - new_uri = "{}{}".format( - path.join(settings['assetdir'], asset['asset_id']), get('ext')) - rename(uri, new_uri) - uri = new_uri - - if 'youtube_asset' in asset['mimetype']: - uri, asset['name'], asset['duration'] = download_video_from_youtube( - uri, asset['asset_id']) - asset['mimetype'] = 'video' - asset['is_processing'] = 1 - - asset['uri'] = uri - - if "video" in asset['mimetype']: - if get('duration') == 'N/A' or int(get('duration')) == 0: - asset['duration'] = int(get_video_duration(uri).total_seconds()) - elif get('duration'): - # Crashes if it's not an int. We want that. - asset['duration'] = int(get('duration')) - else: - asset['duration'] = 10 - - asset['play_order'] = get('play_order') if get('play_order') else 0 - - asset['skip_asset_check'] = ( - int(get('skip_asset_check')) - if int(get('skip_asset_check')) - else 0 - ) - - # parse date via python-dateutil and remove timezone info - asset['start_date'] = date_parser.parse( - get('start_date')).replace(tzinfo=None) - asset['end_date'] = date_parser.parse(get('end_date')).replace(tzinfo=None) - - return asset +class AssetCreationException(Exception): + def __init__(self, errors): + self.errors = errors def update_asset(asset, data): for key, value in list(data.items()): if ( - key in ['asset_id', 'is_processing', 'mimetype', 'uri'] or - key not in asset + key in ['asset_id', 'is_processing', 'mimetype', 'uri'] + or key not in asset ): continue if key in ['start_date', 'end_date']: value = date_parser.parse(value).replace(tzinfo=None) - if key in [ - 'play_order', - 'skip_asset_check', - 'is_enabled', - 'is_active', - 'nocache', - ]: + if ( + key in [ + 'play_order', + 'skip_asset_check', + 'is_enabled', + 'is_active', + 'nocache', + ] + ): value = int(value) if key == 'duration': @@ -388,14 +43,42 @@ def update_asset(asset, data): asset.update({key: value}) -# Used as a decorator to catch exceptions and return a JSON response. 
-def api_response(view): - @wraps(view) - def api_view(*args, **kwargs): - try: - return view(*args, **kwargs) - except Exception as e: - traceback.print_exc() - return api_error(str(e)) +def custom_exception_handler(exc, context): + exception_handler(exc, context) + + return Response( + {'error': str(exc)}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR + ) + + +def get_active_asset_ids(): + enabled_assets = Asset.objects.filter( + is_enabled=1, + start_date__isnull=False, + end_date__isnull=False, + ) + return [ + asset.asset_id + for asset in enabled_assets + if asset.is_active() + ] + + +def save_active_assets_ordering(active_asset_ids): + for i, asset_id in enumerate(active_asset_ids): + Asset.objects.filter(asset_id=asset_id).update(play_order=i) + + +def parse_request(request): + data = None + + # For backward compatibility + try: + data = json.loads(request.data) + except ValueError: + data = json.loads(request.data['model']) + except TypeError: + data = json.loads(request.data['model']) - return api_view + return data diff --git a/api/migrations/__init__.py b/api/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/api/serializers.py b/api/serializers.py new file mode 100644 index 000000000..b2c806c68 --- /dev/null +++ b/api/serializers.py @@ -0,0 +1,288 @@ +import uuid +from os import path, rename +from django.utils import timezone +from rest_framework.serializers import ( + BooleanField, + CharField, + DateTimeField, + IntegerField, + ModelSerializer, + Serializer, +) +from anthias_app.models import Asset +from lib.utils import ( + download_video_from_youtube, + get_video_duration, + validate_url, + url_fails, +) +from settings import settings + + +class AssetRequestSerializer(Serializer): + name = CharField() + uri = CharField() + start_date = DateTimeField(default_timezone=timezone.utc) + end_date = DateTimeField(default_timezone=timezone.utc) + duration = IntegerField() + mimetype = CharField() + is_enabled = BooleanField() + nocache = BooleanField() + play_order = IntegerField() + skip_asset_check = BooleanField() + + +class AssetSerializer(ModelSerializer): + class Meta: + model = Asset + fields = [ + 'asset_id', + 'name', + 'uri', + 'start_date', + 'end_date', + 'duration', + 'mimetype', + 'is_enabled', + 'nocache', + 'play_order', + 'skip_asset_check', + 'is_active', + 'is_processing', + ] + + +class CreateAssetSerializerV1_1(Serializer): + def __init__(self, *args, unique_name=False, **kwargs): + self.unique_name = unique_name + super().__init__(*args, **kwargs) + + name = CharField() + uri = CharField() + start_date = DateTimeField(default_timezone=timezone.utc, required=False) + end_date = DateTimeField(default_timezone=timezone.utc, required=False) + duration = IntegerField(required=False) + mimetype = CharField() + is_enabled = BooleanField(required=False) + is_processing = BooleanField(required=False) + nocache = BooleanField(required=False) + play_order = IntegerField(required=False) + skip_asset_check = BooleanField(required=False) + + def validate(self, data): + name = data['name'] + + if self.unique_name: + names = Asset.objects.values_list('name', flat=True) + + if name in names: + i = 1 + while True: + new_name = f'{name}-{i}' + if new_name in names: + i += 1 + else: + name = new_name + break + + asset = { + 'name': name, + 'mimetype': data.get('mimetype'), + 'asset_id': data.get('asset_id'), + 'is_enabled': data.get('is_enabled', False), + 'is_processing': data.get('is_processing', False), + 'nocache': data.get('nocache', 
False), + } + + uri = data.get('uri') + + if uri.startswith('/'): + if not path.isfile(uri): + raise Exception("Invalid file path. Failed to add asset.") + else: + if not validate_url(uri): + raise Exception("Invalid URL. Failed to add asset.") + + if not asset['asset_id']: + asset['asset_id'] = uuid.uuid4().hex + if uri.startswith('/'): + rename(uri, path.join(settings['assetdir'], asset['asset_id'])) + uri = path.join(settings['assetdir'], asset['asset_id']) + + if 'youtube_asset' in asset['mimetype']: + ( + uri, asset['name'], asset['duration'] + ) = download_video_from_youtube(uri, asset['asset_id']) + asset['mimetype'] = 'video' + asset['is_processing'] = 1 + + asset['uri'] = uri + + if "video" in asset['mimetype']: + if data.get('duration') == 0: + asset['duration'] = int( + get_video_duration(uri).total_seconds()) + else: + # Crashes if it's not an int. We want that. + asset['duration'] = data.get('duration') + + asset['skip_asset_check'] = data.get('skip_asset_check', False) + + if data.get('start_date'): + asset['start_date'] = data.get('start_date').replace(tzinfo=None) + else: + asset['start_date'] = "" + + if data.get('end_date'): + asset['end_date'] = data.get('end_date').replace(tzinfo=None) + else: + asset['end_date'] = "" + + if not asset['skip_asset_check'] and url_fails(asset['uri']): + raise Exception("Could not retrieve file. Check the asset URL.") + + return asset + + +class CreateAssetSerializerV1_2(Serializer): + def __init__(self, *args, unique_name=False, **kwargs): + self.unique_name = unique_name + super().__init__(*args, **kwargs) + + asset_id = CharField(read_only=True) + ext = CharField(write_only=True, required=False) + name = CharField() + uri = CharField() + start_date = DateTimeField(default_timezone=timezone.utc) + end_date = DateTimeField(default_timezone=timezone.utc) + duration = IntegerField() + mimetype = CharField() + is_enabled = BooleanField() + is_processing = BooleanField(required=False) + nocache = BooleanField(required=False) + play_order = IntegerField(required=False) + skip_asset_check = BooleanField(required=False) + + def prepare_asset(self, data, asset_id=None): + ampersand_fix = '&' + name = data['name'].replace(ampersand_fix, '&') + + if self.unique_name: + names = Asset.objects.values_list('name', flat=True) + if name in names: + i = 1 + while True: + new_name = f'{name}-{i}' + if new_name in names: + i += 1 + else: + name = new_name + break + + asset = { + 'name': name, + 'mimetype': data.get('mimetype'), + 'is_enabled': data.get('is_enabled', False), + 'nocache': data.get('nocache', False), + } + + uri = ( + data['uri'] + .replace(ampersand_fix, '&') + .replace('<', '<') + .replace('>', '>') + .replace('\'', ''') + .replace('\"', '"') + ) + + if uri.startswith('/'): + if not path.isfile(uri): + raise Exception("Invalid file path. Failed to add asset.") + else: + if not validate_url(uri): + raise Exception("Invalid URL. 
Failed to add asset.") + + if not asset_id: + asset['asset_id'] = uuid.uuid4().hex + + if not asset_id and uri.startswith('/'): + path_name = path.join(settings['assetdir'], asset['asset_id']) + ext_name = data.get('ext', '') + new_uri = f'{path_name}{ext_name}' + rename(uri, new_uri) + uri = new_uri + + if 'youtube_asset' in asset['mimetype']: + ( + uri, asset['name'], asset['duration'] + ) = download_video_from_youtube(uri, asset['asset_id']) + asset['mimetype'] = 'video' + asset['is_processing'] = True + + asset['uri'] = uri + + if "video" in asset['mimetype']: + if data.get('duration') == 0: + asset['duration'] = int( + get_video_duration(uri).total_seconds()) + elif data.get('duration'): + # Crashes if it's not an int. We want that. + asset['duration'] = data.get('duration') + else: + asset['duration'] = 10 + + asset['play_order'] = ( + data.get('play_order') if data.get('play_order') else 0 + ) + + asset['skip_asset_check'] = ( + int(data.get('skip_asset_check')) + if int(data.get('skip_asset_check')) + else 0 + ) + + asset['start_date'] = data.get('start_date').replace(tzinfo=None) + asset['end_date'] = data.get('end_date').replace(tzinfo=None) + + if not asset['skip_asset_check'] and url_fails(asset['uri']): + raise Exception("Could not retrieve file. Check the asset URL.") + + return asset + + def validate(self, data): + return self.prepare_asset(data) + + +class UpdateAssetSerializer(Serializer): + name = CharField() + start_date = DateTimeField(default_timezone=timezone.utc) + end_date = DateTimeField(default_timezone=timezone.utc) + duration = IntegerField() + is_enabled = BooleanField() + is_processing = BooleanField(required=False) + nocache = BooleanField(required=False) + play_order = IntegerField(required=False) + skip_asset_check = BooleanField(required=False) + + def update(self, instance, validated_data): + instance.name = validated_data.get('name', instance.name) + instance.start_date = validated_data.get( + 'start_date', instance.start_date) + instance.end_date = validated_data.get('end_date', instance.end_date) + instance.is_enabled = validated_data.get( + 'is_enabled', instance.is_enabled) + instance.is_processing = validated_data.get( + 'is_processing', instance.is_processing) + instance.nocache = validated_data.get('nocache', instance.nocache) + instance.play_order = validated_data.get( + 'play_order', instance.play_order) + instance.skip_asset_check = validated_data.get( + 'skip_asset_check', instance.skip_asset_check) + + if 'video' not in instance.mimetype: + instance.duration = validated_data.get( + 'duration', instance.duration) + + instance.save() + + return instance diff --git a/api/tests.py b/api/tests.py new file mode 100644 index 000000000..e554255ac --- /dev/null +++ b/api/tests.py @@ -0,0 +1,333 @@ +import json + +from django.conf import settings as django_settings +from django.test import TestCase +from django.urls import reverse +from inspect import cleandoc +from os import path +from pathlib import Path +from rest_framework.test import APIClient +from rest_framework import status +from settings import settings as anthias_settings +from unittest import mock +from unittest_parametrize import parametrize, ParametrizedTestCase + +from anthias_app.models import Asset + + +ASSET_LIST_V1_1_URL = reverse('api:asset_list_v1_1') +ASSET_CREATION_DATA = { + 'name': 'Anthias', + 'uri': 'https://anthias.screenly.io', + 'start_date': '2019-08-24T14:15:22Z', + 'end_date': '2029-08-24T14:15:22Z', + 'duration': 20, + 'mimetype': 'webpage', + 'is_enabled': 0, + 
'nocache': 0, + 'play_order': 0, + 'skip_asset_check': 0 +} + +parametrize_version = parametrize( + 'version', + [('v1',), ('v1_1',), ('v1_2',)], +) + + +class CRUDAssetEndpointsTest(TestCase, ParametrizedTestCase): + def setUp(self): + self.client = APIClient() + + def get_assets(self, version): + asset_list_url = reverse(f'api:asset_list_{version}') + response = self.client.get(asset_list_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + return response.data + + def get_request_data(self, data, version): + if version in ['v1', 'v1_1']: + return { + 'model': json.dumps(data) + } + else: + return data + + def create_asset(self, data, version): + asset_list_url = reverse(f'api:asset_list_{version}') + return self.client.post( + asset_list_url, + data=self.get_request_data(data, version) + ).data + + def update_asset(self, asset_id, data): + return self.client.put( + reverse('api:asset_detail_v1_1', args=[asset_id]), + data=data + ).data + + def get_asset(self, asset_id): + url = reverse('api:asset_detail_v1_1', args=[asset_id]) + return self.client.get(url).data + + def delete_asset(self, asset_id): + url = reverse('api:asset_detail_v1_1', args=[asset_id]) + return self.client.delete(url) + + @parametrize_version + def test_get_assets_when_first_time_setup_should_initially_return_empty(self, version): # noqa: E501 + asset_list_url = reverse(f'api:asset_list_{version}') + response = self.client.get(asset_list_url) + assets = response.data + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(len(assets), 0) + + @parametrize_version + def test_create_asset_should_return_201(self, version): + asset_list_url = reverse(f'api:asset_list_{version}') + response = self.client.post( + asset_list_url, + data=self.get_request_data(ASSET_CREATION_DATA, version) + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + self.assertEqual(response.data['name'], 'Anthias') + self.assertEqual(response.data['uri'], 'https://anthias.screenly.io') + self.assertEqual(response.data['is_enabled'], 0) + self.assertEqual(response.data['nocache'], 0) + self.assertEqual(response.data['play_order'], 0) + self.assertEqual(response.data['skip_asset_check'], 0) + + @parametrize_version + def test_get_assets_after_create_should_return_1_asset(self, version): + self.create_asset(ASSET_CREATION_DATA, version) + + assets = self.get_assets(version) + self.assertEqual(len(assets), 1) + + @parametrize_version + def test_get_asset_by_id_should_return_asset(self, version): + expected_asset = self.create_asset(ASSET_CREATION_DATA, version) + asset_id = expected_asset['asset_id'] + + actual_asset = self.get_asset(asset_id) + + self.assertEqual(expected_asset, actual_asset) + + @parametrize_version + def test_update_asset_should_return_updated_asset(self, version): + expected_asset = self.create_asset(ASSET_CREATION_DATA, version) + asset_id = expected_asset['asset_id'] + updated_asset = self.update_asset( + asset_id, + data={ + 'model': cleandoc( + ''' + { + "name": "Anthias", + "uri": "https://anthias.screenly.io", + "start_date": "2019-08-24T14:15:22Z", + "end_date": "2029-08-24T14:15:22Z", + "duration": "15", + "mimetype": "webpage", + "is_enabled": 1, + "nocache": 0, + "play_order": 0, + "skip_asset_check": 0 + } + ''' + ) + } + ) + + self.assertEqual(updated_asset['name'], 'Anthias') + self.assertEqual(updated_asset['uri'], 'https://anthias.screenly.io') + self.assertEqual(updated_asset['duration'], 15) + self.assertEqual(updated_asset['is_enabled'], 1) + 
self.assertEqual(updated_asset['play_order'], 0) + + @parametrize_version + def test_delete_asset_should_return_204(self, version): + asset = self.create_asset(ASSET_CREATION_DATA, version) + asset_id = asset['asset_id'] + + response = self.delete_asset(asset_id) + assets = self.client.get(ASSET_LIST_V1_1_URL).data + + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + self.assertEqual(len(assets), 0) + + +class V1EndpointsTest(TestCase, ParametrizedTestCase): + def setUp(self): + self.client = APIClient() + + def tearDown(self): + self.remove_all_asset_files() + + def remove_all_asset_files(self): + asset_directory_path = Path(anthias_settings['assetdir']) + for file in asset_directory_path.iterdir(): + file.unlink() + + def get_asset_content_url(self, asset_id): + return reverse('api:asset_content_v1', args=[asset_id]) + + def test_asset_content(self): + asset = Asset.objects.create(**ASSET_CREATION_DATA) + asset_id = asset.asset_id + + response = self.client.get(self.get_asset_content_url(asset_id)) + data = response.data + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(data['type'], 'url') + self.assertEqual(data['url'], 'https://anthias.screenly.io') + + def test_file_asset(self): + project_base_path = django_settings.BASE_DIR + image_path = path.join( + project_base_path, + 'static/img/standby.png', + ) + + response = self.client.post( + reverse('api:file_asset_v1'), + data={ + 'file_upload': open(image_path, 'rb'), + }, + ) + data = response.data + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertTrue(path.exists(data['uri'])) + self.assertEqual(data['ext'], '.png') + + def test_playlist_order(self): + playlist_order_url = reverse('api:playlist_order_v1') + + for asset_name in ['Asset #1', 'Asset #2', 'Asset #3']: + Asset.objects.create(**{ + **ASSET_CREATION_DATA, + 'name': asset_name, + }) + + self.assertTrue( + all([ + asset.play_order == 0 + for asset in Asset.objects.all() + ]) + ) + + asset_1, asset_2, asset_3 = Asset.objects.all() + asset_ids = [asset_1.asset_id, asset_2.asset_id, asset_3.asset_id] + + response = self.client.post( + playlist_order_url, + data={'ids': ','.join(asset_ids)} + ) + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + + for asset in [asset_1, asset_2, asset_3]: + asset.refresh_from_db() + + self.assertEqual(asset_1.play_order, 0) + self.assertEqual(asset_2.play_order, 1) + self.assertEqual(asset_3.play_order, 2) + + @parametrize( + 'command', + [ + ('next',), + ('previous',), + ('asset&6ee2394e760643748b9353f06f405424',), + ], + ) + @mock.patch('api.views.v1.ZmqPublisher.send_to_viewer', return_value=None) + def test_assets_control(self, send_to_viewer_mock, command): + assets_control_url = reverse('api:assets_control_v1', args=[command]) + response = self.client.get(assets_control_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(send_to_viewer_mock.call_count, 1) + self.assertEqual(send_to_viewer_mock.call_args[0][0], command) + self.assertEqual(response.data, 'Asset switched') + + @mock.patch( + 'api.views.v1.is_up_to_date', + return_value=False + ) + @mock.patch( + 'lib.diagnostics.get_load_avg', + return_value={'15 min': 0.11} + ) + @mock.patch('api.views.v1.size', return_value='15G') + @mock.patch('api.views.v1.statvfs', mock.MagicMock()) + def test_device_info( + self, + size_mock, + get_load_avg_mock, + is_up_to_date_mock + ): + is_up_to_date_mock.return_value = False + info_url = reverse('api:info_v1') + response 
= self.client.get(info_url) + data = response.data + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(size_mock.call_count, 1) + self.assertEqual(get_load_avg_mock.call_count, 1) + self.assertEqual(is_up_to_date_mock.call_count, 1) + self.assertEqual(data['viewlog'], 'Not yet implemented') + + @mock.patch( + 'api.views.v1.reboot_anthias.apply_async', + side_effect=(lambda: None) + ) + def test_reboot(self, reboot_anthias_mock): + reboot_url = reverse('api:reboot_v1') + response = self.client.post(reboot_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(reboot_anthias_mock.call_count, 1) + + @mock.patch( + 'api.views.v1.shutdown_anthias.apply_async', + side_effect=(lambda: None) + ) + def test_shutdown(self, shutdown_anthias_mock): + shutdown_url = reverse('api:shutdown_v1') + response = self.client.post(shutdown_url) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(shutdown_anthias_mock.call_count, 1) + + @mock.patch('api.views.v1.ZmqPublisher.send_to_viewer', return_value=None) + def test_viewer_current_asset(self, send_to_viewer_mock): + asset = Asset.objects.create(**{ + **ASSET_CREATION_DATA, + 'is_enabled': 1, + }) + asset_id = asset.asset_id + + with ( + mock.patch( + 'api.views.v1.ZmqCollector.recv_json', + side_effect=(lambda _: { + 'current_asset_id': asset_id + }) + ) + ): + viewer_current_asset_url = reverse('api:viewer_current_asset_v1') + response = self.client.get(viewer_current_asset_url) + data = response.data + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(send_to_viewer_mock.call_count, 1) + + self.assertEqual(data['asset_id'], asset_id) + self.assertEqual(data['is_active'], 1) diff --git a/api/urls.py b/api/urls.py new file mode 100644 index 000000000..2f7e483d7 --- /dev/null +++ b/api/urls.py @@ -0,0 +1,77 @@ +from django.urls import path +from .views.v1 import ( + AssetViewV1, + AssetListViewV1, + AssetContentView, + FileAssetView, + PlaylistOrderView, + BackupView, + RecoverView, + AssetsControlView, + InfoView, + RebootView, + ShutdownView, + ViewerCurrentAssetView +) +from .views.v1_1 import ( + AssetListViewV1_1, + AssetViewV1_1 +) +from .views.v1_2 import ( + AssetListViewV1_2, + AssetViewV1_2 +) + +app_name = 'api' + +urlpatterns = [ + # v1 endpoints + path('v1/assets', AssetListViewV1.as_view(), name='asset_list_v1'), + path( + 'v1/assets/order', + PlaylistOrderView.as_view(), + name='playlist_order_v1', + ), + path( + 'v1/assets/control/', + AssetsControlView.as_view(), + name='assets_control_v1', + ), + path( + 'v1/assets/', + AssetViewV1.as_view(), + name='asset_detail_v1', + ), + path( + 'v1/assets//content', + AssetContentView.as_view(), + name='asset_content_v1', + ), + path('v1/file_asset', FileAssetView.as_view(), name='file_asset_v1'), + path('v1/backup', BackupView.as_view(), name='backup_v1'), + path('v1/recover', RecoverView.as_view(), name='recover_v1'), + path('v1/info', InfoView.as_view(), name='info_v1'), + path('v1/reboot', RebootView.as_view(), name='reboot_v1'), + path('v1/shutdown', ShutdownView.as_view(), name='shutdown_v1'), + path( + 'v1/viewer_current_asset', + ViewerCurrentAssetView.as_view(), + name='viewer_current_asset_v1', + ), + + # v1.1 endpoints + path('v1.1/assets', AssetListViewV1_1.as_view(), name='asset_list_v1_1'), + path( + 'v1.1/assets/', + AssetViewV1_1.as_view(), + name='asset_detail_v1_1', + ), + + # v1.2 endpoints + path('v1.2/assets', AssetListViewV1_2.as_view(), 
name='asset_list_v1_2'), + path( + 'v1.2/assets/', + AssetViewV1_2.as_view(), + name='asset_detail_v1_2', + ) +] diff --git a/api/views/v1.py b/api/views/v1.py index aa3efa5ae..b7327b736 100644 --- a/api/views/v1.py +++ b/api/views/v1.py @@ -1,223 +1,252 @@ import uuid -from base64 import b64encode -from flask import request -from flask_restful_swagger_2 import Resource, swagger -from mimetypes import guess_type, guess_extension -from os import path, remove, statvfs -from werkzeug.wrappers import Request - +from inspect import cleandoc +from rest_framework import serializers, status +from rest_framework.response import Response +from rest_framework.views import APIView +from api.serializers import ( + AssetSerializer, + CreateAssetSerializerV1_1, + UpdateAssetSerializer, +) from api.helpers import ( - AssetModel, - AssetContentModel, - api_response, - prepare_asset, + AssetCreationException, + parse_request, + save_active_assets_ordering, +) +from base64 import b64encode +from drf_spectacular.types import OpenApiTypes +from drf_spectacular.utils import ( + extend_schema, + inline_serializer, + OpenApiExample, + OpenApiParameter, + OpenApiRequest, ) -from celery_tasks import shutdown_anthias, reboot_anthias from hurry.filesize import size from lib import ( - db, - diagnostics, - assets_helper, backup_helper, + diagnostics ) from lib.auth import authorized from lib.github import is_up_to_date -from lib.utils import connect_to_redis, url_fails -from settings import ( - settings, - ZmqCollector, - ZmqPublisher, -) +from lib.utils import connect_to_redis +from mimetypes import guess_type, guess_extension +from os import path, remove, statvfs +from anthias_app.models import Asset +from celery_tasks import reboot_anthias, shutdown_anthias +from settings import settings, ZmqCollector, ZmqPublisher r = connect_to_redis() +MODEL_STRING_EXAMPLE = """ +Yes, that is just a string of JSON not JSON itself it will be parsed on the +other end. It's recommended to set `Content-Type` to +`application/x-www-form-urlencoded` and send the model as a string. + +``` +model: "{ + "name": "Website", + "mimetype": "webpage", + "uri": "http://example.com", + "is_active": 0, + "start_date": "2017-02-02T00:33:00.000Z", + "end_date": "2017-03-01T00:33:00.000Z", + "duration": "10", + "is_enabled": 0, + "is_processing": 0, + "nocache": 0, + "play_order": 0, + "skip_asset_check": 0 +}" +``` +""" + +V1_ASSET_REQUEST = OpenApiRequest( + inline_serializer( + name='ModelString', + fields={ + 'model': serializers.CharField( + help_text=MODEL_STRING_EXAMPLE, + ), + }, + ), + examples=[ + OpenApiExample( + name='Example 1', + value={'model': MODEL_STRING_EXAMPLE} + ), + ], +) -class Assets(Resource): - method_decorators = [authorized] - @swagger.doc({ - 'responses': { - '200': { - 'description': 'List of assets', - 'schema': { - 'type': 'array', - 'items': AssetModel +class AssetViewV1(APIView): + serializer_class = AssetSerializer - } - } - } - }) - def get(self): - with db.conn(settings['database']) as conn: - assets = assets_helper.read(conn) - return assets - - @api_response - @swagger.doc({ - 'parameters': [ - { - 'name': 'model', - 'in': 'formData', - 'type': 'string', - 'description': - ''' - Yes, that is just a string of JSON not JSON itself it will - be parsed on the other end. 
- - Content-Type: application/x-www-form-urlencoded - model: "{ - "name": "Website", - "mimetype": "webpage", - "uri": "http://example.com", - "is_active": 0, - "start_date": "2017-02-02T00:33:00.000Z", - "end_date": "2017-03-01T00:33:00.000Z", - "duration": "10", - "is_enabled": 0, - "is_processing": 0, - "nocache": 0, - "play_order": 0, - "skip_asset_check": 0 - }" - ''' - } - ], - 'responses': { - '201': { - 'description': 'Asset created', - 'schema': AssetModel - } + @extend_schema(summary='Get asset') + @authorized + def get(self, request, asset_id, format=None): + asset = Asset.objects.get(asset_id=asset_id) + return Response(AssetSerializer(asset).data) + + @extend_schema( + summary='Update asset', + request=V1_ASSET_REQUEST, + responses={ + 201: AssetSerializer } - }) - def post(self): - asset = prepare_asset(request) - if url_fails(asset['uri']): - raise Exception("Could not retrieve file. Check the asset URL.") - with db.conn(settings['database']) as conn: - return assets_helper.create(conn, asset), 201 - - -class Asset(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset' - } - ], - 'responses': { - '200': { - 'description': 'Asset', - 'schema': AssetModel + ) + @authorized + def put(self, request, asset_id, format=None): + asset = Asset.objects.get(asset_id=asset_id) + + data = parse_request(request) + serializer = UpdateAssetSerializer(asset, data=data, partial=False) + + if serializer.is_valid(): + serializer.save() + else: + return Response( + serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + asset.refresh_from_db() + return Response(AssetSerializer(asset).data) + + @extend_schema(summary='Delete asset') + @authorized + def delete(self, request, asset_id, format=None): + asset = Asset.objects.get(asset_id=asset_id) + + try: + if asset.uri.startswith(settings['assetdir']): + remove(asset.uri) + except OSError: + pass + + asset.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class AssetContentView(APIView): + @extend_schema( + summary='Get asset content', + description=cleandoc(""" + The content of the asset. + `type` can either be `file` or `url`. + + In case of a file, the fields `mimetype`, `filename`, and `content` + will be present. In case of a URL, the field `url` will be present. 
+ """), + responses={ + 200: { + 'type': 'object', + 'properties': { + 'type': {'type': 'string'}, + 'url': {'type': 'string'}, + 'filename': {'type': 'string'}, + 'mimetype': {'type': 'string'}, + 'content': {'type': 'string'}, + } } } - }) - def get(self, asset_id): - with db.conn(settings['database']) as conn: - return assets_helper.read(conn, asset_id) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset' - }, - { - 'name': 'model', - 'in': 'formData', - 'type': 'string', - 'description': - ''' - Content-Type: application/x-www-form-urlencoded - model: "{ - "asset_id": "793406aa1fd34b85aa82614004c0e63a", - "name": "Website", - "mimetype": "webpage", - "uri": "http://example.com", - "is_active": 0, - "start_date": "2017-02-02T00:33:00.000Z", - "end_date": "2017-03-01T00:33:00.000Z", - "duration": "10", - "is_enabled": 0, - "is_processing": 0, - "nocache": 0, - "play_order": 0, - "skip_asset_check": 0 - }" - ''' + ) + @authorized + def get(self, request, asset_id, format=None): + asset = Asset.objects.get(asset_id=asset_id) + + if path.isfile(asset.uri): + filename = asset.name + + with open(asset.uri, 'rb') as f: + content = f.read() + + mimetype = guess_type(filename)[0] + if not mimetype: + mimetype = 'application/octet-stream' + + result = { + 'type': 'file', + 'filename': filename, + 'content': b64encode(content).decode(), + 'mimetype': mimetype } - ], - 'responses': { - '200': { - 'description': 'Asset updated', - 'schema': AssetModel + else: + result = { + 'type': 'url', + 'url': asset.uri } + + return Response(result) + + +class AssetListViewV1(APIView): + serializer_class = AssetSerializer + + @extend_schema( + summary='List assets', + responses={ + 200: AssetSerializer(many=True) } - }) - def put(self, asset_id): - with db.conn(settings['database']) as conn: - return assets_helper.update(conn, asset_id, prepare_asset(request)) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset' - }, - ], - 'responses': { - '204': { - 'description': 'Deleted' - } + ) + @authorized + def get(self, request, format=None): + queryset = Asset.objects.all() + serializer = AssetSerializer(queryset, many=True) + return Response(serializer.data) + + @extend_schema( + summary='Create asset', + request=V1_ASSET_REQUEST, + responses={ + 201: AssetSerializer } - }) - def delete(self, asset_id): - with db.conn(settings['database']) as conn: - asset = assets_helper.read(conn, asset_id) - try: - if asset['uri'].startswith(settings['assetdir']): - remove(asset['uri']) - except OSError: - pass - assets_helper.delete(conn, asset_id) - return '', 204 # return an OK with no content - - -class FileAsset(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'file_upload', - 'type': 'file', - 'in': 'formData', - 'description': 'File to be sent' + ) + @authorized + def post(self, request, format=None): + data = parse_request(request) + + try: + serializer = CreateAssetSerializerV1_1(data=data) + if not serializer.is_valid(): + raise AssetCreationException(serializer.errors) + except AssetCreationException as error: + return Response(error.errors, status=status.HTTP_400_BAD_REQUEST) + + asset = Asset.objects.create(**serializer.data) + + return Response( + AssetSerializer(asset).data, status=status.HTTP_201_CREATED) + + +class FileAssetView(APIView): + @extend_schema( + summary='Upload file asset', + 
request={ + 'multipart/form-data': { + 'type': 'object', + 'properties': { + 'file_upload': { + 'type': 'string', + 'format': 'binary' + } + } } - ], - 'responses': { - '200': { - 'description': 'File path', - 'schema': { - 'type': 'string' + }, + responses={ + 200: { + 'type': 'object', + 'properties': { + 'uri': {'type': 'string'}, + 'ext': {'type': 'string'} } } } - }) - def post(self): - req = Request(request.environ) - file_upload = req.files.get('file_upload') - filename = file_upload.filename + ) + @authorized + def post(self, request): + file_upload = request.data.get('file_upload') + filename = file_upload.name file_type = guess_type(filename)[0] if not file_type: @@ -228,7 +257,8 @@ def post(self): file_path = path.join( settings['assetdir'], - uuid.uuid5(uuid.NAMESPACE_URL, filename).hex) + ".tmp" + uuid.uuid5(uuid.NAMESPACE_URL, filename).hex, + ) + ".tmp" if 'Content-Range' in request.headers: range_str = request.headers['Content-Range'] @@ -237,242 +267,210 @@ def post(self): f.seek(start_bytes) f.write(file_upload.read()) else: - file_upload.save(file_path) - - return {'uri': file_path, 'ext': guess_extension(file_type)} - - -class PlaylistOrder(Resource): - method_decorators = [api_response, authorized] + with open(file_path, 'wb') as f: + f.write(file_upload.read()) - @swagger.doc({ - 'parameters': [ - { - 'name': 'ids', - 'in': 'formData', - 'type': 'string', - 'description': - ''' - Content-Type: application/x-www-form-urlencoded - ids: "793406aa1fd34b85aa82614004c0e63a,1c5cfa719d1f4a9abae16c983a18903b,9c41068f3b7e452baf4dc3f9b7906595" - comma separated ids - ''' # noqa: E501 - }, - ], - 'responses': { - '204': { - 'description': 'Sorted' + return Response({'uri': file_path, 'ext': guess_extension(file_type)}) + + +class PlaylistOrderView(APIView): + @extend_schema( + summary='Update playlist order', + request={ + 'application/x-www-form-urlencoded': { + 'type': 'object', + 'properties': { + 'ids': { + 'type': 'string', + 'description': cleandoc( + """ + Comma-separated list of asset IDs in the order + they should be played. For example: + + `793406aa1fd34b85aa82614004c0e63a,1c5cfa719d1f4a9abae16c983a18903b,9c41068f3b7e452baf4dc3f9b7906595` + """ + ) + } + }, } } - }) - def post(self): - with db.conn(settings['database']) as conn: - assets_helper.save_ordering( - conn, request.form.get('ids', '').split(',')) - - -class Backup(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'responses': { - '200': { - 'description': 'Backup filename', - 'schema': { - 'type': 'string' - } + ) + @authorized + def post(self, request): + asset_ids = request.data.get('ids', '').split(',') + save_active_assets_ordering(asset_ids) + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class BackupView(APIView): + @extend_schema( + summary='Create backup', + description=cleandoc(""" + Create a backup of the current Anthias instance, which + includes the following: + * current settings + * image and video assets + * asset metadata (e.g. 
name, duration, play order, status), + which is stored in a SQLite database + """), + responses={ + 201: { + 'type': 'string', + 'example': 'anthias-backup-2021-09-16T15-00-00.tar.gz', + 'description': 'Backup file name' } } - }) - def post(self): + ) + @authorized + def post(self, request): filename = backup_helper.create_backup(name=settings['player_name']) - return filename, 201 - - -class Recover(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'backup_upload', - 'type': 'file', - 'in': 'formData' + return Response(filename, status=status.HTTP_201_CREATED) + + +class RecoverView(APIView): + @extend_schema( + summary='Recover from backup', + description=cleandoc(""" + Recover data from a backup file. The backup file must be + a `.tar.gz` file. + """), + request={ + 'multipart/form-data': { + 'type': 'object', + 'properties': { + 'backup_upload': { + 'type': 'string', + 'format': 'binary' + } + } } - ], - 'responses': { - '200': { - 'description': 'Recovery successful' + }, + responses={ + 200: { + 'type': 'string', + 'example': 'Recovery successful.', } - } - }) - def post(self): + }, + ) + @authorized + def post(self, request): publisher = ZmqPublisher.get_instance() - req = Request(request.environ) - file_upload = (req.files['backup_upload']) - filename = file_upload.filename + file_upload = (request.data.get('backup_upload')) + filename = file_upload.name if guess_type(filename)[0] != 'application/x-tar': raise Exception("Incorrect file extension.") try: publisher.send_to_viewer('stop') location = path.join("static", filename) - file_upload.save(location) + + with open(location, 'wb') as f: + f.write(file_upload.read()) + backup_helper.recover(location) - return "Recovery successful." 
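A rough sketch of how a client might drive the backup and recover endpoints described above; the host, file names, and lack of authentication are assumptions:

```python
# Sketch: trigger a backup, then upload a .tar.gz archive to /recover.
# Host and auth are hypothetical; the endpoints mirror BackupView/RecoverView.
import requests

BASE = "http://anthias.local/api/v1"

# 1. Create a backup; the response body is the archive file name on the device.
backup_name = requests.post(f"{BASE}/backup").json()
print("Created backup:", backup_name)

# 2. Restore from a locally available backup archive (placeholder path).
with open("anthias-backup.tar.gz", "rb") as archive:
    resp = requests.post(
        f"{BASE}/recover",
        files={"backup_upload": archive},
    )
print(resp.json())  # "Recovery successful."
```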
+ + return Response("Recovery successful.") finally: publisher.send_to_viewer('play') -class Reboot(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'responses': { - '200': { - 'description': 'Reboot system' +class AssetsControlView(APIView): + @extend_schema( + summary='Control asset playback', + description=cleandoc(""" + Use any of the following commands to control asset playback: + * `next` - Show the next asset + * `previous` - Show the previous asset + * `asset&{asset_id}` - Show the asset with the specified `asset_id` + """), + responses={ + 200: { + 'type': 'string', + 'example': 'Asset switched', } - } - }) - def post(self): - reboot_anthias.apply_async() - return '', 200 - - -class Shutdown(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'responses': { - '200': { - 'description': 'Shutdown system' + }, + parameters=[ + OpenApiParameter( + name='command', + location=OpenApiParameter.PATH, + type=OpenApiTypes.STR, + enum=['next', 'previous', 'asset&{asset_id}'], + ) + ] + ) + @authorized + def get(self, request, command): + publisher = ZmqPublisher.get_instance() + publisher.send_to_viewer(command) + return Response("Asset switched") + + +class InfoView(APIView): + @extend_schema( + summary='Get system information', + responses={ + 200: { + 'type': 'object', + 'properties': { + 'viewlog': {'type': 'string'}, + 'loadavg': {'type': 'number'}, + 'free_space': {'type': 'string'}, + 'display_power': {'type': 'string'}, + 'up_to_date': {'type': 'boolean'} + }, + 'example': { + 'viewlog': 'Not yet implemented', + 'loadavg': 0.1, + 'free_space': '10G', + 'display_power': 'on', + 'up_to_date': True + } } } - }) - def post(self): - shutdown_anthias.apply_async() - return '', 200 - - -class Info(Resource): - method_decorators = [api_response, authorized] + ) + @authorized + def get(self, request): + viewlog = "Not yet implemented" - def get(self): # Calculate disk space slash = statvfs("/") free_space = size(slash.f_bavail * slash.f_frsize) display_power = r.get('display_power') - return { + return Response({ + 'viewlog': viewlog, 'loadavg': diagnostics.get_load_avg()['15 min'], 'free_space': free_space, 'display_power': display_power, 'up_to_date': is_up_to_date() - } - - -class AssetsControl(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'command', - 'type': 'string', - 'in': 'path', - 'description': - ''' - Control commands: - next - show next asset - previous - show previous asset - asset&asset_id - show asset with `asset_id` id - ''' - } - ], - 'responses': { - '200': { - 'description': 'Asset switched' - } - } - }) - def get(self, command): - publisher = ZmqPublisher.get_instance() - publisher.send_to_viewer(command) - return "Asset switched" - - -class AssetContent(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset' - } - ], - 'responses': { - '200': { - 'description': - ''' - The content of the asset. - - 'type' can either be 'file' or 'url'. - - In case of a file, the fields 'mimetype', 'filename', and - 'content' will be present. In case of a URL, the field - 'url' will be present. 
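To make the control commands above concrete, a small sketch of driving playback from a script (host and auth are assumptions; the asset id is a placeholder):

```python
# Sketch: switch the viewer using the assets/control endpoint.
# Commands mirror AssetsControlView: 'next', 'previous', 'asset&<asset_id>'.
import requests

BASE = "http://anthias.local/api/v1"

requests.get(f"{BASE}/assets/control/next")      # show the next asset
requests.get(f"{BASE}/assets/control/previous")  # show the previous asset

# Jump straight to a specific asset (placeholder id).
requests.get(f"{BASE}/assets/control/asset&793406aa1fd34b85aa82614004c0e63a")
```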
- ''', - 'schema': AssetContentModel - } - } - }) - def get(self, asset_id): - with db.conn(settings['database']) as conn: - asset = assets_helper.read(conn, asset_id) - - if isinstance(asset, list): - raise Exception('Invalid asset ID provided') - - if path.isfile(asset['uri']): - filename = asset['name'] + }) - with open(asset['uri'], 'rb') as f: - content = f.read() - mimetype = guess_type(filename)[0] - if not mimetype: - mimetype = 'application/octet-stream' - - result = { - 'type': 'file', - 'filename': filename, - 'content': b64encode(content).decode(), - 'mimetype': mimetype - } - else: - result = { - 'type': 'url', - 'url': asset['uri'] - } +class RebootView(APIView): + @extend_schema(summary='Reboot system') + @authorized + def post(self, request): + reboot_anthias.apply_async() + return Response(status=status.HTTP_200_OK) - return result +class ShutdownView(APIView): + @extend_schema(summary='Shut down system') + @authorized + def post(self, request): + shutdown_anthias.apply_async() + return Response(status=status.HTTP_200_OK) -class ViewerCurrentAsset(Resource): - method_decorators = [api_response, authorized] - @swagger.doc({ - 'responses': { - '200': { - 'description': 'Currently displayed asset in viewer', - 'schema': AssetModel - } - } - }) - def get(self): +class ViewerCurrentAssetView(APIView): + @extend_schema( + summary='Get current asset', + description='Get the current asset being displayed on the screen', + responses={200: AssetSerializer} + ) + @authorized + def get(self, request): collector = ZmqCollector.get_instance() publisher = ZmqPublisher.get_instance() @@ -482,7 +480,7 @@ def get(self): current_asset_id = collector_result.get('current_asset_id') if not current_asset_id: - return [] + return Response([]) - with db.conn(settings['database']) as conn: - return assets_helper.read(conn, current_asset_id) + queryset = Asset.objects.get(asset_id=current_asset_id) + return Response(AssetSerializer(queryset).data) diff --git a/api/views/v1_1.py b/api/views/v1_1.py index d0a37c01b..4a513877c 100644 --- a/api/views/v1_1.py +++ b/api/views/v1_1.py @@ -1,139 +1,103 @@ -from flask import request -from flask_restful_swagger_2 import Resource, swagger -from os import remove +from drf_spectacular.utils import extend_schema +from rest_framework import status +from rest_framework.response import Response +from rest_framework.views import APIView -from api.helpers import ( - AssetModel, - api_response, - prepare_asset, +from anthias_app.models import Asset +from api.helpers import AssetCreationException, parse_request +from api.serializers import ( + AssetSerializer, + CreateAssetSerializerV1_1, + UpdateAssetSerializer, ) -from lib import db, assets_helper +from api.views.v1 import V1_ASSET_REQUEST from lib.auth import authorized -from lib.utils import url_fails +from os import remove from settings import settings -class AssetsV1_1(Resource): - method_decorators = [authorized] - - @swagger.doc({ - 'responses': { - '200': { - 'description': 'List of assets', - 'schema': { - 'type': 'array', - 'items': AssetModel - - } - } - } - }) - def get(self): - with db.conn(settings['database']) as conn: - assets = assets_helper.read(conn) - return assets - - @api_response - @swagger.doc({ - 'parameters': [ - { - 'in': 'body', - 'name': 'model', - 'description': 'Adds a asset', - 'schema': AssetModel, - 'required': True - } - ], - 'responses': { - '201': { - 'description': 'Asset created', - 'schema': AssetModel - } +class AssetListViewV1_1(APIView): + @extend_schema( + summary='List 
assets', + responses={ + 200: AssetSerializer(many=True) } - }) - def post(self): - asset = prepare_asset(request, unique_name=True) - if url_fails(asset['uri']): - raise Exception("Could not retrieve file. Check the asset URL.") - with db.conn(settings['database']) as conn: - return assets_helper.create(conn, asset), 201 - - -class AssetV1_1(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset' - } - ], - 'responses': { - '200': { - 'description': 'Asset', - 'schema': AssetModel - } + ) + @authorized + def get(self, request): + queryset = Asset.objects.all() + serializer = AssetSerializer(queryset, many=True) + return Response(serializer.data) + + @extend_schema( + summary='Create asset', + request=V1_ASSET_REQUEST, + responses={ + 201: AssetSerializer } - }) - def get(self, asset_id): - with db.conn(settings['database']) as conn: - return assets_helper.read(conn, asset_id) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset', - 'required': True - }, - { - 'in': 'body', - 'name': 'model', - 'description': 'Adds an asset', - 'schema': AssetModel, - 'required': True - } - ], - 'responses': { - '200': { - 'description': 'Asset updated', - 'schema': AssetModel - } + ) + @authorized + def post(self, request): + data = parse_request(request) + + try: + serializer = CreateAssetSerializerV1_1(data=data, unique_name=True) + if not serializer.is_valid(): + raise AssetCreationException(serializer.errors) + except AssetCreationException as error: + return Response(error.errors, status=status.HTTP_400_BAD_REQUEST) + + asset = Asset.objects.create(**serializer.data) + + return Response( + AssetSerializer(asset).data, status=status.HTTP_201_CREATED) + + +class AssetViewV1_1(APIView): + @extend_schema( + summary='Get asset', + responses={ + 200: AssetSerializer, } - }) - def put(self, asset_id): - with db.conn(settings['database']) as conn: - return assets_helper.update(conn, asset_id, prepare_asset(request)) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset', - 'required': True - - }, - ], - 'responses': { - '204': { - 'description': 'Deleted' - } + ) + @authorized + def get(self, request, asset_id): + asset = Asset.objects.get(asset_id=asset_id) + return Response(AssetSerializer(asset).data) + + @extend_schema( + summary='Update asset', + request=V1_ASSET_REQUEST, + responses={ + 200: AssetSerializer } - }) - def delete(self, asset_id): - with db.conn(settings['database']) as conn: - asset = assets_helper.read(conn, asset_id) - try: - if asset['uri'].startswith(settings['assetdir']): - remove(asset['uri']) - except OSError: - pass - assets_helper.delete(conn, asset_id) - return '', 204 # return an OK with no content + ) + @authorized + def put(self, request, asset_id): + asset = Asset.objects.get(asset_id=asset_id) + + data = parse_request(request) + serializer = UpdateAssetSerializer(asset, data=data, partial=False) + + if serializer.is_valid(): + serializer.save() + else: + return Response( + serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + asset.refresh_from_db() + return Response(AssetSerializer(asset).data) + + @extend_schema(summary='Delete asset') + @authorized + def delete(self, request, asset_id): + asset = Asset.objects.get(asset_id=asset_id) + + try: + if 
asset.uri.startswith(settings['assetdir']): + remove(asset.uri) + except OSError: + pass + + asset.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/api/views/v1_2.py b/api/views/v1_2.py index aeebfa809..ec8dc6f3c 100644 --- a/api/views/v1_2.py +++ b/api/views/v1_2.py @@ -1,220 +1,143 @@ -import json - -from flask import request -from flask_restful_swagger_2 import Resource, swagger -from os import remove -from werkzeug.wrappers import Request +from drf_spectacular.utils import extend_schema +from rest_framework import status +from rest_framework.response import Response +from rest_framework.views import APIView +from anthias_app.models import Asset from api.helpers import ( - AssetModel, - AssetPropertiesModel, - AssetRequestModel, - api_response, - prepare_asset_v1_2, - update_asset, + AssetCreationException, + get_active_asset_ids, + save_active_assets_ordering, +) +from api.serializers import ( + AssetSerializer, + CreateAssetSerializerV1_2, + UpdateAssetSerializer, ) -from lib import db, assets_helper from lib.auth import authorized -from lib.utils import url_fails +from os import remove from settings import settings -class AssetsV1_2(Resource): - method_decorators = [authorized] +class AssetListViewV1_2(APIView): + serializer_class = AssetSerializer - @swagger.doc({ - 'responses': { - '200': { - 'description': 'List of assets', - 'schema': { - 'type': 'array', - 'items': AssetModel - } - } + @extend_schema( + summary='List assets', + responses={ + 200: AssetSerializer(many=True) } - }) - def get(self): - with db.conn(settings['database']) as conn: - return assets_helper.read(conn) - - @api_response - @swagger.doc({ - 'parameters': [ - { - 'in': 'body', - 'name': 'model', - 'description': 'Adds an asset', - 'schema': AssetRequestModel, - 'required': True - } - ], - 'responses': { - '201': { - 'description': 'Asset created', - 'schema': AssetModel - } + ) + @authorized + def get(self, request): + queryset = Asset.objects.all() + serializer = self.serializer_class(queryset, many=True) + return Response(serializer.data) + + @extend_schema( + summary='Create asset', + request=CreateAssetSerializerV1_2, + responses={ + 201: AssetSerializer } - }) - def post(self): - request_environ = Request(request.environ) - asset = prepare_asset_v1_2(request_environ, unique_name=True) - if not asset['skip_asset_check'] and url_fails(asset['uri']): - raise Exception("Could not retrieve file. 
Check the asset URL.") - with db.conn(settings['database']) as conn: - assets = assets_helper.read(conn) - ids_of_active_assets = [ - x['asset_id'] for x in assets if x['is_active']] - - asset = assets_helper.create(conn, asset) - - if asset['is_active']: - ids_of_active_assets.insert( - asset['play_order'], asset['asset_id']) - assets_helper.save_ordering(conn, ids_of_active_assets) - return assets_helper.read(conn, asset['asset_id']), 201 - - -class AssetV1_2(Resource): - method_decorators = [api_response, authorized] - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset' - } - ], - 'responses': { - '200': { - 'description': 'Asset', - 'schema': AssetModel - } - } - }) - def get(self, asset_id): - with db.conn(settings['database']) as conn: - return assets_helper.read(conn, asset_id) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'ID of an asset', - 'required': True - }, - { - 'in': 'body', - 'name': 'properties', - 'description': 'Properties of an asset', - 'schema': AssetPropertiesModel, - 'required': True - } - ], - 'responses': { - '200': { - 'description': 'Asset updated', - 'schema': AssetModel - } - } - }) - def patch(self, asset_id): - data = json.loads(request.data) - with db.conn(settings['database']) as conn: - - asset = assets_helper.read(conn, asset_id) - if not asset: - raise Exception('Asset not found.') - update_asset(asset, data) - - assets = assets_helper.read(conn) - ids_of_active_assets = [ - x['asset_id'] for x in assets if x['is_active']] - - asset = assets_helper.update(conn, asset_id, asset) - - try: - ids_of_active_assets.remove(asset['asset_id']) - except ValueError: - pass - if asset['is_active']: - ids_of_active_assets.insert( - asset['play_order'], asset['asset_id']) - - assets_helper.save_ordering(conn, ids_of_active_assets) - return assets_helper.read(conn, asset_id) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset', - 'required': True - }, - { - 'in': 'body', - 'name': 'model', - 'description': 'Adds an asset', - 'schema': AssetRequestModel, - 'required': True - } - ], - 'responses': { - '200': { - 'description': 'Asset updated', - 'schema': AssetModel - } + ) + @authorized + def post(self, request): + try: + serializer = CreateAssetSerializerV1_2( + data=request.data, unique_name=True) + + if not serializer.is_valid(): + raise AssetCreationException(serializer.errors) + except AssetCreationException as error: + return Response(error.errors, status=status.HTTP_400_BAD_REQUEST) + + active_asset_ids = get_active_asset_ids() + asset = Asset.objects.create(**serializer.data) + + if asset.is_active(): + active_asset_ids.insert(asset.play_order, asset.asset_id) + + save_active_assets_ordering(active_asset_ids) + asset.refresh_from_db() + + return Response( + AssetSerializer(asset).data, + status=status.HTTP_201_CREATED, + ) + + +class AssetViewV1_2(APIView): + serializer_class = AssetSerializer + + @extend_schema(summary='Get asset') + @authorized + def get(self, request, asset_id): + asset = Asset.objects.get(asset_id=asset_id) + serializer = self.serializer_class(asset) + return Response(serializer.data) + + def update(self, request, asset_id, partial=False): + asset = Asset.objects.get(asset_id=asset_id) + serializer = UpdateAssetSerializer( + asset, data=request.data, partial=partial) + + if serializer.is_valid(): + 
serializer.save() + else: + return Response( + serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + active_asset_ids = get_active_asset_ids() + + asset.refresh_from_db() + + try: + active_asset_ids.remove(asset.asset_id) + except ValueError: + pass + + if asset.is_active(): + active_asset_ids.insert(asset.play_order, asset.asset_id) + + save_active_assets_ordering(active_asset_ids) + asset.refresh_from_db() + + return Response(AssetSerializer(asset).data) + + @extend_schema( + summary='Update asset', + request=UpdateAssetSerializer, + responses={ + 200: AssetSerializer } - }) - def put(self, asset_id): - asset = prepare_asset_v1_2(request, asset_id) - with db.conn(settings['database']) as conn: - assets = assets_helper.read(conn) - ids_of_active_assets = [ - x['asset_id'] for x in assets if x['is_active']] - - asset = assets_helper.update(conn, asset_id, asset) - - try: - ids_of_active_assets.remove(asset['asset_id']) - except ValueError: - pass - if asset['is_active']: - ids_of_active_assets.insert( - asset['play_order'], asset['asset_id']) - - assets_helper.save_ordering(conn, ids_of_active_assets) - return assets_helper.read(conn, asset_id) - - @swagger.doc({ - 'parameters': [ - { - 'name': 'asset_id', - 'type': 'string', - 'in': 'path', - 'description': 'id of an asset', - 'required': True - - }, - ], - 'responses': { - '204': { - 'description': 'Deleted' - } + ) + @authorized + def patch(self, request, asset_id): + return self.update(request, asset_id, partial=True) + + @extend_schema( + summary='Update asset', + request=UpdateAssetSerializer, + responses={ + 200: AssetSerializer } - }) - def delete(self, asset_id): - with db.conn(settings['database']) as conn: - asset = assets_helper.read(conn, asset_id) - try: - if asset['uri'].startswith(settings['assetdir']): - remove(asset['uri']) - except OSError: - pass - assets_helper.delete(conn, asset_id) - return '', 204 # return an OK with no content + ) + @authorized + def put(self, request, asset_id): + return self.update(request, asset_id, partial=False) + + @extend_schema(summary='Delete asset') + @authorized + def delete(self, request, asset_id): + asset = Asset.objects.get(asset_id=asset_id) + + try: + if asset.uri.startswith(settings['assetdir']): + remove(asset.uri) + except OSError: + pass + + asset.delete() + + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/bin/build_containers.sh b/bin/build_containers.sh index 29092dfa3..2515eb33f 100755 --- a/bin/build_containers.sh +++ b/bin/build_containers.sh @@ -11,6 +11,7 @@ export GIT_SHORT_HASH=$(git rev-parse --short HEAD) export GIT_HASH=$(git rev-parse HEAD) export BASE_IMAGE_TAG=bookworm export DEBIAN_VERSION=bookworm +export ENVIRONMENT=${ENVIRONMENT:-production} declare -a SERVICES=( server diff --git a/bin/migrate.py b/bin/migrate.py deleted file mode 100644 index 85a320e15..000000000 --- a/bin/migrate.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf8 -*- - -from __future__ import print_function -from __future__ import unicode_literals -import sqlite3 -import os -from contextlib import contextmanager -import datetime - -configdir = os.path.join(os.getenv('HOME'), '.screenly/') -database = os.path.join(configdir, 'screenly.db') - -comma = ','.join - - -def quest(values): - return '=?,'.join(values) + '=?' - - -def query_read_all(keys): - return 'SELECT ' + comma(keys) + ' FROM assets ORDER BY name' - - -def query_update(keys): - return 'UPDATE assets SET ' + quest(keys) + ' WHERE asset_id=?' 
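The v1.2 create and update paths above keep the playlist consistent by rebuilding the list of active asset ids around the changed asset. The helpers come from `api/helpers.py`, which is not part of this hunk; a plausible sketch of their shape, offered as an assumption rather than the actual implementation:

```python
# Sketch only: one possible shape of the ordering helpers used above.
# The real implementations live in api/helpers.py and may differ.
from anthias_app.models import Asset


def get_active_asset_ids():
    # Collect the ids of currently active assets, in play order.
    return [
        asset.asset_id
        for asset in Asset.objects.all().order_by('play_order')
        if asset.is_active()
    ]


def save_active_assets_ordering(active_asset_ids):
    # Persist the new ordering for the listed assets; how assets missing
    # from the list are handled depends on the real implementation.
    for play_order, asset_id in enumerate(active_asset_ids):
        Asset.objects.filter(asset_id=asset_id).update(play_order=play_order)
```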
- - -def mkdict(keys): - return (lambda row: dict([(keys[ki], v) for ki, v in enumerate(row)])) - - -def is_active(asset): - if asset['start_date'] and asset['end_date']: - at = datetime.datetime.utcnow() - return asset['start_date'] < at and asset['end_date'] > at - return False - - -def read(c): - keys = 'asset_id start_date end_date is_enabled'.split(' ') - c.execute(query_read_all(keys)) - mk = mkdict(keys) - assets = [mk(asset) for asset in c.fetchall()] - return assets - - -def update(c, asset_id, asset): - del asset['asset_id'] - c.execute(query_update(list(asset.keys())), list(asset.values()) + [asset_id]) - - -def test_column(col, cursor): - """Test if a column is in the db""" - try: - cursor.execute('SELECT ' + col + ' FROM assets') - except sqlite3.OperationalError: - return False - else: - return True - - -@contextmanager -def open_db_get_cursor(): - with sqlite3.connect(database, detect_types=sqlite3.PARSE_DECLTYPES) as conn: - cursor = conn.cursor() - yield (cursor, conn) - cursor.close() - - -query_add_play_order = """ -begin transaction; -alter table assets add play_order integer default 0; -commit; -""" - -query_add_is_processing = """ -begin transaction; -alter table assets add is_processing integer default 0; -commit; -""" - -query_add_skip_asset_check = """ -begin transaction; -alter table assets add skip_asset_check integer default 0; -commit; -""" - - -def migrate_add_column(col, script): - with open_db_get_cursor() as (cursor, conn): - if test_column(col, cursor): - print(f'Column ({col}) already present') - else: - print(f'Adding new column ({col})') - cursor.executescript(script) - assets = read(cursor) - for asset in assets: - asset.update({'play_order': 0}) - update(cursor, asset['asset_id'], asset) - conn.commit() - - -query_create_assets_table = """ -create table assets( -asset_id text primary key, -name text, -uri text, -md5 text, -start_date timestamp, -end_date timestamp, -duration text, -mimetype text, -is_enabled integer default 0, -nocache integer default 0)""" -query_make_asset_id_primary_key = """ -begin transaction; -create table temp as select asset_id,name,uri,md5,start_date,end_date,duration,mimetype,is_enabled,nocache from assets; -drop table assets; -""" + query_create_assets_table + """; -insert or ignore into assets select * from temp; -drop table temp; -commit;""" - - -def migrate_make_asset_id_primary_key(): - has_primary_key = False - with open_db_get_cursor() as (cursor, _): - table_info = cursor.execute('pragma table_info(assets)') - has_primary_key = table_info.fetchone()[-1] == 1 - if has_primary_key: - print('already has primary key') - else: - with open_db_get_cursor() as (cursor, _): - cursor.executescript(query_make_asset_id_primary_key) - print('asset_id is primary key') - - -query_add_is_enabled_and_nocache = """ -begin transaction; -alter table assets add is_enabled integer default 0; -alter table assets add nocache integer default 0; -commit; -""" - - -def migrate_add_is_enabled_and_nocache(): - with open_db_get_cursor() as (cursor, conn): - col = 'is_enabled,nocache' - if test_column(col, cursor): - print(f'Column ({col}) already present') - else: - cursor.executescript(query_add_is_enabled_and_nocache) - assets = read(cursor) - for asset in assets: - asset.update({'is_enabled': is_active(asset)}) - update(cursor, asset['asset_id'], asset) - conn.commit() - print(f'Added new columns ({col})') - - -query_drop_filename = """BEGIN TRANSACTION; -CREATE TEMPORARY TABLE assets_backup(asset_id, name, uri, md5, start_date, end_date, 
duration, mimetype); -INSERT INTO assets_backup SELECT asset_id, name, uri, md5, start_date, end_date, duration, mimetype FROM assets; -DROP TABLE assets; -CREATE TABLE assets(asset_id TEXT, name TEXT, uri TEXT, md5 TEXT, start_date TIMESTAMP, end_date TIMESTAMP, duration TEXT, mimetype TEXT); -INSERT INTO assets SELECT asset_id, name, uri, md5, start_date, end_date, duration, mimetype FROM assets_backup; -DROP TABLE assets_backup; -COMMIT; -""" - - -def migrate_drop_filename(): - with open_db_get_cursor() as (cursor, _): - col = 'filename' - if test_column(col, cursor): - cursor.executescript(query_drop_filename) - print(f'Dropped obsolete column ({col})') - else: - print(f'Obsolete column ({col}) is not present') -# ✂-------- - - -if __name__ == '__main__': - migrate_drop_filename() - migrate_add_is_enabled_and_nocache() - migrate_make_asset_id_primary_key() - migrate_add_column('play_order', query_add_play_order) - migrate_add_column('is_processing', query_add_is_processing) - migrate_add_column('skip_asset_check', query_add_skip_asset_check) - print("Migration done.") diff --git a/bin/prepare_test_environment.sh b/bin/prepare_test_environment.sh index e849164ed..ae77866f4 100644 --- a/bin/prepare_test_environment.sh +++ b/bin/prepare_test_environment.sh @@ -47,6 +47,10 @@ EOF if [ "$START_SERVER" = true ]; then cd /usr/src/app - python server.py & + + ./manage.py makemigrations + ./manage.py migrate --fake-initial + ./manage.py runserver 127.0.0.1:8080 & + sleep 3 fi diff --git a/bin/start_server.sh b/bin/start_server.sh index bd268b495..b7220a9cc 100755 --- a/bin/start_server.sh +++ b/bin/start_server.sh @@ -5,17 +5,32 @@ ENVIRONMENT=${ENVIRONMENT:-production} mkdir -p \ /data/.config \ /data/.screenly \ + /data/.screenly/backups \ /data/screenly_assets cp -n /usr/src/app/ansible/roles/screenly/files/screenly.conf /data/.screenly/screenly.conf cp -n /usr/src/app/ansible/roles/screenly/files/default_assets.yml /data/.screenly/default_assets.yml -cp -n /usr/src/app/ansible/roles/screenly/files/screenly.db /data/.screenly/screenly.db echo "Running migration..." -python ./bin/migrate.py + +# The following block ensures that the migration is transactional and that the +# database is not left in an inconsistent state if the migration fails. + +if [ -f /data/.screenly/screenly.db ]; then + ./manage.py dbbackup --noinput --clean && \ + ./manage.py migrate --fake-initial --noinput || \ + ./manage.py dbrestore --noinput +else + ./manage.py migrate && \ + ./manage.py dbbackup --noinput --clean +fi if [[ "$ENVIRONMENT" == "development" ]]; then - flask --app server.py run --debug --reload --host 0.0.0.0 --port 8080 + echo "Starting Django development server..." + ./manage.py runserver 0.0.0.0:8080 else - python server.py + echo "Generating Django static files..." + ./manage.py collectstatic --clear --noinput + echo "Starting Gunicorn..." + python run_gunicorn.py fi diff --git a/celery_tasks.py b/celery_tasks.py old mode 100644 new mode 100755 index dc49dd2e9..e6e6c3555 --- a/celery_tasks.py +++ b/celery_tasks.py @@ -1,17 +1,31 @@ +import django import sh from celery import Celery from datetime import timedelta -from lib import diagnostics -from lib.utils import ( - connect_to_redis, - is_balena_app, - reboot_via_balena_supervisor, - shutdown_via_balena_supervisor, -) from os import getenv, path from tenacity import Retrying, stop_after_attempt, wait_fixed +try: + django.setup() + + # Place imports that uses Django in this block. 
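Because Celery workers run outside `manage.py`, `django.setup()` has to be called before any model import, which is what the guarded block above does. A minimal standalone sketch of the same pattern; the settings module matches the one set elsewhere in this change, everything else is illustrative:

```python
# Sketch: any standalone process (e.g. a Celery worker) must configure
# Django before touching the ORM.
import os

import django

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings')
django.setup()  # must run before importing models

from anthias_app.models import Asset  # noqa: E402


def enabled_asset_count():
    # Illustrative query; assumes the Asset model's is_enabled field.
    return Asset.objects.filter(is_enabled=True).count()
```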
+ + from lib import diagnostics + from lib.utils import ( + connect_to_redis, + is_balena_app, + reboot_via_balena_supervisor, + shutdown_via_balena_supervisor, + ) +except Exception: + pass + + +__author__ = "Screenly, Inc" +__copyright__ = "Copyright 2012-2024, Screenly, Inc" +__license__ = "Dual License: GPLv2 and Commercial License" + CELERY_RESULT_BACKEND = getenv( 'CELERY_RESULT_BACKEND', 'redis://localhost:6379/0') diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 7ff00c0fa..127fbe433 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -13,7 +13,7 @@ services: - ENVIRONMENT=development restart: always volumes: - - resin-data:/data + - anthias-data:/data - ./:/usr/src/app/ anthias-websocket: @@ -27,7 +27,7 @@ services: - LISTEN=0.0.0.0 restart: always volumes: - - resin-data:/data + - anthias-data:/data anthias-celery: build: @@ -42,7 +42,7 @@ services: - CELERY_RESULT_BACKEND=redis://redis:6379/0 restart: always volumes: - - resin-data:/data + - anthias-data:/data redis: platform: "linux/amd64" @@ -61,8 +61,8 @@ services: - anthias-websocket restart: always volumes: - - resin-data:/data:ro + - anthias-data:/data:ro volumes: - resin-data: + anthias-data: redis-data: diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 1e7f231ff..468bc1cff 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -14,7 +14,7 @@ services: tty: true volumes: - .:/usr/src/app - - resin-data:/data + - anthias-data:/data anthias-celery: build: @@ -29,11 +29,11 @@ services: - CELERY_RESULT_BACKEND=redis://redis:6379/0 restart: always volumes: - - resin-data:/data + - anthias-data:/data redis: image: redis:alpine volumes: - resin-data: + anthias-data: redis-data: diff --git a/docker-compose.yml.tmpl b/docker-compose.yml.tmpl index 049c04ca2..6bfdd7e61 100644 --- a/docker-compose.yml.tmpl +++ b/docker-compose.yml.tmpl @@ -39,7 +39,7 @@ services: - resin-data:/data - /home/${USER}/.screenly:/data/.screenly - /home/${USER}/screenly_assets:/data/screenly_assets - - /home/${USER}/screenly/static:/data/screenly/static + - /home/${USER}/screenly/staticfiles:/data/screenly/staticfiles - /etc/timezone:/etc/timezone:ro - /etc/localtime:/etc/localtime:ro labels: @@ -142,7 +142,7 @@ services: - resin-data:/data:ro - /home/${USER}/.screenly:/data/.screenly:ro - /home/${USER}/screenly_assets:/data/screenly_assets:ro - - /home/${USER}/screenly/static:/data/screenly/static:ro + - /home/${USER}/screenly/staticfiles:/data/screenly/staticfiles:ro - /etc/timezone:/etc/timezone:ro - /etc/localtime:/etc/localtime:ro diff --git a/docker/Dockerfile.celery.tmpl b/docker/Dockerfile.celery.tmpl index 46f6aa2e2..25db31408 100644 --- a/docker/Dockerfile.celery.tmpl +++ b/docker/Dockerfile.celery.tmpl @@ -11,6 +11,7 @@ COPY . 
/usr/src/app/ ENV GIT_HASH=$GIT_HASH ENV GIT_SHORT_HASH=$GIT_SHORT_HASH ENV GIT_BRANCH=$GIT_BRANCH +ENV DJANGO_SETTINGS_MODULE="anthias_django.settings" CMD celery -A celery_tasks.celery worker \ -B -n worker@anthias \ diff --git a/docker/Dockerfile.nginx.tmpl b/docker/Dockerfile.nginx.tmpl index 82a56e49a..3c281df64 100644 --- a/docker/Dockerfile.nginx.tmpl +++ b/docker/Dockerfile.nginx.tmpl @@ -12,7 +12,7 @@ ENV GIT_HASH=$GIT_HASH ENV GIT_SHORT_HASH=$GIT_SHORT_HASH ENV GIT_BRANCH=$GIT_BRANCH -COPY docker/nginx/nginx.conf /etc/nginx/sites-enabled/anthias.conf +COPY docker/nginx/nginx.$ENVIRONMENT.conf /etc/nginx/sites-enabled/ RUN rm -f /etc/nginx/sites-enabled/default CMD ["nginx", "-g", "daemon off;"] diff --git a/docker/Dockerfile.test.tmpl b/docker/Dockerfile.test.tmpl index dc1cabcab..4cb27c11e 100644 --- a/docker/Dockerfile.test.tmpl +++ b/docker/Dockerfile.test.tmpl @@ -53,8 +53,8 @@ COPY . /usr/src/app WORKDIR /usr/src/app RUN mkdir -p /data/.screenly /data/screenly_assets -RUN cp ansible/roles/screenly/files/screenly.db \ - ansible/roles/screenly/files/screenly.conf /data/.screenly +RUN cp ansible/roles/screenly/files/screenly.conf \ + /data/.screenly ENV GIT_HASH=$GIT_HASH ENV GIT_SHORT_HASH=$GIT_SHORT_HASH diff --git a/docker/Dockerfile.viewer.tmpl b/docker/Dockerfile.viewer.tmpl index 1d58353b1..91c40ab7d 100644 --- a/docker/Dockerfile.viewer.tmpl +++ b/docker/Dockerfile.viewer.tmpl @@ -166,6 +166,7 @@ ENV GIT_HASH=$GIT_HASH ENV GIT_SHORT_HASH=$GIT_SHORT_HASH ENV GIT_BRANCH=$GIT_BRANCH ENV DEVICE_TYPE=$BOARD +ENV DJANGO_SETTINGS_MODULE="anthias_django.settings" RUN useradd -g video viewer diff --git a/docker/nginx/nginx.development.conf b/docker/nginx/nginx.development.conf new file mode 100644 index 000000000..25e7912e2 --- /dev/null +++ b/docker/nginx/nginx.development.conf @@ -0,0 +1,77 @@ +# vim: tabstop=4 shiftwidth=4 softtabstop=4 + +upstream anthias { + server anthias-server:8080; +} + +upstream websocket { + server anthias-websocket:9999; +} + +server { + server_tokens off; + listen 80 default_server; + listen [::]:80 default_server; + + location / { + + proxy_pass http://anthias; + + client_max_body_size 4G; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host anthias-server; + } + + location ~ ^/api/[0-9a-z]+/backup$ { + proxy_pass http://anthias; + proxy_connect_timeout 1800; + proxy_send_timeout 1800; + proxy_read_timeout 1800; + send_timeout 1800; + + client_max_body_size 4G; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host anthias-server; + } + + location /static { + proxy_pass http://anthias/static; + } + + location /ws { + proxy_pass http://websocket; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + } + + location /screenly_assets { + allow 172.16.0.0/12; + deny all; + + alias /data/screenly_assets; + } + + location /static_with_mime { + allow 10.0.0.0/8; + allow 172.16.0.0/12; + allow 192.168.0.0/16; + deny all; + + alias /data/screenly/staticfiles; + } +} + +server { + # Only allow from localhost and Docker's CIDR + allow 172.16.0.0/12; + allow 172.0.0.1; + deny all; + + server_name *.ngrok.io; + listen 80; + root /data/screenly_assets; + try_files $uri /data/screenly_assets$uri; +} diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.production.conf similarity index 76% rename from docker/nginx/nginx.conf rename to 
docker/nginx/nginx.production.conf index 22f3d96ec..7c551b91b 100644 --- a/docker/nginx/nginx.conf +++ b/docker/nginx/nginx.production.conf @@ -4,10 +4,6 @@ upstream anthias { server anthias-server:8080; } -upstream wifi-connect { - server 192.168.42.1:9090; -} - upstream websocket { server anthias-websocket:9999; } @@ -18,21 +14,12 @@ server { listen [::]:80 default_server; location / { - - # Temporarily disables wifi-connect proxy_pass http://anthias; - #if (-f /data/.screenly/initialized) { - # proxy_pass http://anthias; - #} - #if (!-f /data/.screenly/initialized) { - # proxy_pass http://wifi-connect; - #} - client_max_body_size 4G; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header Host $http_host; + proxy_set_header Host anthias-server; } location ~ ^/api/[0-9a-z]+/backup$ { @@ -45,11 +32,11 @@ server { client_max_body_size 4G; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header Host $http_host; + proxy_set_header Host anthias-server; } - location /static { - proxy_pass http://anthias/static; + location /static/ { + alias /data/screenly/staticfiles/; } location /ws { @@ -83,6 +70,6 @@ server { allow 192.168.0.0/16; deny all; - alias /data/screenly/static; + alias /data/screenly/staticfiles; } } diff --git a/docs/developer-documentation.md b/docs/developer-documentation.md index 79f8fa37a..cf3701852 100644 --- a/docs/developer-documentation.md +++ b/docs/developer-documentation.md @@ -78,10 +78,18 @@ Run the unit tests. ```bash $ docker compose \ -f docker-compose.test.yml \ - exec -T anthias-test bash ./bin/prepare_test_environment.sh -s + exec anthias-test bash ./bin/prepare_test_environment.sh -s + +# Integration and non-integration tests should be run separately as the +# former doesn't run as expected when run together with the latter. + +$ docker compose \ + -f docker-compose.test.yml \ + exec anthias-test ./manage.py test --exclude-tag=integration + $ docker compose \ -f docker-compose.test.yml \ - exec -T anthias-test nose2 -v + exec anthias-test ./manage.py test --tag=integration ``` ### The QA checklist diff --git a/lib/assets_helper.py b/lib/assets_helper.py deleted file mode 100644 index 8c68bc90a..000000000 --- a/lib/assets_helper.py +++ /dev/null @@ -1,163 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals -from builtins import filter -from . import db -from . import queries -import datetime - -FIELDS = [ - "asset_id", "name", "uri", "start_date", "end_date", "duration", - "mimetype", "is_enabled", "is_processing", "nocache", "play_order", - "skip_asset_check", -] - -create_assets_table = 'CREATE TABLE assets(asset_id text primary key, name text, uri text, md5 text, start_date timestamp, end_date timestamp, duration text, mimetype text, is_enabled integer default 0, is_processing integer default 0, nocache integer default 0, play_order integer default 0, skip_asset_check integer default 0)' # noqa: E501 - - -# Note all times are naive for legacy reasons but always UTC. -get_time = datetime.datetime.utcnow - - -def is_active(asset, at_time=None): - """Accepts an asset dictionary and determines if it - is active at the given time. If no time is specified, 'now' is used. 
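The split between unit and integration runs described in the developer documentation above relies on Django's test tagging; a minimal sketch of how a test case opts into the `integration` tag (the class name and assertion are made up for illustration):

```python
# Sketch: tagging a test so it is selected by --tag=integration and
# skipped by --exclude-tag=integration. The test body is illustrative.
from django.test import TestCase, tag


@tag('integration')
class EndpointSmokeTest(TestCase):
    def test_health(self):
        self.assertTrue(True)
```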
- - >>> asset = {'asset_id': u'4c8dbce552edb5812d3a866cfe5f159d', 'mimetype': u'web', 'name': u'WireLoad', 'end_date': datetime.datetime(2013, 1, 19, 23, 59), 'uri': u'http://www.wireload.net', 'duration': u'5', 'is_enabled': True, 'nocache': 0, 'play_order': 1, 'start_date': datetime.datetime(2013, 1, 16, 0, 0), 'skip_asset_check': 0}; - >>> is_active(asset, datetime.datetime(2013, 1, 16, 12, 00)) - True - >>> is_active(asset, datetime.datetime(2014, 1, 1)) - False - - >>> asset['is_enabled'] = False - >>> is_active(asset, datetime.datetime(2013, 1, 16, 12, 00)) - False - - """ # noqa: E501 - - if asset['is_enabled'] and asset['start_date'] and asset['end_date']: - at = at_time or get_time() - return 1 if asset['start_date'] < at < asset['end_date'] else 0 - return 0 - - -def get_names_of_assets(conn): - with db.cursor(conn) as c: - c.execute(queries.read_all(['name', ])) - return [asset[0] for asset in c.fetchall()] - - -def get_playlist(conn): - """Returns all currently active assets.""" - return list(filter(is_active, read(conn))) - - -def mkdict(keys): - """Returns a function that creates a dict from a database record.""" - return lambda row: dict([(keys[ki], v) for ki, v in enumerate(row)]) - - -def create(conn, asset): - """ - Create a database record for an asset. - Returns the asset. - Asset's is_active field is updated before returning. - """ - if 'is_active' in asset: - asset.pop('is_active') - with db.commit(conn) as c: - c.execute(queries.create(list(asset.keys())), list(asset.values())) - asset.update({'is_active': is_active(asset)}) - return asset - - -def create_multiple(conn, assets): - """ - Create a database record for each asset. - Returns asset list. - Asset's is_active field is updated before returning. - """ - - with db.commit(conn) as c: - for asset in assets: - if 'is_active' in asset: - asset.pop('is_active') - - c.execute(queries.create(list(asset.keys())), list(asset.values())) - - asset.update({'is_active': is_active(asset)}) - - return assets - - -def read(conn, asset_id=None, keys=FIELDS): - """ - Fetch one or more assets from the database. - Returns a list of dicts or one dict. - Assets' is_active field is updated before returning. - """ - assets = [] - mk = mkdict(keys) - with db.cursor(conn) as c: - if asset_id is None: - c.execute(queries.read_all(keys)) - else: - c.execute(queries.read(keys), [asset_id]) - assets = [mk(asset) for asset in c.fetchall()] - [asset.update({'is_active': is_active(asset)}) for asset in assets] - if asset_id and len(assets): - return assets[0] - return assets - - -def update(conn, asset_id, asset): - """ - Update an asset in the database. - Returns the asset. - Asset's asset_id and is_active field is updated before returning. - """ - if asset.get('asset_id'): - del asset['asset_id'] - if 'is_active' in asset: - asset.pop('is_active') - with db.commit(conn) as c: - c.execute( - queries.update(list(asset.keys())), - list(asset.values()) + [asset_id], - ) - asset.update({'asset_id': asset_id}) - if 'start_date' in asset: - asset.update({'is_active': is_active(asset)}) - return asset - - -def delete(conn, asset_id): - """Remove an asset from the database.""" - with db.commit(conn) as c: - c.execute(queries.remove, [asset_id]) - - -def save_ordering(db_conn, ids): - """ - Order assets. 
Move to last position assets which not presented - in list of id - """ - - if ids: - with db.commit(db_conn) as c: - c.execute( - queries.multiple_update_with_case(['play_order', ], len(ids)), - sum( - [ - [asset_id, play_order] - for play_order, asset_id in enumerate(ids) - ], - [], - ) + ids, - ) - - # Set the play order to a high value for all inactive assets. - with db.commit(db_conn) as c: - c.execute( - queries.multiple_update_not_in(['play_order', ], len(ids)), - [len(ids)] + ids, - ) diff --git a/lib/auth.py b/lib/auth.py index caffcec19..77738887f 100644 --- a/lib/auth.py +++ b/lib/auth.py @@ -1,16 +1,17 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- + from __future__ import unicode_literals +import hashlib +import os.path +from base64 import b64decode from builtins import str from builtins import object -from abc import ABCMeta, abstractmethod, abstractproperty +from abc import ABCMeta, abstractmethod from functools import wraps -import hashlib -import os.path - -from flask import request, Response from future.utils import with_metaclass + LINUX_USER = os.getenv('USER', 'pi') @@ -23,15 +24,14 @@ def authenticate(self): """ pass - @abstractproperty - def is_authenticated(self): + def is_authenticated(self, request): """ See if the user is authenticated for the request. :return: bool """ pass - def authenticate_if_needed(self): + def authenticate_if_needed(self, request): """ If the user performing the request is not authenticated, initiate authentication. @@ -39,14 +39,16 @@ def authenticate_if_needed(self): :return: a Response which initiates authentication or None if already authenticated. """ + from django.http import HttpResponse + try: - if not self.is_authenticated: + if not self.is_authenticated(request): return self.authenticate() except ValueError as e: - return Response( - "Authorization backend is unavailable: " + str(e), 503) + return HttpResponse( + "Authorization backend is unavailable: " + str(e), status=503) - def update_settings(self, current_pass_correct): + def update_settings(self, request, current_pass_correct): """ Submit updated values from Settings page. 
:param current_pass_correct: the value of "Current Password" field @@ -80,7 +82,7 @@ class NoAuth(Auth): name = '' config = {} - def is_authenticated(self): + def is_authenticated(self, request): return True def authenticate(self): @@ -118,27 +120,44 @@ def check_password(self, password): hashed_password = hashlib.sha256(password.encode('utf-8')).hexdigest() return self.settings['password'] == hashed_password - @property - def is_authenticated(self): - auth = request.authorization - return auth and self._check(auth.username, auth.password) + def is_authenticated(self, request): + authorization = request.headers.get('Authorization') + if not authorization: + return False + + content = authorization.split(' ') + + if len(content) != 2: + return False + + auth_type = content[0] + auth_data = content[1] + if auth_type == 'Basic': + auth_data = b64decode(auth_data).decode('utf-8') + auth_data = auth_data.split(':') + if len(auth_data) == 2: + username = auth_data[0] + password = auth_data[1] + return self._check(username, password) + return False @property def template(self): return 'auth_basic.html', {'user': self.settings['user']} def authenticate(self): - realm = "Anthias OSE {}".format(self.settings['player_name']) - return Response( + from django.http import HttpResponse + realm = "Anthias {}".format(self.settings['player_name']) + return HttpResponse( "Access denied", - 401, - {"WWW-Authenticate": 'Basic realm="{}"'.format(realm)}, + status=401, + headers={"WWW-Authenticate": 'Basic realm="{}"'.format(realm)} ) - def update_settings(self, current_pass_correct): - new_user = request.form.get('user', '') - new_pass = request.form.get('password', '').encode('utf-8') - new_pass2 = request.form.get('password2', '').encode('utf-8') + def update_settings(self, request, current_pass_correct): + new_user = request.POST.get('user', '') + new_pass = request.POST.get('password', '').encode('utf-8') + new_pass2 = request.POST.get('password2', '').encode('utf-8') new_pass = hashlib.sha256(new_pass).hexdigest() if new_pass else None new_pass2 = hashlib.sha256(new_pass2).hexdigest() if new_pass else None # Handle auth components @@ -179,17 +198,27 @@ def update_settings(self, current_pass_correct): def authorized(orig): - """ - Annotation which initiates authentication if the request is unauthorized. 
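The reworked `is_authenticated` above parses the `Authorization` header by hand instead of relying on Flask's `request.authorization`; a short sketch of the header a client would send (host and credentials are placeholders):

```python
# Sketch: building the HTTP Basic Authorization header that
# BasicAuth.is_authenticated expects. Credentials are placeholders.
from base64 import b64encode

import requests

credentials = b64encode(b"admin:secret").decode("utf-8")
response = requests.get(
    "http://anthias.local/api/v1/assets",
    headers={"Authorization": f"Basic {credentials}"},
)
print(response.status_code)
```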
- :param orig: Flask function - :return: Response - """ from settings import settings + from django.http import HttpRequest + from rest_framework.request import Request @wraps(orig) def decorated(*args, **kwargs): if not settings.auth: return orig(*args, **kwargs) - return settings.auth.authenticate_if_needed() or orig(*args, **kwargs) + + if len(args) == 0: + raise ValueError('No request object passed to decorated function') + + request = args[-1] + + if not isinstance(request, (HttpRequest, Request)): + raise ValueError( + 'Request object is not of type HttpRequest or Request') + + return ( + settings.auth.authenticate_if_needed(request) or + orig(*args, **kwargs) + ) return decorated diff --git a/lib/backup_helper.py b/lib/backup_helper.py index 962bbf753..74f4797b6 100644 --- a/lib/backup_helper.py +++ b/lib/backup_helper.py @@ -7,7 +7,7 @@ directories = ['.screenly', 'screenly_assets'] default_archive_name = "anthias-backup" -static_dir = "screenly/static" +static_dir = "screenly/staticfiles" def create_backup(name=default_archive_name): diff --git a/lib/db.py b/lib/db.py deleted file mode 100644 index f7e27f77f..000000000 --- a/lib/db.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import absolute_import -from __future__ import unicode_literals -import sqlite3 -from contextlib import contextmanager - - -def conn(db): - return sqlite3.connect(db, detect_types=sqlite3.PARSE_DECLTYPES) - - -@contextmanager -def cursor(connection): - cur = connection.cursor() - yield cur - cur.close() - - -@contextmanager -def commit(connection): - cur = connection.cursor() - yield cur - connection.commit() - cur.close() diff --git a/lib/diagnostics.py b/lib/diagnostics.py index 10569dbdf..dee79f5a3 100755 --- a/lib/diagnostics.py +++ b/lib/diagnostics.py @@ -4,11 +4,9 @@ from __future__ import unicode_literals from builtins import str import os -import sqlite3 from . import utils import cec from lib import device_helper -from pprint import pprint from datetime import datetime @@ -39,18 +37,6 @@ def get_uptime(): return uptime_seconds -def get_playlist(): - anthias_db = os.path.join(os.getenv('HOME'), '.screenly/screenly.db') - playlist = [] - if os.path.isfile(anthias_db): - conn = sqlite3.connect(anthias_db) - c = conn.cursor() - for row in c.execute('SELECT * FROM assets;'): - playlist.append(row) - c.close - return playlist - - def get_load_avg(): """ Returns load average rounded to two digits. @@ -114,30 +100,3 @@ def get_raspberry_code(): def get_raspberry_model(): return device_helper.parse_cpu_info().get('model', "Unknown") - - -def compile_report(): - """ - Compile report with various data points. - """ - report = {} - report['cpu_info'] = get_raspberry_code() - report['pi_model'] = get_raspberry_model() - report['uptime'] = get_uptime() - report['display_power'] = get_display_power() - report['playlist'] = get_playlist() - report['git_hash'] = get_git_hash() - report['connectivity'] = try_connectivity() - report['loadavg'] = get_load_avg() - report['utc_isodate'] = get_utc_isodate() - report['debian_version'] = get_debian_version() - - return report - - -def main(): - pprint(compile_report()) - - -if __name__ == "__main__": - main() diff --git a/lib/queries.py b/lib/queries.py deleted file mode 100644 index 5b5f498bb..000000000 --- a/lib/queries.py +++ /dev/null @@ -1,48 +0,0 @@ -# flake8: noqa - -from __future__ import unicode_literals - -comma = ','.join - - -def quest(values): - return '=?,'.join(values) + '=?' 
- - -def quest_2(values, c): - return ', '.join([('%s=CASE ' % x) + ("WHEN asset_id=? THEN ? " * c) + 'ELSE asset_id END' for x in values]) - - -exists_table = "SELECT name FROM sqlite_master WHERE type='table' AND name='assets'" - - -def read_all(keys): - return 'select ' + comma(keys) + ' from assets order by play_order' - - -def read(keys): - return 'select ' + comma(keys) + ' from assets where asset_id=?' - - -def create(keys): - return 'insert into assets (' + comma(keys) + ') values (' + comma(['?'] * len(keys)) + ')' - - -remove = 'delete from assets where asset_id=?' - - -def update(keys): - return 'update assets set ' + quest(keys) + ' where asset_id=?' - - -def multiple_update(keys, count): - return 'UPDATE assets SET ' + quest(keys) + ' WHERE asset_id IN (' + comma(['?'] * count) + ')' - - -def multiple_update_not_in(keys, count): - return 'UPDATE assets SET ' + quest(keys) + ' WHERE asset_id NOT IN (' + comma(['?'] * count) + ')' - - -def multiple_update_with_case(keys, count): - return 'UPDATE assets SET ' + quest_2(keys, count) + \ - ' WHERE asset_id IN (' + comma(['?'] * count) + ')' diff --git a/lib/utils.py b/lib/utils.py index 464ed7c3a..d66020bb1 100644 --- a/lib/utils.py +++ b/lib/utils.py @@ -4,7 +4,6 @@ from builtins import str from builtins import range import certifi -from . import db import json import logging import os @@ -26,7 +25,7 @@ from time import sleep from urllib.parse import urlparse -from .assets_helper import update +from anthias_app.models import Asset standard_library.install_aliases() @@ -378,9 +377,14 @@ def __init__(self, location, uri, asset_id): def run(self): publisher = ZmqPublisher.get_instance() call(['yt-dlp', '-f', 'mp4', '-o', self.location, self.uri]) - with db.conn(settings['database']) as conn: - update(conn, self.asset_id, - {'asset_id': self.asset_id, 'is_processing': 0}) + + try: + asset = Asset.objects.get(asset_id=self.asset_id) + asset.is_processing = 0 + asset.save() + except Asset.DoesNotExist: + logging.warning('Asset %s not found', self.asset_id) + return publisher.send_to_ws_server(self.asset_id) diff --git a/manage.py b/manage.py new file mode 100755 index 000000000..73e0dd8af --- /dev/null +++ b/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anthias_django.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" 
+ ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/nose2.cfg b/nose2.cfg deleted file mode 100644 index 0adf5ae9c..000000000 --- a/nose2.cfg +++ /dev/null @@ -1,3 +0,0 @@ -[unittest] -test-file-pattern = test_*.py -plugins = nose2.plugins.attrib diff --git a/requirements/requirements-websocket.txt b/requirements/requirements-websocket.txt index 462e15cc2..90de25e28 100644 --- a/requirements/requirements-websocket.txt +++ b/requirements/requirements-websocket.txt @@ -1,10 +1,7 @@ Cython==0.29.33 -Flask==2.2.5 future==0.18.3 gevent-websocket==0.10.1 gevent==24.2.1 -itsdangerous==2.0.1 Jinja2==3.1.4 pytz==2022.2.1 pyzmq==23.2.1 -Werkzeug==2.2.3 diff --git a/requirements/requirements.dev.txt b/requirements/requirements.dev.txt index 8d8890856..aecbf9115 100644 --- a/requirements/requirements.dev.txt +++ b/requirements/requirements.dev.txt @@ -1,6 +1,5 @@ future==0.18.3 mock==3.0.5 -nose2==0.15.1 pep8==1.7.1 selenium==3.141.0 splinter==0.14.0 diff --git a/requirements/requirements.txt b/requirements/requirements.txt index 5a545c116..f43ee8064 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -2,22 +2,21 @@ cec==0.2.8 celery==5.2.2 certifi==2024.7.4 cffi==1.14.4 -click==8.1.7 configparser==4.0.2 cryptography==3.3.2 Cython==0.29.33 -Flask-Cors==5.0.0 -flask-restful-swagger-2==0.35 -flask-swagger-ui==3.36.0 -Flask==2.2.5 +Django==3.2.18 +djangorestframework==3.14.0 +django-dbbackup==4.2.1 +drf-spectacular==0.27.2 future==0.18.3 gevent-websocket==0.10.1 gevent==24.2.1 gunicorn==22.0.0 hurry.filesize==0.9 importlib-metadata==4.13.0 -itsdangerous==2.0.1 Jinja2==3.1.4 +jsonschema==4.17.3 # This is the latest version that doesn't require Rust and Cargo. kombu==5.2.4 Mako==1.2.2 netifaces==0.10.9 @@ -35,6 +34,5 @@ tenacity==9.0.0 sh==1.8 six==1.15.0 urllib3==1.26.19 -Werkzeug==2.2.3 wheel==0.38.1 yt-dlp==2024.7.7 diff --git a/requirements/requirements.viewer.txt b/requirements/requirements.viewer.txt index cac4b7672..d0724fe84 100644 --- a/requirements/requirements.viewer.txt +++ b/requirements/requirements.viewer.txt @@ -5,11 +5,13 @@ certifi==2024.7.4 configparser==4.0.2 cryptography==3.3.2 Cython==0.29.33 -Flask==2.2.5 +Django==3.2.18 +django-dbbackup==4.2.1 +drf-spectacular==0.27.2 future==0.18.3 idna==3.7 -itsdangerous==2.0.1 Jinja2==3.1.4 +jsonschema==4.17.3 # This is the latest version that doesn't require Rust and Cargo. 
netifaces==0.10.9 pydbus==0.6.0 python-dateutil==2.8.1 @@ -22,4 +24,3 @@ tenacity==9.0.0 sh==1.8 uptime==3.0.1 urllib3==1.26.19 -Werkzeug==2.2.3 diff --git a/run_gunicorn.py b/run_gunicorn.py new file mode 100644 index 000000000..558cd6d29 --- /dev/null +++ b/run_gunicorn.py @@ -0,0 +1,19 @@ +from gunicorn.app.base import Application +from anthias_django import wsgi +from settings import LISTEN, PORT + + +class GunicornApplication(Application): + def init(self, parser, opts, args): + return { + 'bind': f'{LISTEN}:{PORT}', + 'threads': 2, + 'timeout': 20, + } + + def load(self): + return wsgi.application + + +if __name__ == '__main__': + GunicornApplication().run() diff --git a/server.py b/server.py deleted file mode 100755 index f16459d8a..000000000 --- a/server.py +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -from __future__ import unicode_literals -from future import standard_library -__author__ = "Screenly, Inc" -__copyright__ = "Copyright 2012-2023, Screenly, Inc" -__license__ = "Dual License: GPLv2 and Commercial License" - -from os import getenv, path, stat - -from flask import ( - Flask, - make_response, - request, - send_from_directory, - url_for, -) -from flask_cors import CORS -from flask_restful_swagger_2 import Api -from flask_swagger_ui import get_swaggerui_blueprint -from gunicorn.app.base import Application - -from api.views.v1 import ( - Asset, - AssetContent, - Assets, - AssetsControl, - Backup, - FileAsset, - Info, - PlaylistOrder, - Reboot, - Recover, - Shutdown, - ViewerCurrentAsset, -) -from api.views.v1_1 import ( - AssetV1_1, - AssetsV1_1, -) -from api.views.v1_2 import ( - AssetV1_2, - AssetsV1_2, -) - - -from lib import assets_helper -from lib import db -from lib import queries - -from lib.auth import authorized -from lib.utils import ( - json_dump, - get_node_ip, - connect_to_redis, -) -from anthias_app.views import anthias_app_bp -from settings import LISTEN, PORT, settings - - -standard_library.install_aliases() - -HOME = getenv('HOME') - -app = Flask(__name__) -app.register_blueprint(anthias_app_bp) - -CORS(app) -api = Api(app, api_version="v1", title="Anthias API") - -r = connect_to_redis() - - -################################ -# Utilities -################################ - - -@api.representation('application/json') -def output_json(data, code, headers=None): - response = make_response(json_dump(data), code) - response.headers.extend(headers or {}) - return response - - -################################ -# API -################################ - - -api.add_resource(Assets, '/api/v1/assets') -api.add_resource(Asset, '/api/v1/assets/') -api.add_resource(AssetsV1_1, '/api/v1.1/assets') -api.add_resource(AssetV1_1, '/api/v1.1/assets/') -api.add_resource(AssetsV1_2, '/api/v1.2/assets') -api.add_resource(AssetV1_2, '/api/v1.2/assets/') -api.add_resource(AssetContent, '/api/v1/assets//content') -api.add_resource(FileAsset, '/api/v1/file_asset') -api.add_resource(PlaylistOrder, '/api/v1/assets/order') -api.add_resource(Backup, '/api/v1/backup') -api.add_resource(Recover, '/api/v1/recover') -api.add_resource(AssetsControl, '/api/v1/assets/control/') -api.add_resource(Info, '/api/v1/info') -api.add_resource(Reboot, '/api/v1/reboot') -api.add_resource(Shutdown, '/api/v1/shutdown') -api.add_resource(ViewerCurrentAsset, '/api/v1/viewer_current_asset') - -try: - my_ip = get_node_ip() -except Exception: - pass -else: - SWAGGER_URL = '/api/docs' - API_URL = "/api/swagger.json" - - swaggerui_blueprint = get_swaggerui_blueprint( - 
SWAGGER_URL, - API_URL, - config={ - 'app_name': "Anthias API" - } - ) - app.register_blueprint(swaggerui_blueprint, url_prefix=SWAGGER_URL) - - -@app.errorhandler(403) -def mistake403(code): - return 'The parameter you passed has the wrong format!' - - -@app.errorhandler(404) -def mistake404(code): - return 'Sorry, this page does not exist!' - - -################################ -# Static -################################ - - -@app.context_processor -def override_url_for(): - return dict(url_for=dated_url_for) - - -def dated_url_for(endpoint, **values): - if endpoint == 'static': - filename = values.get('filename', None) - if filename: - file_path = path.join(app.root_path, - endpoint, filename) - if path.isfile(file_path): - values['q'] = int(stat(file_path).st_mtime) - return url_for(endpoint, **values) - - -@app.route('/static_with_mime/') -@authorized -def static_with_mime(path): - mimetype = request.args['mime'] if 'mime' in request.args else 'auto' - return send_from_directory( - directory='static', filename=path, mimetype=mimetype) - - -@app.before_first_request -def main(): - with db.conn(settings['database']) as conn: - with db.cursor(conn) as cursor: - cursor.execute(queries.exists_table) - if cursor.fetchone() is None: - cursor.execute(assets_helper.create_assets_table) - - -def is_development(): - return getenv('ENVIRONMENT', '') == 'development' - - -if __name__ == "__main__" and not is_development(): - config = { - 'bind': '{}:{}'.format(LISTEN, PORT), - 'threads': 2, - 'timeout': 20 - } - - class GunicornApplication(Application): - def init(self, parser, opts, args): - return config - - def load(self): - return app - - GunicornApplication().run() diff --git a/settings.py b/settings.py index 4521c1532..547145949 100644 --- a/settings.py +++ b/settings.py @@ -1,7 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import unicode_literals -from future import standard_library from builtins import str from builtins import object import hashlib @@ -17,7 +16,6 @@ from lib.auth import BasicAuth, NoAuth from lib.errors import ZmqCollectorTimeout -standard_library.install_aliases() CONFIG_DIR = '.screenly/' CONFIG_FILE = 'screenly.conf' @@ -30,12 +28,13 @@ 'use_24_hour_clock': False, 'use_ssl': False, 'auth_backend': '', - 'websocket_port': '9999' + 'websocket_port': '9999', + 'django_secret_key': '' }, 'viewer': { 'audio_output': 'hdmi', 'debug_logging': False, - 'default_duration': '10', + 'default_duration': 10, 'default_streaming_duration': '300', 'player_name': '', 'resolution': '1920x1080', diff --git a/static/img/anthias-loading.png b/static/img/anthias-loading.png new file mode 100644 index 000000000..218217454 Binary files /dev/null and b/static/img/anthias-loading.png differ diff --git a/static/js/settings.coffee b/static/js/settings.coffee index c638acbf9..aee79d619 100644 --- a/static/js/settings.coffee +++ b/static/js/settings.coffee @@ -12,13 +12,13 @@ $().ready -> $.ajax({ method: "POST" - url: "api/v1/backup" + url: "/api/v1/backup" timeout: 1800 * 1000 }) .done (data, e) -> if (data) - window.location = "static_with_mime/" + data + "?mime=application/x-tgz" + window.location = "/static_with_mime/" + data + "?mime=application/x-tgz" .fail (data, e) -> $("#request-error .alert").addClass "alert-danger" @@ -40,7 +40,7 @@ $().ready -> $("[name='backup_upload']").click() $("[name='backup_upload']").fileupload - url: "api/v1/recover" + url: "/api/v1/recover" progressall: (e, data) -> if data.loaded and data.total valuenow = data.loaded/data.total*100 
$(".progress .bar").css "width", valuenow + "%" diff --git a/static/js/settings.js b/static/js/settings.js index f461093f6..88e176736 100644 --- a/static/js/settings.js +++ b/static/js/settings.js @@ -13,11 +13,11 @@ $("#btn-backup").prop("disabled", true); return $.ajax({ method: "POST", - url: "api/v1/backup", + url: "/api/v1/backup", timeout: 1800 * 1000 }).done(function(data, e) { if (data) { - return window.location = "static_with_mime/" + data + "?mime=application/x-tgz"; + return window.location = "/static_with_mime/" + data + "?mime=application/x-tgz"; } }).fail(function(data, e) { var err, j; @@ -40,7 +40,7 @@ return $("[name='backup_upload']").click(); }); $("[name='backup_upload']").fileupload({ - url: "api/v1/recover", + url: "/api/v1/recover", progressall: function(e, data) { var valuenow; if (data.loaded && data.total) { diff --git a/static/js/settings.js.map b/static/js/settings.js.map index dde8e886c..9d4527aed 100644 --- a/static/js/settings.js.map +++ b/static/js/settings.js.map @@ -6,5 +6,5 @@ "static/js/settings.coffee" ], "names": [], - "mappings": ";AAAA;EAAA,CAAA,CAAA,CAAG,CAAC,KAAJ,CAAU,SAAA;AAER,QAAA;IAAA,CAAA,CAAE,uBAAF,CAA0B,CAAC,KAA3B,CAAiC,SAAC,CAAD;aAC/B,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;IAD+B,CAAjC;IAGA,CAAA,CAAE,aAAF,CAAgB,CAAC,KAAjB,CAAuB,SAAC,CAAD;AACrB,UAAA;MAAA,OAAA,GAAU,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;MACV,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,sBAAtB;MAEA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,IAAlC;MACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,IAAlC;aAEA,CAAC,CAAC,IAAF,CAAO;QACL,MAAA,EAAQ,MADH;QAEL,GAAA,EAAK,eAFA;QAGL,OAAA,EAAS,IAAA,GAAO,IAHX;OAAP,CAMA,CAAC,IAND,CAMO,SAAC,IAAD,EAAO,CAAP;QACL,IAAI,IAAJ;iBACE,MAAM,CAAC,QAAP,GAAkB,mBAAA,GAAsB,IAAtB,GAA6B,0BADjD;;MADK,CANP,CAUA,CAAC,IAVD,CAUO,SAAC,IAAD,EAAO,CAAP;AACL,YAAA;QAAA,CAAA,CAAE,uBAAF,CAA0B,CAAC,QAA3B,CAAoC,cAApC;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,WAA3B,CAAuC,eAAvC;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;QACA,IAAG,CAAC,IAAI,CAAC,YAAL,KAAqB,EAAtB,CAAA,IAA8B,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,YAAjB,CAAL,CAA9B,IAAsE,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAAzE;iBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;SAAA,MAAA;iBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;MAJK,CAVP,CAmBA,CAAC,MAnBD,CAmBQ,SAAC,IAAD,EAAO,CAAP;QACN,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,OAAtB;QACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,KAAlC;eACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,KAAlC;MAHM,CAnBR;IAPqB,CAAvB;IAgCA,CAAA,CAAE,aAAF,CAAgB,CAAC,KAAjB,CAAuB,SAAC,CAAD;MACrB,CAAC,CAAC,cAAF,CAAA;aACA,CAAA,CAAE,wBAAF,CAA2B,CAAC,KAA5B,CAAA;IAFqB,CAAvB;IAIA,CAAA,CAAE,wBAAF,CAA2B,CAAC,UAA5B,CACE;MAAA,GAAA,EAAK,gBAAL;MACA,WAAA,EAAa,SAAC,CAAD,EAAI,IAAJ;AAAa,YAAA;QAAA,IAAG,IAAI,CAAC,MAAL,IAAgB,IAAI,CAAC,KAAxB;UACxB,QAAA,GAAW,IAAI,CAAC,MAAL,GAAY,IAAI,CAAC,KAAjB,GAAuB;UAClC,CAAA,CAAE,gBAAF,CAAmB,CAAC,GAApB,CAAwB,OAAxB,EAAiC,QAAA,GAAW,GAA5C;iBACA,CAAA,CAAE,gBAAF,CAAmB,CAAC,IAApB,CAAyB,aAAA,GAAgB,IAAI,CAAC,KAAL,CAAW,QAAX,CAAhB,GAAuC,GAAhE,EAHwB;;MAAb,CADb;MAKA,GAAA,EAAK,SAAC,CAAD,EAAI,IAAJ;QACH,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;QACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;QACA,CAAA,CAAE,WAAF,CAAc,CAAC,IAAf,CAAA;eAEA,IAAI,CAAC,MAAL,CAAA;MALG,CALL;MAWA,IAAA,EAAM,SAAC,CAAD,EAAI,IAAJ;AACJ,YAAA;QAAA,IAAG,CAAC,IAAI,CAAC,KAAK,CAAC,YAAX,KAA2B,EAA5B,CAAA,IAAoC,CAAC,OAAA,GAAU,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,KAAK,CAAC,YAAvB,CAAX,CAAvC;UACE,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;UACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,QAA3B,CAAoC,eAApC;UACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,
WAA3B,CAAuC,cAAvC;iBACA,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,OAA/B,EAJF;;MADI,CAXN;MAiBA,IAAA,EAAM,SAAC,CAAD,EAAI,IAAJ;AACJ,YAAA;QAAA,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,QAA3B,CAAoC,cAApC;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,WAA3B,CAAuC,eAAvC;QACA,IAAG,CAAC,IAAI,CAAC,KAAK,CAAC,YAAX,KAA2B,EAA5B,CAAA,IAAoC,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,KAAK,CAAC,YAAvB,CAAL,CAApC,IAAkF,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAArF;iBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;SAAA,MAAA;iBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;MAJI,CAjBN;MAyBA,MAAA,EAAQ,SAAC,CAAD,EAAI,IAAJ;QACN,CAAA,CAAE,WAAF,CAAc,CAAC,IAAf,CAAA;QACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;eACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;MAHM,CAzBR;KADF;IA+BA,CAAA,CAAE,oBAAF,CAAuB,CAAC,KAAxB,CAA8B,SAAC,CAAD;MAC5B,IAAG,OAAA,CAAQ,8CAAR,CAAH;eACE,CAAC,CAAC,IAAF,CAAO,gBAAP,CACA,CAAC,IADD,CACO,SAAC,CAAD;UACL,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,eAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,cAAxC;iBACA,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,kCAA/B;QAJK,CADP,CAMA,CAAC,IAND,CAMM,SAAC,IAAD,EAAO,CAAP;AACJ,cAAA;UAAA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,cAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,eAAxC;UACA,IAAG,CAAC,IAAI,CAAC,YAAL,KAAqB,EAAtB,CAAA,IAA8B,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,YAAjB,CAAL,CAA9B,IAAsE,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAAzE;mBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;WAAA,MAAA;mBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;QAJI,CANN,EADF;;IAD4B,CAA9B;IAiBA,CAAA,CAAE,sBAAF,CAAyB,CAAC,KAA1B,CAAgC,SAAC,CAAD;MAC9B,IAAG,OAAA,CAAQ,gDAAR,CAAH;eACE,CAAC,CAAC,IAAF,CAAO,kBAAP,CACA,CAAC,IADD,CACO,SAAC,CAAD;UACL,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,eAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,cAAxC;iBACA,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6GAA/B;QAJK,CADP,CAMA,CAAC,IAND,CAMM,SAAC,IAAD,EAAO,CAAP;AACJ,cAAA;UAAA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,cAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,eAAxC;UACA,IAAG,CAAC,IAAI,CAAC,YAAL,KAAqB,EAAtB,CAAA,IAA8B,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,YAAjB,CAAL,CAA9B,IAAsE,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAAzE;mBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;WAAA,MAAA;mBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;QAJI,CANN,EADF;;IAD8B,CAAhC;IAiBA,YAAA,GAAe,SAAA;MACb,CAAA,CAAE,kBAAF,CAAqB,CAAC,IAAtB,CAAA;aACA,CAAC,CAAC,IAAF,CAAO,CAAA,CAAE,sBAAF,CAAP,EAAkC,SAAC,CAAD,EAAI,CAAJ;eAChC,CAAA,CAAE,gBAAA,GAAiB,CAAC,CAAC,KAArB,CAA2B,CAAC,MAA5B,CAAmC,CAAA,CAAE,eAAF,CAAkB,CAAC,GAAnB,CAAA,CAAA,KAA4B,CAAC,CAAC,KAAjE;MADgC,CAAlC;IAFa;IAKf,CAAA,CAAE,eAAF,CAAkB,CAAC,MAAnB,CAA0B,SAAC,CAAD;aACxB,YAAA,CAAA;IADwB,CAA1B;WAGA,YAAA,CAAA;EAlHQ,CAAV;AAAA" + "mappings": 
";AAAA;EAAA,CAAA,CAAA,CAAG,CAAC,KAAJ,CAAU,SAAA;AAER,QAAA;IAAA,CAAA,CAAE,uBAAF,CAA0B,CAAC,KAA3B,CAAiC,SAAC,CAAD;aAC/B,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;IAD+B,CAAjC;IAGA,CAAA,CAAE,aAAF,CAAgB,CAAC,KAAjB,CAAuB,SAAC,CAAD;AACrB,UAAA;MAAA,OAAA,GAAU,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;MACV,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,sBAAtB;MAEA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,IAAlC;MACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,IAAlC;aAEA,CAAC,CAAC,IAAF,CAAO;QACL,MAAA,EAAQ,MADH;QAEL,GAAA,EAAK,gBAFA;QAGL,OAAA,EAAS,IAAA,GAAO,IAHX;OAAP,CAMA,CAAC,IAND,CAMO,SAAC,IAAD,EAAO,CAAP;QACL,IAAI,IAAJ;iBACE,MAAM,CAAC,QAAP,GAAkB,oBAAA,GAAuB,IAAvB,GAA8B,0BADlD;;MADK,CANP,CAUA,CAAC,IAVD,CAUO,SAAC,IAAD,EAAO,CAAP;AACL,YAAA;QAAA,CAAA,CAAE,uBAAF,CAA0B,CAAC,QAA3B,CAAoC,cAApC;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,WAA3B,CAAuC,eAAvC;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;QACA,IAAG,CAAC,IAAI,CAAC,YAAL,KAAqB,EAAtB,CAAA,IAA8B,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,YAAjB,CAAL,CAA9B,IAAsE,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAAzE;iBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;SAAA,MAAA;iBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;MAJK,CAVP,CAmBA,CAAC,MAnBD,CAmBQ,SAAC,IAAD,EAAO,CAAP;QACN,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,OAAtB;QACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,KAAlC;eACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAsB,UAAtB,EAAkC,KAAlC;MAHM,CAnBR;IAPqB,CAAvB;IAgCA,CAAA,CAAE,aAAF,CAAgB,CAAC,KAAjB,CAAuB,SAAC,CAAD;MACrB,CAAC,CAAC,cAAF,CAAA;aACA,CAAA,CAAE,wBAAF,CAA2B,CAAC,KAA5B,CAAA;IAFqB,CAAvB;IAIA,CAAA,CAAE,wBAAF,CAA2B,CAAC,UAA5B,CACE;MAAA,GAAA,EAAK,iBAAL;MACA,WAAA,EAAa,SAAC,CAAD,EAAI,IAAJ;AAAa,YAAA;QAAA,IAAG,IAAI,CAAC,MAAL,IAAgB,IAAI,CAAC,KAAxB;UACxB,QAAA,GAAW,IAAI,CAAC,MAAL,GAAY,IAAI,CAAC,KAAjB,GAAuB;UAClC,CAAA,CAAE,gBAAF,CAAmB,CAAC,GAApB,CAAwB,OAAxB,EAAiC,QAAA,GAAW,GAA5C;iBACA,CAAA,CAAE,gBAAF,CAAmB,CAAC,IAApB,CAAyB,aAAA,GAAgB,IAAI,CAAC,KAAL,CAAW,QAAX,CAAhB,GAAuC,GAAhE,EAHwB;;MAAb,CADb;MAKA,GAAA,EAAK,SAAC,CAAD,EAAI,IAAJ;QACH,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;QACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;QACA,CAAA,CAAE,WAAF,CAAc,CAAC,IAAf,CAAA;eAEA,IAAI,CAAC,MAAL,CAAA;MALG,CALL;MAWA,IAAA,EAAM,SAAC,CAAD,EAAI,IAAJ;AACJ,YAAA;QAAA,IAAG,CAAC,IAAI,CAAC,KAAK,CAAC,YAAX,KAA2B,EAA5B,CAAA,IAAoC,CAAC,OAAA,GAAU,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,KAAK,CAAC,YAAvB,CAAX,CAAvC;UACE,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;UACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,QAA3B,CAAoC,eAApC;UACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,WAA3B,CAAuC,cAAvC;iBACA,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,OAA/B,EAJF;;MADI,CAXN;MAiBA,IAAA,EAAM,SAAC,CAAD,EAAI,IAAJ;AACJ,YAAA;QAAA,CAAA,CAAE,uBAAF,CAA0B,CAAC,IAA3B,CAAA;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,QAA3B,CAAoC,cAApC;QACA,CAAA,CAAE,uBAAF,CAA0B,CAAC,WAA3B,CAAuC,eAAvC;QACA,IAAG,CAAC,IAAI,CAAC,KAAK,CAAC,YAAX,KAA2B,EAA5B,CAAA,IAAoC,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,KAAK,CAAC,YAAvB,CAAL,CAApC,IAAkF,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAArF;iBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;SAAA,MAAA;iBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;MAJI,CAjBN;MAyBA,MAAA,EAAQ,SAAC,CAAD,EAAI,IAAJ;QACN,CAAA,CAAE,WAAF,CAAc,CAAC,IAAf,CAAA;QACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;eACA,CAAA,CAAE,aAAF,CAAgB,CAAC,IAAjB,CAAA;MAHM,CAzBR;KADF;IA+BA,CAAA,CAAE,oBAAF,CAAuB,CAAC,KAAxB,CAA8B,SAAC,CAAD;MAC5B,IAAG,OAAA,CAAQ,8CAAR,CAAH;eACE,CAAC,CAAC,IAAF,CAAO,gBAAP,CACA,CAAC,IADD,CACO,SAAC,CAAD;UACL,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,eAArC;UACA,CAAC,CAAA,CAAE,
uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,cAAxC;iBACA,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,kCAA/B;QAJK,CADP,CAMA,CAAC,IAND,CAMM,SAAC,IAAD,EAAO,CAAP;AACJ,cAAA;UAAA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,cAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,eAAxC;UACA,IAAG,CAAC,IAAI,CAAC,YAAL,KAAqB,EAAtB,CAAA,IAA8B,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,YAAjB,CAAL,CAA9B,IAAsE,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAAzE;mBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;WAAA,MAAA;mBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;QAJI,CANN,EADF;;IAD4B,CAA9B;IAiBA,CAAA,CAAE,sBAAF,CAAyB,CAAC,KAA1B,CAAgC,SAAC,CAAD;MAC9B,IAAG,OAAA,CAAQ,gDAAR,CAAH;eACE,CAAC,CAAC,IAAF,CAAO,kBAAP,CACA,CAAC,IADD,CACO,SAAC,CAAD;UACL,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,eAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,cAAxC;iBACA,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6GAA/B;QAJK,CADP,CAMA,CAAC,IAND,CAMM,SAAC,IAAD,EAAO,CAAP;AACJ,cAAA;UAAA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,IAA5B,CAAA;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,QAA5B,CAAqC,cAArC;UACA,CAAC,CAAA,CAAE,uBAAF,CAAD,CAA2B,CAAC,WAA5B,CAAwC,eAAxC;UACA,IAAG,CAAC,IAAI,CAAC,YAAL,KAAqB,EAAtB,CAAA,IAA8B,CAAC,CAAA,GAAI,CAAC,CAAC,SAAF,CAAY,IAAI,CAAC,YAAjB,CAAL,CAA9B,IAAsE,CAAC,GAAA,GAAM,CAAC,CAAC,KAAT,CAAzE;mBACE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,gBAAA,GAAmB,GAAlD,EADF;WAAA,MAAA;mBAGE,CAAC,CAAA,CAAE,qBAAF,CAAD,CAAyB,CAAC,IAA1B,CAA+B,6DAA/B,EAHF;;QAJI,CANN,EADF;;IAD8B,CAAhC;IAiBA,YAAA,GAAe,SAAA;MACb,CAAA,CAAE,kBAAF,CAAqB,CAAC,IAAtB,CAAA;aACA,CAAC,CAAC,IAAF,CAAO,CAAA,CAAE,sBAAF,CAAP,EAAkC,SAAC,CAAD,EAAI,CAAJ;eAChC,CAAA,CAAE,gBAAA,GAAiB,CAAC,CAAC,KAArB,CAA2B,CAAC,MAA5B,CAAmC,CAAA,CAAE,eAAF,CAAkB,CAAC,GAAnB,CAAA,CAAA,KAA4B,CAAC,CAAC,KAAjE;MADgC,CAAlC;IAFa;IAKf,CAAA,CAAE,eAAF,CAAkB,CAAC,MAAnB,CAA0B,SAAC,CAAD;aACxB,YAAA,CAAA;IADwB,CAA1B;WAGA,YAAA,CAAA;EAlHQ,CAAV;AAAA" } \ No newline at end of file diff --git a/templates/auth_basic.html b/templates/auth_basic.html index 39ef02a8d..646c7ec81 100644 --- a/templates/auth_basic.html +++ b/templates/auth_basic.html @@ -1,6 +1,6 @@
- +
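The reworked authorized decorator in lib/auth.py above no longer relies on Flask's request global: it pulls the request out of the wrapped view's last positional argument and rejects anything that is not a Django HttpRequest or a DRF Request. A minimal sketch of a view written against that contract follows; the view class and payload are hypothetical, used only to illustrate the call shape, and are not an endpoint from this change set.

from rest_framework.response import Response
from rest_framework.views import APIView

from lib.auth import authorized


class ExampleProtectedView(APIView):
    # Hypothetical view, for illustration only.
    @authorized
    def get(self, request):
        # The decorator reads the request from args[-1], so it must arrive as
        # a positional argument (DRF passes it that way by default). With
        # BasicAuth enabled, a missing or malformed Authorization header makes
        # authenticate_if_needed() return the 401 built in authenticate(), and
        # this body never runs.
        return Response({'status': 'ok'})

On the wire, a client is expected to send an "Authorization: Basic <base64 of user:password>" header, which is_authenticated() splits, base64-decodes, and checks against the SHA-256 password hash stored in the settings.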
diff --git a/templates/head.html b/templates/head.html index 89f625302..4291b76b1 100644 --- a/templates/head.html +++ b/templates/head.html @@ -1,3 +1,6 @@ +{# vim: ft=htmldjango #} +{% load static %} + {% if context.player_name %} @@ -29,13 +32,13 @@ - + - + - + {% if context.is_demo %} diff --git a/templates/header.html b/templates/header.html index 34f6354eb..016838976 100644 --- a/templates/header.html +++ b/templates/header.html @@ -1,3 +1,7 @@ +{# vim: ft=htmldjango #} + +{% load static %} +
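The head.html and header.html changes above switch the templates over to Django's staticfiles framework ({% load static %}), and lib/backup_helper.py now points its static_dir at screenly/staticfiles instead of screenly/static. The project settings module is not part of this excerpt, so the following is only a sketch, under the assumption that anthias_django/settings.py wires up staticfiles in the usual way; names and values are illustrative, not taken from the commit.

from pathlib import Path

# Hypothetical excerpt of anthias_django/settings.py (not shown in this diff).
BASE_DIR = Path(__file__).resolve().parent.parent

# URL prefix used by the {% static %} template tag.
STATIC_URL = '/static/'

# Where `./manage.py collectstatic` copies assets; this would line up with the
# new static_dir = "screenly/staticfiles" in lib/backup_helper.py.
STATIC_ROOT = BASE_DIR / 'staticfiles'

# Source directories collectstatic pulls from (assumption).
STATICFILES_DIRS = [BASE_DIR / 'static']

With settings along these lines, running ./manage.py collectstatic would populate the staticfiles directory that static_dir now points at.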