Commit

reimport of MDU in admin
beda42 committed May 13, 2022
1 parent 3e45771 commit ca132e3
Showing 8 changed files with 76 additions and 14 deletions.
2 changes: 0 additions & 2 deletions apps/export/tests/test_api.py
@@ -1,5 +1,3 @@
import json
from base64 import b64encode
from unittest.mock import patch

import pytest
29 changes: 27 additions & 2 deletions apps/logs/admin.py
@@ -6,7 +6,8 @@
from modeltranslation.admin import TranslationAdmin

from . import models
from .models import ReportInterestMetric
from .models import ReportInterestMetric, MduState
from .tasks import reprocess_mdu_task, import_manual_upload_data


@admin.register(models.OrganizationPlatform)
@@ -252,7 +253,7 @@ class ManualDataUploadAdmin(admin.ModelAdmin):
'user__username',
'user__email',
]
actions = ['regenerate_preflight']
actions = ['regenerate_preflight', 'reimport']

@admin.action(description="Regenerate preflight data")
def regenerate_preflight(self, request, queryset):
@@ -269,3 +270,27 @@ def regenerate_preflight(self, request, queryset):
% count,
messages.SUCCESS,
)

@admin.action(
description="Reimport data - deletes old and re-imports the file again. "
"Runs in the background"
)
def reimport(self, request, queryset):
count = 0
total_count = queryset.count()
for mdu in queryset.select_for_update(skip_locked=True, of=('self',)):
mdu.unprocess()
# import_manual_upload_data does not work without the state being `IMPORTING`
mdu.state = MduState.IMPORTING
mdu.save()
import_manual_upload_data.apply_async(args=(mdu.pk, mdu.user_id), countdown=2)
count += 1

already_running_text = (
f" ({total_count - count} imports already running.)" if total_count != count else ""
)
messages.info(
request,
f'{count} uploads planned to be reimported.{already_running_text}',
messages.SUCCESS,
)
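For orientation, the per-upload steps of the `reimport` action can be reproduced from a Django shell. A minimal sketch, with an illustrative pk; everything else mirrors the action body above:

    # Sketch of what `reimport` does for each selected upload; pk 42 is illustrative.
    from logs.models import ManualDataUpload, MduState
    from logs.tasks import import_manual_upload_data

    mdu = ManualDataUpload.objects.get(pk=42)
    mdu.unprocess()                  # drop previous import batches, reset state and errors
    mdu.state = MduState.IMPORTING   # import_manual_upload_data requires the IMPORTING state
    mdu.save()
    # countdown=2 matches the admin action (presumably to let the enclosing transaction commit
    # before a worker picks the task up)
    import_manual_upload_data.apply_async(args=(mdu.pk, mdu.user_id), countdown=2)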
5 changes: 2 additions & 3 deletions apps/logs/logic/attempt_import.py
@@ -8,13 +8,12 @@

from logs.exceptions import DataStructureError
from logs.logic.data_import import import_counter_records, create_import_batch_or_crash
from logs.models import OrganizationPlatform, ImportBatch
from logs.models import OrganizationPlatform
from nigiri.client import Sushi5Client, SushiException, SushiError
from nigiri.counter5 import CounterError, Counter5ReportBase, TransportError
from nigiri.counter4 import Counter4ReportBase
from nigiri.counter5 import Counter5ReportBase, TransportError
from sushi.models import SushiFetchAttempt, AttemptStatus


logger = logging.getLogger(__name__)


13 changes: 8 additions & 5 deletions apps/logs/logic/custom_import.py
@@ -1,22 +1,25 @@
import logging
import typing
import statistics

from collections import Counter
from datetime import date
from functools import lru_cache

from core.logic.dates import parse_date_fuzzy
from core.models import User
from django.conf import settings
from django.db.transaction import atomic
from django.utils.timezone import now
from django.utils.translation import gettext as _

from core.logic.dates import parse_date_fuzzy
from core.models import User
from logs.logic.data_import import import_counter_records
from logs.logic.materialized_reports import sync_materialized_reports_for_import_batch
from logs.models import ImportBatch, ManualDataUpload, MduState, OrganizationPlatform
from logs.models import ManualDataUpload, OrganizationPlatform, MduState
from nigiri.counter5 import CounterRecord


logger = logging.getLogger(__name__)


@lru_cache
def col_name_to_month(row_name: str) -> typing.Optional[date]:
"""
7 changes: 7 additions & 0 deletions apps/logs/models.py
@@ -650,6 +650,13 @@ def delete(self, using=None, keep_parents=False):
import_batch.delete()
super().delete(using=using, keep_parents=keep_parents)

def unprocess(self):
self.import_batches.all().delete()
self.state = MduState.INITIAL
self.error_details = None
self.error = None
self.save()

@property
def accesslogs(self):
return AccessLog.objects.filter(import_batch__in=self.import_batches.all())
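The new `unprocess()` helper is what makes a repeated import safe: it removes the data produced by the previous run and returns the upload to its initial state. A rough sketch of the expected effect, using only the fields shown in this diff (the pk is illustrative):

    # Expected effect of ManualDataUpload.unprocess(); pk 42 is illustrative.
    from logs.models import ManualDataUpload, MduState

    mdu = ManualDataUpload.objects.get(pk=42)
    mdu.unprocess()
    mdu.refresh_from_db()

    assert not mdu.import_batches.exists()   # previously imported batches are deleted
    assert mdu.state == MduState.INITIAL     # state reset to INITIAL
    assert mdu.error is None and mdu.error_details is None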
12 changes: 12 additions & 0 deletions apps/logs/tasks.py
@@ -335,3 +335,15 @@ def unstuck_import_manual_upload_data():
& Q(created__lt=now() - timedelta(minutes=5)) # don't start right away
):
import_manual_upload_data.delay(mdu.pk, mdu.user.pk)


@celery.shared_task
@email_if_fails
def reprocess_mdu_task(mdu_id):
try:
mdu = ManualDataUpload.objects.get(pk=mdu_id)
except ManualDataUpload.DoesNotExist:
logger.error(f'MDU #{mdu_id} for reprocessing does not exist')
else:
mdu.unprocess()
import_manual_upload_data.delay(mdu.pk, mdu.user.pk)
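`reprocess_mdu_task` wraps the same unprocess-and-reimport sequence in a Celery task so it can be queued from other code paths (it is also imported in `logs/admin.py` above, although the admin action schedules `import_manual_upload_data` directly). A minimal usage sketch, with an illustrative pk:

    # Queue a full reprocess of one upload from any code path; 42 is an illustrative pk.
    from logs.tasks import reprocess_mdu_task

    reprocess_mdu_task.delay(42)
    # On the worker this resolves the ManualDataUpload, calls unprocess(), and then
    # schedules import_manual_upload_data; a missing pk is only logged, not raised.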
17 changes: 17 additions & 0 deletions apps/logs/tests/test_data_import.py
@@ -1,13 +1,17 @@
from collections import Counter
from pathlib import Path
from unittest.mock import patch

import pytest
from django.db.models import Count, Sum
from django.urls import reverse

from logs.models import ReportType, AccessLog, DimensionText, ImportBatch
from nigiri.counter4 import Counter4BR2Report
from nigiri.counter5 import Counter5TableReport, Counter5TRReport
from organizations.tests.conftest import organizations, organization_random # noqa - fixture
from publications.models import Title, PlatformTitle
from test_fixtures.entities.logs import ManualDataUploadFullFactory, ImportBatchFullFactory
from ..exceptions import DataStructureError
from ..logic.data_import import import_counter_records

@@ -304,3 +308,16 @@ def test_c5_tr_nature_merging(self, organization_random, report_type_nd, platfor
)
import_counter_records(rt, organization_random, platform, records)
assert Title.objects.filter(name='Nature').count() == 1, 'only one Nature'


@pytest.mark.django_db
class TestReprocessMDU:
def test_reimport_admin_action(self, admin_client):
mdu = ManualDataUploadFullFactory.create()
# reprocess and check
with patch('logs.admin.import_manual_upload_data') as task_patch:
admin_client.post(
reverse('admin:logs_manualdataupload_changelist'),
{'action': 'reimport', '_selected_action': [str(mdu.pk)]},
)
task_patch.apply_async.assert_called_once()
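The test only checks that the task was scheduled. If the assertion were to be tightened, the call arguments are fully determined by the action body above; a possible extension of the test body, not part of the commit:

    # Possible tightening of the existing assertion; the arguments mirror the action body.
    task_patch.apply_async.assert_called_once_with(args=(mdu.pk, mdu.user_id), countdown=2)

    # The action leaves the upload in the IMPORTING state before the task runs
    # (requires `from logs.models import MduState` in the test module).
    mdu.refresh_from_db()
    assert mdu.state == MduState.IMPORTING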
5 changes: 3 additions & 2 deletions config/settings/base.py
@@ -280,6 +280,7 @@
# in production, we remap this queue to the name 'normal', but here it should either be omitted
# or explicitly set to 'celery'
CELERY_TASK_ROUTES = {
'core.tasks.empty_task_export': {'queue': 'export'},
'core.tasks.test': {'queue': 'celery'},
'export.tasks.process_flexible_export_task': {'queue': 'export'},
'knowledgebase.tasks.sync_routes': {'queue': 'celery'},
@@ -295,11 +296,11 @@
'logs.tasks.export_raw_data_task': {'queue': 'export'},
'logs.tasks.prepare_preflight': {'queue': 'preflight'},
'logs.tasks.import_manual_upload_data': {'queue': 'import'},
'logs.tasks.prepare_preflights': {'queue': 'preflight'},
'logs.tasks.reprocess_mdu_task': {'queue': 'import'},
'scheduler.tasks.plan_schedulers_triggering': {'queue': 'sushi'},
'scheduler.tasks.update_automatic_harvesting': {'queue': 'sushi'},
'scheduler.tasks.trigger_scheduler': {'queue': 'sushi'},
'logs.tasks.prepare_preflights': {'queue': 'preflight'},
'core.tasks.empty_task_export': {'queue': 'export'},
}

CELERY_BEAT_SCHEDULE = {
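The settings change routes the new task to the existing 'import' queue and tidies a couple of entries in CELERY_TASK_ROUTES. In practice the route means no queue has to be named at the call site; a short sketch of the equivalence (the pk is illustrative):

    # Effect of the new CELERY_TASK_ROUTES entry: dispatch goes to the 'import' queue
    # based on the task name alone.
    from logs.tasks import reprocess_mdu_task

    reprocess_mdu_task.delay(42)                        # routed to 'import' via CELERY_TASK_ROUTES
    # roughly equivalent to the explicit form:
    reprocess_mdu_task.apply_async(args=(42,), queue='import')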
