From 0c2ad83f9820e16ab46d57f291c94d6d1a7d29e8 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 11 Jul 2024 08:04:10 -0400 Subject: [PATCH 001/119] Replace default values with (invalid) placeholders --- kobo/settings/base.py | 37 +++++++++++++++++++++---------------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/kobo/settings/base.py b/kobo/settings/base.py index d7551c437f..967651d61d 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -998,18 +998,20 @@ def __init__(self, *args, **kwargs): if SENTRY_JS_DSN_URL := env.url('SENTRY_JS_DSN', default=None): SENTRY_JS_DSN = SENTRY_JS_DSN_URL.geturl() -# replace this with the pointer to the kobocat server, if it exists -KOBOCAT_URL = os.environ.get('KOBOCAT_URL', 'http://kobocat') +# replace this with the pointer to the KoboCAT server, if it exists +KOBOCAT_URL = os.environ.get('KOBOCAT_URL', 'https://change-me.invalid') -# In case server must serve two KoBoCAT domain names (e.g. during a +# In case server must serve two KoboCAT domain names (e.g. during a # domain name transfer), `settings.KOBOCAT_OLD_URL` adds support for # the domain name. KOBOCAT_OLD_URL = os.environ.get('KOBOCAT_OLD_URL') -KOBOCAT_INTERNAL_URL = os.environ.get('KOBOCAT_INTERNAL_URL', - 'http://kobocat') +# Internal URL does not use HTTPS +KOBOCAT_INTERNAL_URL = os.environ.get( + 'KOBOCAT_INTERNAL_URL', 'http://change-me.invalid' +) -KOBOFORM_URL = os.environ.get('KOBOFORM_URL', 'http://kpi') +KOBOFORM_URL = os.environ.get('KOBOFORM_URL', 'https://change-me.invalid') if 'KOBOCAT_URL' in os.environ: DEFAULT_DEPLOYMENT_BACKEND = 'kobocat' @@ -1018,7 +1020,8 @@ def __init__(self, *args, **kwargs): ''' Stripe configuration intended for kf.kobotoolbox.org only, tracks usage limit exceptions ''' -STRIPE_ENABLED = env.bool("STRIPE_ENABLED", False) +STRIPE_ENABLED = env.bool('STRIPE_ENABLED', False) + def dj_stripe_request_callback_method(): # This method exists because dj-stripe's documentation doesn't reflect reality. 
@@ -1030,15 +1033,15 @@ def dj_stripe_request_callback_method(): pass -DJSTRIPE_SUBSCRIBER_MODEL = "organizations.Organization" +DJSTRIPE_SUBSCRIBER_MODEL = 'organizations.Organization' DJSTRIPE_SUBSCRIBER_MODEL_REQUEST_CALLBACK = dj_stripe_request_callback_method DJSTRIPE_FOREIGN_KEY_TO_FIELD = 'id' DJSTRIPE_USE_NATIVE_JSONFIELD = True STRIPE_LIVE_MODE = env.bool('STRIPE_LIVE_MODE', False) -STRIPE_TEST_PUBLIC_KEY = env.str('STRIPE_TEST_PUBLIC_KEY', "pk_test_qliDXQRyVGPWmsYR69tB1NPx00ndTrJfVM") -STRIPE_LIVE_PUBLIC_KEY = "pk_live_7JRQ5elvhnmz4YuWdlSRNmMj00lhvqZz8P" +STRIPE_TEST_PUBLIC_KEY = env.str('STRIPE_TEST_PUBLIC_KEY', 'pk_test_qliDXQRyVGPWmsYR69tB1NPx00ndTrJfVM') +STRIPE_LIVE_PUBLIC_KEY = 'pk_live_7JRQ5elvhnmz4YuWdlSRNmMj00lhvqZz8P' if STRIPE_ENABLED: - INSTALLED_APPS += ('djstripe', "kobo.apps.stripe") + INSTALLED_APPS += ('djstripe', 'kobo.apps.stripe') STRIPE_LIVE_SECRET_KEY = env.str('STRIPE_LIVE_SECRET_KEY', None) STRIPE_TEST_SECRET_KEY = env.str('STRIPE_TEST_SECRET_KEY', None) DJSTRIPE_WEBHOOK_SECRET = env.str('DJSTRIPE_WEBHOOK_SECRET', None) @@ -1052,7 +1055,7 @@ def dj_stripe_request_callback_method(): ''' Enketo configuration ''' -ENKETO_URL = os.environ.get('ENKETO_URL') or os.environ.get('ENKETO_SERVER', 'https://enketo.org') +ENKETO_URL = os.environ.get('ENKETO_URL') or os.environ.get('ENKETO_SERVER', 'https://change-me.invalid') ENKETO_URL = ENKETO_URL.rstrip('/') # Remove any trailing slashes ENKETO_VERSION = os.environ.get('ENKETO_VERSION', 'Legacy').lower() ENKETO_INTERNAL_URL = os.environ.get('ENKETO_INTERNAL_URL', ENKETO_URL) @@ -1207,7 +1210,9 @@ def dj_stripe_request_callback_method(): SYNC_KOBOCAT_PERMISSIONS = ( os.environ.get('SYNC_KOBOCAT_PERMISSIONS', 'True') == 'True') -CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/1') +CELERY_BROKER_URL = os.environ.get( + 'CELERY_BROKER_URL', 'redis://change-me.invalid:6379/1' +) CELERY_RESULT_BACKEND = CELERY_BROKER_URL # Increase limits for long-running tasks @@ -1517,7 +1522,7 @@ def dj_stripe_request_callback_method(): SESSION_ENGINE = 'redis_sessions.session' # django-redis-session expects a dictionary with `url` redis_session_url = env.cache_url( - 'REDIS_SESSION_URL', default='redis://redis_cache:6380/2' + 'REDIS_SESSION_URL', default='redis://change-me.invalid:6380/2' ) SESSION_REDIS = { 'url': redis_session_url['LOCATION'], @@ -1527,7 +1532,7 @@ def dj_stripe_request_callback_method(): CACHES = { # Set CACHE_URL to override - 'default': env.cache_url(default='redis://redis_cache:6380/3'), + 'default': env.cache_url(default='redis://change-me.invalid:6380/3'), 'enketo_redis_main': env.cache_url( 'ENKETO_REDIS_MAIN_URL', default='redis://change-me.invalid/0' ), @@ -1605,7 +1610,7 @@ def dj_stripe_request_callback_method(): SERVICE_ACCOUNT = { 'BACKEND': env.cache_url( - 'SERVICE_ACCOUNT_BACKEND_URL', default='redis://redis_cache:6380/6' + 'SERVICE_ACCOUNT_BACKEND_URL', default='redis://change-me.invalid:6380/6' ), 'WHITELISTED_HOSTS': env.list('SERVICE_ACCOUNT_WHITELISTED_HOSTS', default=[]), } From cf24c2a9d376beff5953f3f176d5cce2e0fe8b41 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 20 Jun 2024 10:23:30 -0400 Subject: [PATCH 002/119] Replace shadow models imports with open rosa models --- hub/admin/extend_user.py | 8 +++++--- hub/models/extra_user_detail.py | 4 +++- kobo/apps/accounts/mfa/models.py | 4 ++-- kobo/apps/form_disclaimer/models.py | 2 +- kobo/apps/kobo_auth/models.py | 18 ++++++++++++++++++ kobo/apps/project_ownership/utils.py | 6 ++---- 
kobo/apps/superuser_stats/models.py | 4 ++-- kobo/apps/superuser_stats/tasks.py | 22 +++++++++++----------- kobo/apps/trackers/submission_utils.py | 6 +++--- kobo/apps/trash_bin/models/project.py | 6 ++++-- kpi/signals.py | 3 ++- 11 files changed, 53 insertions(+), 30 deletions(-) diff --git a/hub/admin/extend_user.py b/hub/admin/extend_user.py index ce8a0d602e..ca545a624f 100644 --- a/hub/admin/extend_user.py +++ b/hub/admin/extend_user.py @@ -21,13 +21,15 @@ USERNAME_INVALID_MESSAGE, username_validators, ) +from kobo.apps.openrosa.apps.logger.models import ( + MonthlyXFormSubmissionCounter as KobocatMonthlyXFormSubmissionCounter, +) from kobo.apps.organizations.models import OrganizationUser from kobo.apps.trash_bin.exceptions import TrashIntegrityError from kobo.apps.trash_bin.models.account import AccountTrash from kobo.apps.trash_bin.utils import move_to_trash -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatMonthlyXFormSubmissionCounter, -) + + from kpi.models.asset import AssetDeploymentStatus from .filters import UserAdvancedSearchFilter from .mixins import AdvancedSearchMixin diff --git a/hub/models/extra_user_detail.py b/hub/models/extra_user_detail.py index 260f2e9be6..3c4213aa23 100644 --- a/hub/models/extra_user_detail.py +++ b/hub/models/extra_user_detail.py @@ -1,7 +1,9 @@ from django.conf import settings from django.db import models -from kpi.deployment_backends.kc_access.shadow_models import KobocatUserProfile +from kobo.apps.openrosa.apps.main.models import ( + UserProfile as KobocatUserProfile, +) from kpi.fields import KpiUidField from kpi.mixins import StandardizeSearchableFieldMixin diff --git a/kobo/apps/accounts/mfa/models.py b/kobo/apps/accounts/mfa/models.py index 424105e9d9..6c822a0e16 100644 --- a/kobo/apps/accounts/mfa/models.py +++ b/kobo/apps/accounts/mfa/models.py @@ -8,8 +8,8 @@ MFAMethodAdmin as TrenchMFAMethodAdmin, ) -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatUserProfile, +from kobo.apps.openrosa.apps.main.models import ( + UserProfile as KobocatUserProfile, ) diff --git a/kobo/apps/form_disclaimer/models.py b/kobo/apps/form_disclaimer/models.py index 0b1896a181..ce2fe53e8e 100644 --- a/kobo/apps/form_disclaimer/models.py +++ b/kobo/apps/form_disclaimer/models.py @@ -1,5 +1,5 @@ from django.conf import settings -from django.db import models, transaction +from django.db import models from django.db.models import Q from django.db.models.constraints import UniqueConstraint from markdownx.models import MarkdownxField diff --git a/kobo/apps/kobo_auth/models.py b/kobo/apps/kobo_auth/models.py index b9b0151a7e..ecec05a1a0 100644 --- a/kobo/apps/kobo_auth/models.py +++ b/kobo/apps/kobo_auth/models.py @@ -39,3 +39,21 @@ def has_perm(self, perm, obj=None): # Otherwise, check in KPI DB return super().has_perm(perm, obj) + + def sync_to_openrosa_db(self): + self.__class__.objects.using(settings.OPENROSA_DB_ALIAS).bulk_create( + [self], + update_conflicts=True, + update_fields=[ + 'password', + 'last_login', + 'is_superuser', + 'first_name', + 'last_name', + 'email', + 'is_staff', + 'is_active', + 'date_joined', + ], + unique_fields=['pk'] + ) diff --git a/kobo/apps/project_ownership/utils.py b/kobo/apps/project_ownership/utils.py index 77d5e526a0..eec2855850 100644 --- a/kobo/apps/project_ownership/utils.py +++ b/kobo/apps/project_ownership/utils.py @@ -4,10 +4,8 @@ from django.apps import apps from django.utils import timezone -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatAttachment, - 
KobocatMetadata, -) +from kobo.apps.openrosa.apps.logger.models import Attachment as KobocatAttachment +from kobo.apps.openrosa.apps.main.models import MetaData as KobocatMetadata from kpi.models.asset import AssetFile from .models.choices import TransferStatusChoices, TransferStatusTypeChoices from .exceptions import AsyncTaskException diff --git a/kobo/apps/superuser_stats/models.py b/kobo/apps/superuser_stats/models.py index 036aee12a0..61ab97cf74 100644 --- a/kobo/apps/superuser_stats/models.py +++ b/kobo/apps/superuser_stats/models.py @@ -1,5 +1,5 @@ -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatMonthlyXFormSubmissionCounter, +from kobo.apps.openrosa.apps.logger.models import ( + MonthlyXFormSubmissionCounter as KobocatMonthlyXFormSubmissionCounter, ) diff --git a/kobo/apps/superuser_stats/tasks.py b/kobo/apps/superuser_stats/tasks.py index 543bcccd0c..a65d78deb2 100644 --- a/kobo/apps/superuser_stats/tasks.py +++ b/kobo/apps/superuser_stats/tasks.py @@ -28,15 +28,17 @@ from hub.models import ExtraUserDetail from kobo.apps.kobo_auth.shortcuts import User + from kobo.apps.trackers.models import NLPUsageCounter from kobo.static_lists import COUNTRIES from kpi.constants import ASSET_TYPE_SURVEY -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatMonthlyXFormSubmissionCounter, - KobocatXForm, - KobocatUser, - KobocatUserProfile, - ReadOnlyKobocatInstance, +from kobo.apps.openrosa.apps.logger.models import ( + Instance as ReadOnlyKobocatInstance, + MonthlyXFormSubmissionCounter as KobocatMonthlyXFormSubmissionCounter, + XForm as KobocatXForm, +) +from kobo.apps.openrosa.apps.main.models import ( + UserProfile as KobocatUserProfile, ) from kpi.models.asset import Asset, AssetDeploymentStatus @@ -364,11 +366,11 @@ def format_date(d): else: return d - def get_row_for_user(u: KobocatUser) -> list: + def get_row_for_user(u: 'kobo_auth.User') -> list: row_ = [] try: - profile = KobocatUserProfile.objects.get(user=u) + profile = KobocatUserProfile.objects.get(user_id=u.pk) except KobocatUserProfile.DoesNotExist: profile = None @@ -437,9 +439,7 @@ def get_row_for_user(u: KobocatUser) -> list: with default_storage.open(output_filename, 'w') as output_file: writer = csv.writer(output_file) writer.writerow(columns) - kc_users = KobocatUser.objects.exclude( - pk=settings.ANONYMOUS_USER_ID - ).order_by('pk') + kc_users = User.objects.exclude(pk=settings.ANONYMOUS_USER_ID).order_by('pk') for kc_user in kc_users.iterator(CHUNK_SIZE): try: row = get_row_for_user(kc_user) diff --git a/kobo/apps/trackers/submission_utils.py b/kobo/apps/trackers/submission_utils.py index 51a85bff07..28bb219499 100644 --- a/kobo/apps/trackers/submission_utils.py +++ b/kobo/apps/trackers/submission_utils.py @@ -5,9 +5,9 @@ from django.utils import timezone from model_bakery import baker -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatDailyXFormSubmissionCounter, - KobocatXForm, +from kobo.apps.openrosa.apps.logger.models import ( + DailyXFormSubmissionCounter as KobocatDailyXFormSubmissionCounter, + XForm as KobocatXForm, ) from kpi.models import Asset from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE diff --git a/kobo/apps/trash_bin/models/project.py b/kobo/apps/trash_bin/models/project.py index 04fdeaae5e..c3b52f3599 100644 --- a/kobo/apps/trash_bin/models/project.py +++ b/kobo/apps/trash_bin/models/project.py @@ -4,12 +4,14 @@ from django.db import models, transaction from django.utils.timezone import now +from 
kobo.apps.openrosa.apps.logger.models import ( + XForm as KobocatXForm, +) from kobo.apps.project_ownership.models import ( Invite, InviteStatusChoices, Transfer, ) -from kpi.deployment_backends.kc_access.shadow_models import KobocatUser, KobocatXForm from kpi.deployment_backends.kc_access.utils import kc_transaction_atomic from kpi.fields import KpiUidField from kpi.models.asset import Asset, AssetDeploymentStatus @@ -49,7 +51,7 @@ def toggle_asset_statuses( kc_filter_params = {'kpi_asset_uid__in': asset_uids} filter_params = {'uid__in': asset_uids} else: - kc_filter_params = {'user': KobocatUser.get_kc_user(owner)} + kc_filter_params = {'user_id': owner.pk} filter_params = {'owner': owner} kc_update_params = {'downloadable': active} diff --git a/kpi/signals.py b/kpi/signals.py index bf25e01eb8..1543a155e5 100644 --- a/kpi/signals.py +++ b/kpi/signals.py @@ -13,6 +13,7 @@ from kpi.deployment_backends.kc_access.shadow_models import ( KobocatUser, ) + from kpi.deployment_backends.kc_access.utils import ( grant_kc_model_level_perms, kc_transaction_atomic, @@ -63,7 +64,7 @@ def save_kobocat_user(sender, instance, created, raw, **kwargs): if not settings.TESTING: with kc_transaction_atomic(): - KobocatUser.sync(instance) + instance.sync_to_openrosa_db() if created: grant_kc_model_level_perms(instance) From 647eee22798b0915aa610be707f7b8974bfc52b3 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 4 Jul 2024 09:31:22 -0400 Subject: [PATCH 003/119] Replace MockAttachment with Attachment model in unit tests --- kobo/apps/kobo_auth/models.py | 6 +- .../openrosa/apps/logger/models/__init__.py | 1 - .../openrosa/apps/logger/models/attachment.py | 117 ++++++++++++++++-- .../apps/openrosa/apps/logger/models/xform.py | 7 +- .../logger/tests/test_simple_submission.py | 1 + .../tests/api/v2/test_api.py | 28 ++--- kobo/apps/shadow_model/__init__.py | 2 +- .../tests/test_submission_extras_api_post.py | 6 +- .../migrations/0001_initial.py | 20 +++ .../superuser_stats/migrations/__init__.py | 0 kobo/apps/superuser_stats/tasks.py | 1 - .../kc_access/shadow_models.py | 35 ------ kpi/deployment_backends/mock_backend.py | 85 +++++++++++-- kpi/management/commands/sync_kobocat_perms.py | 12 +- .../commands/sync_kobocat_xforms.py | 41 +++--- kpi/serializers/v2/service_usage.py | 8 +- kpi/signals.py | 3 - kpi/tests/api/v2/test_api_attachments.py | 1 - kpi/tests/api/v2/test_api_paired_data.py | 14 +-- kpi/tests/utils/mock.py | 85 +------------ kpi/utils/database.py | 41 ++++++ kpi/utils/project_view_exports.py | 3 +- kpi/views/v2/attachment.py | 2 +- 23 files changed, 311 insertions(+), 208 deletions(-) create mode 100644 kobo/apps/superuser_stats/migrations/0001_initial.py create mode 100644 kobo/apps/superuser_stats/migrations/__init__.py diff --git a/kobo/apps/kobo_auth/models.py b/kobo/apps/kobo_auth/models.py index ecec05a1a0..307e7614ac 100644 --- a/kobo/apps/kobo_auth/models.py +++ b/kobo/apps/kobo_auth/models.py @@ -7,7 +7,7 @@ OPENROSA_APP_LABELS, ) from kobo.apps.openrosa.libs.permissions import get_model_permission_codenames -from kpi.utils.database import use_db +from kpi.utils.database import use_db, update_autofield_sequence class User(AbstractUser): @@ -41,7 +41,8 @@ def has_perm(self, perm, obj=None): return super().has_perm(perm, obj) def sync_to_openrosa_db(self): - self.__class__.objects.using(settings.OPENROSA_DB_ALIAS).bulk_create( + User = self.__class__ # noqa + User.objects.using(settings.OPENROSA_DB_ALIAS).bulk_create( [self], update_conflicts=True, update_fields=[ @@ -57,3 +58,4 @@ def 
sync_to_openrosa_db(self): ], unique_fields=['pk'] ) + update_autofield_sequence(User) diff --git a/kobo/apps/openrosa/apps/logger/models/__init__.py b/kobo/apps/openrosa/apps/logger/models/__init__.py index 01a8162a96..6defed3784 100644 --- a/kobo/apps/openrosa/apps/logger/models/__init__.py +++ b/kobo/apps/openrosa/apps/logger/models/__init__.py @@ -3,7 +3,6 @@ from kobo.apps.openrosa.apps.logger.models.instance import Instance from kobo.apps.openrosa.apps.logger.models.survey_type import SurveyType from kobo.apps.openrosa.apps.logger.models.xform import XForm -from kobo.apps.openrosa.apps.logger.xform_instance_parser import InstanceParseError from kobo.apps.openrosa.apps.logger.models.note import Note from kobo.apps.openrosa.apps.logger.models.daily_xform_submission_counter import ( DailyXFormSubmissionCounter, diff --git a/kobo/apps/openrosa/apps/logger/models/attachment.py b/kobo/apps/openrosa/apps/logger/models/attachment.py index 024ee06b11..5fa0457e96 100644 --- a/kobo/apps/openrosa/apps/logger/models/attachment.py +++ b/kobo/apps/openrosa/apps/logger/models/attachment.py @@ -1,15 +1,24 @@ # coding: utf-8 import mimetypes import os +from typing import Optional +from urllib.parse import quote as urlquote from django.conf import settings +from django.core.files.base import ContentFile from django.db import models from django.utils.http import urlencode +from kobo.apps.openrosa.libs.utils.image_tools import ( + get_optimized_image_path, + resize, +) from kobo.apps.openrosa.libs.utils.hash import get_hash from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, + KobocatFileSystemStorage, ) +from kpi.mixins.audio_transcoding import AudioTranscodingMixin from .instance import Instance @@ -37,7 +46,7 @@ def get_queryset(self): return super().get_queryset().filter(deleted_at__isnull=True) -class Attachment(models.Model): +class Attachment(models.Model, AudioTranscodingMixin): instance = models.ForeignKey( Instance, related_name='attachments', on_delete=models.CASCADE ) @@ -62,19 +71,32 @@ class Attachment(models.Model): class Meta: app_label = 'logger' - def save(self, *args, **kwargs): - if self.media_file: - self.media_file_basename = self.filename - if self.mimetype == '': - # guess mimetype - mimetype, encoding = mimetypes.guess_type(self.media_file.name) - if mimetype: - self.mimetype = mimetype - # Cache the file size in the database to avoid expensive calls to - # the storage engine when running reports - self.media_file_size = self.media_file.size + @property + def absolute_mp3_path(self): + """ + Return the absolute path on local file system of the converted version of + attachment. Otherwise, return the AWS url (e.g. https://...) + """ - super().save(*args, **kwargs) + if not default_storage.exists(self.mp3_storage_path): + content = self.get_transcoded_audio('mp3') + default_storage.save(self.mp3_storage_path, ContentFile(content)) + + if isinstance(default_storage, KobocatFileSystemStorage): + return f'{self.media_file.path}.mp3' + + return default_storage.url(self.mp3_storage_path) + + @property + def absolute_path(self): + """ + Return the absolute path on local file system of the attachment. + Otherwise, return the AWS url (e.g. https://...) 
+ """ + if isinstance(default_storage, KobocatFileSystemStorage): + return self.media_file.path + + return self.media_file.url @property def file_hash(self): @@ -90,6 +112,71 @@ def file_hash(self): def filename(self): return os.path.basename(self.media_file.name) + @property + def mp3_storage_path(self): + """ + Return the path of file after conversion. It is the exact same name, plus + the conversion audio format extension concatenated. + E.g: file.mp4 and file.mp4.mp3 + """ + return f'{self.storage_path}.mp3' + + def protected_path( + self, format_: Optional[str] = None, suffix: Optional[str] = None + ) -> str: + """ + Return path to be served as protected file served by NGINX + """ + if format_ == 'mp3': + attachment_file_path = self.absolute_mp3_path + else: + attachment_file_path = self.absolute_path + + optimized_image_path = None + if suffix and self.mimetype.startswith('image/'): + optimized_image_path = get_optimized_image_path( + self.media_file.name, suffix + ) + if not default_storage.exists(optimized_image_path): + resize(self.media_file.name) + + if isinstance(default_storage, KobocatFileSystemStorage): + # Django normally sanitizes accented characters in file names during + # save on disk but some languages have extra letters + # (out of ASCII character set) and must be encoded to let NGINX serve + # them + if optimized_image_path: + attachment_file_path = default_storage.path( + optimized_image_path + ) + protected_url = urlquote(attachment_file_path.replace( + settings.KOBOCAT_MEDIA_ROOT, '/protected') + ) + else: + # Double-encode the S3 URL to take advantage of NGINX's + # otherwise troublesome automatic decoding + if optimized_image_path: + attachment_file_path = default_storage.url( + optimized_image_path + ) + protected_url = f'/protected-s3/{urlquote(attachment_file_path)}' + + return protected_url + + def save(self, *args, **kwargs): + if self.media_file: + self.media_file_basename = self.filename + if self.mimetype == '': + # guess mimetype + mimetype, encoding = mimetypes.guess_type(self.media_file.name) + if mimetype: + self.mimetype = mimetype + # Cache the file size in the database to avoid expensive calls to + # the storage engine when running reports + self.media_file_size = self.media_file.size + + super().save(*args, **kwargs) + def secure_url(self, suffix: str = 'original'): """ Returns image URL through KoboCAT redirector. 
@@ -105,3 +192,7 @@ def secure_url(self, suffix: str = 'original'): suffix=suffix, media_file=urlencode({'media_file': self.media_file.name}) ) + + @property + def storage_path(self): + return str(self.media_file) diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index be24470ff6..c0c68ab6a2 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -6,6 +6,7 @@ from io import BytesIO from xml.sax.saxutils import escape as xml_escape +from django.apps import apps from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.urls import reverse @@ -25,15 +26,10 @@ CAN_DELETE_DATA_XFORM, CAN_TRANSFER_OWNERSHIP, ) -from kobo.apps.openrosa.libs.utils.guardian import ( - assign_perm, - get_perms_for_model -) from kobo.apps.openrosa.libs.utils.hash import get_hash from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) -from kpi.models.asset import Asset from kpi.utils.xml import XMLFormWithDisclaimer XFORM_TITLE_LENGTH = 255 @@ -131,6 +127,7 @@ def asset(self): Useful to display form disclaimer in Enketo. See kpi.utils.xml.XMLFormWithDisclaimer for more details. """ + Asset = apps.get_model('kpi', 'Asset') # noqa if not hasattr(self, '_cache_asset'): try: asset = Asset.objects.get(uid=self.kpi_asset_uid) diff --git a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py index c3d845af45..1630ea1a28 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py @@ -10,6 +10,7 @@ create_instance, safe_create_instance ) + class TempFileProxy: """ create_instance will be looking for a file object, diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 58f708f1de..92a531e470 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -1,15 +1,12 @@ import uuid from constance.test import override_config -from datetime import timedelta -from dateutil.parser import isoparse from django.conf import settings from django.contrib.auth import get_user_model from django.utils import timezone from mock import patch, MagicMock from rest_framework import status from rest_framework.reverse import reverse -from unittest.mock import ANY from kobo.apps.project_ownership.models import ( Invite, @@ -441,14 +438,10 @@ def test_account_usage_transferred_to_new_user(self): ), 'assets': [self.asset.uid] } - with patch( - 'kpi.deployment_backends.backends.MockDeploymentBackend.xform', - MagicMock(), - ): - response = self.client.post( - self.invite_url, data=payload, format='json' - ) - assert response.status_code == status.HTTP_201_CREATED + response = self.client.post( + self.invite_url, data=payload, format='json' + ) + assert response.status_code == status.HTTP_201_CREATED # someuser should have no usage reported anymore response = self.client.get(service_usage_url) @@ -504,14 +497,11 @@ def test_data_accessible_to_new_user(self): ), 'assets': [self.asset.uid] } - with patch( - 'kpi.deployment_backends.backends.MockDeploymentBackend.xform', - MagicMock(), - ): - response = self.client.post( - self.invite_url, data=payload, format='json' - ) - assert response.status_code == status.HTTP_201_CREATED + + response = self.client.post( + 
self.invite_url, data=payload, format='json' + ) + assert response.status_code == status.HTTP_201_CREATED # anotheruser is the owner and should see the project self.client.login(username='anotheruser', password='anotheruser') diff --git a/kobo/apps/shadow_model/__init__.py b/kobo/apps/shadow_model/__init__.py index 1c1c9a2a79..73019b2e86 100644 --- a/kobo/apps/shadow_model/__init__.py +++ b/kobo/apps/shadow_model/__init__.py @@ -9,5 +9,5 @@ class ShadowModelAppConfig(AppConfig): in Django Admin. """ name = 'kobo.apps.shadow_model' - verbose_name = 'KoBoCAT data' + verbose_name = 'KoboCAT data' label = SHADOW_MODEL_APP_LABEL diff --git a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py index f530d870ee..1e951ff389 100644 --- a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py +++ b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py @@ -387,7 +387,9 @@ def test_change_language_list(self): class GoogleTranscriptionSubmissionTest(APITestCase): def setUp(self): self.user = User.objects.create_user(username='someuser', email='user@example.com') - self.asset = Asset(content={'survey': [{'type': 'audio', 'name': 'q1'}]}) + self.asset = Asset( + content={'survey': [{'type': 'audio', 'label': 'q1'}]} + ) self.asset.advanced_features = {'transcript': {'values': ['q1']}} self.asset.owner = self.user self.asset.save() @@ -429,7 +431,7 @@ def test_google_transcript_post(self, m1, m2): 'submission': submission_id, 'q1': {GOOGLETS: {'status': 'requested', 'languageCode': ''}} } - with self.assertNumQueries(FuzzyInt(51, 57)): + with self.assertNumQueries(FuzzyInt(210, 215)): res = self.client.post(url, data, format='json') self.assertContains(res, 'complete') with self.assertNumQueries(FuzzyInt(20, 26)): diff --git a/kobo/apps/superuser_stats/migrations/0001_initial.py b/kobo/apps/superuser_stats/migrations/0001_initial.py new file mode 100644 index 0000000000..7b2d303df9 --- /dev/null +++ b/kobo/apps/superuser_stats/migrations/0001_initial.py @@ -0,0 +1,20 @@ +# Generated by Django 4.2.11 on 2024-07-03 19:37 + +from django.db import migrations + + +class Migration(migrations.Migration): + """ + Dummy migration to be able to register SuperuserStatsModel with its admin + model without raising an error. + + > django.db.migrations.exceptions.InvalidBasesError: Cannot resolve bases for [] + > This can happen if you are inheriting models from an app with migrations (e.g. 
contrib.auth) + > in an app with no migrations; see https://docs.djangoproject.com/en/4.2/topics/migrations/#dependencies for more details + """ + dependencies = [ + ('logger', '0034_set_require_auth_at_project_level'), + ] + + operations = [ + ] diff --git a/kobo/apps/superuser_stats/migrations/__init__.py b/kobo/apps/superuser_stats/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/kobo/apps/superuser_stats/tasks.py b/kobo/apps/superuser_stats/tasks.py index a65d78deb2..06ab534fc7 100644 --- a/kobo/apps/superuser_stats/tasks.py +++ b/kobo/apps/superuser_stats/tasks.py @@ -28,7 +28,6 @@ from hub.models import ExtraUserDetail from kobo.apps.kobo_auth.shortcuts import User - from kobo.apps.trackers.models import NLPUsageCounter from kobo.static_lists import COUNTRIES from kpi.constants import ASSET_TYPE_SURVEY diff --git a/kpi/deployment_backends/kc_access/shadow_models.py b/kpi/deployment_backends/kc_access/shadow_models.py index df01578af9..f78bb6d418 100644 --- a/kpi/deployment_backends/kc_access/shadow_models.py +++ b/kpi/deployment_backends/kc_access/shadow_models.py @@ -11,12 +11,10 @@ from django.core.files.base import ContentFile from django.db import ( ProgrammingError, - connections, models, transaction, ) from django.utils import timezone -from django_digest.models import PartialDigest from kobo.apps.openrosa.libs.utils.image_tools import ( get_optimized_image_path, @@ -38,39 +36,6 @@ ) -def update_autofield_sequence(model): - """ - Fixes the PostgreSQL sequence for the first (and only?) `AutoField` on - `model`, à la `manage.py sqlsequencereset` - """ - # Updating sequences on fresh environments fails because the only user - # in the DB is django-guardian AnonymousUser and `max(pk)` returns -1. - # Error: - # > setval: value -1 is out of bounds for sequence - # Using abs() and testing if max(pk) equals -1, leaves the sequence alone. 
- sql_template = ( - "SELECT setval(" - " pg_get_serial_sequence('{table}','{column}'), " - " abs(coalesce(max({column}), 1)), " - " max({column}) IS NOT null and max({column}) != -1" - ") " - "FROM {table};" - ) - autofield = None - for f in model._meta.get_fields(): - if isinstance(f, models.AutoField): - autofield = f - break - if not autofield: - return - query = sql_template.format( - table=model._meta.db_table, column=autofield.column - ) - connection = connections[settings.OPENROSA_DB_ALIAS] - with connection.cursor() as cursor: - cursor.execute(query) - - class ShadowModel(models.Model): """ Allows identification of writeable and read-only shadow models diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 0d6012a684..19c7e1025b 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -16,12 +16,17 @@ from deepmerge import always_merger from dict2xml import dict2xml as dict2xml_real +from django.db.models import Q from django.conf import settings +from django.core.files.base import ContentFile from django.db.models import Sum from django.db.models.functions import Coalesce from django.urls import reverse from rest_framework import status +from kobo.apps.openrosa.apps.logger.models import Attachment, Instance, XForm +from kobo.apps.openrosa.apps.logger.models.attachment import upload_to +from kobo.apps.openrosa.apps.main.models import UserProfile from kobo.apps.trackers.models import NLPUsageCounter from kpi.constants import ( SUBMISSION_FORMAT_TYPE_JSON, @@ -38,7 +43,6 @@ ) from kpi.interfaces.sync_backend_media import SyncBackendMediaInterface from kpi.models.asset_file import AssetFile -from kpi.tests.utils.mock import MockAttachment from kpi.utils.mongo_helper import MongoHelper, drop_mock_only from kpi.utils.xml import fromstring_preserve_root_xmlns from .base_backend import BaseDeploymentBackend @@ -275,7 +279,7 @@ def get_attachment( user: settings.AUTH_USER_MODEL, attachment_id: Optional[int] = None, xpath: Optional[str] = None, - ) -> MockAttachment: + ) -> 'logger.Attachment': submission_json = None # First try to get the json version of the submission. 
# It helps to retrieve the id if `submission_id_or_uuid` is a `UUIDv4`
@@ -325,7 +329,13 @@ def get_attachment(
                 is_good_file = int(attachment['id']) == int(attachment_id)

                 if is_good_file:
-                    return MockAttachment(pk=attachment_id, **attachment)
+                    return self._get_attachment_object(
+                        attachment_id=attachment['id'],
+                        submission_xml=submission_xml,
+                        submission_id=submission_json['_id'],
+                        filename=filename,
+                        mimetype=attachment.get('mimetype'),
+                    )

         raise AttachmentNotFoundException

@@ -333,8 +343,18 @@ def get_attachment_objects_from_dict(self, submission: dict) -> list:
         if not submission.get('_attachments'):
             return []
         attachments = submission.get('_attachments')
+        submission_xml = self.get_submission(
+            submission['_id'], self.asset.owner, format_type=SUBMISSION_FORMAT_TYPE_XML
+        )
+
         return [
-            MockAttachment(pk=attachment['id'], **attachment)
+            self._get_attachment_object(
+                attachment_id=attachment['id'],
+                submission_xml=submission_xml,
+                submission_id=submission['_id'],
+                filename=os.path.basename(attachment['filename']),
+                mimetype=attachment.get('mimetype'),
+            )
             for attachment in attachments
         ]

@@ -704,13 +724,62 @@ def transfer_submissions_ownership(

     @property
     def xform(self):
         """
-        Dummy property, only present to be mocked by unit tests
+        Create related XForm on the fly
         """
-        pass
+        if not (
+            xform := XForm.objects.filter(id_string=self.asset.uid).first()
+        ):
+            UserProfile.objects.get_or_create(user_id=self.asset.owner_id)
+            xform = XForm()
+            xform.xml = self.asset.snapshot().xml
+            xform.user_id = self.asset.owner_id
+            xform.kpi_asset_uid = self.asset.uid
+            xform.save()
+
+        return xform

     @property
     def xform_id_string(self):
-        return self.asset.uid
+        return self.xform.id_string
+
+    def _get_attachment_object(
+        self,
+        submission_xml: str,
+        submission_id: int,
+        attachment_id: Optional[int] = None,
+        filename: Optional[str] = None,
+        mimetype: Optional[str] = None,
+    ):
+        if not (
+            attachment := Attachment.objects.filter(
+                Q(pk=attachment_id) | Q(media_file_basename=filename)
+            ).first()
+        ):
+            if not (
+                instance := Instance.objects.filter(pk=submission_id).first()
+            ):
+                instance = Instance.objects.create(
+                    pk=submission_id, xml=submission_xml, xform=self.xform
+                )
+
+            attachment = Attachment()
+            attachment.instance = instance
+            basename = os.path.basename(filename)
+            file_ = os.path.join(
+                settings.BASE_DIR,
+                'kpi',
+                'tests',
+                basename
+            )
+            with open(file_, 'rb') as f:
+                attachment.media_file = ContentFile(
+                    f.read(), name=upload_to(attachment, basename)
+                )
+            if mimetype:
+                attachment.mimetype = mimetype
+            attachment.save()
+
+        return attachment

     @classmethod
     def __prepare_bulk_update_data(cls, updates: dict) -> dict:
diff --git a/kpi/management/commands/sync_kobocat_perms.py b/kpi/management/commands/sync_kobocat_perms.py
index 03efc0d869..5e7e1b1396 100644
--- a/kpi/management/commands/sync_kobocat_perms.py
+++ b/kpi/management/commands/sync_kobocat_perms.py
@@ -3,6 +3,7 @@
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.core.exceptions import ImproperlyConfigured
 from django.core.management.base import BaseCommand
+from guardian.models import UserObjectPermission

 from kpi.constants import PERM_FROM_KC_ONLY
 from kpi.models import Asset, ObjectPermission
 from kpi.deployment_backends.kc_access.utils import (
     assign_applicable_kc_permissions,
     kc_transaction_atomic,
 )
-from kpi.deployment_backends.kc_access.shadow_models import (
-    KobocatUserObjectPermission
-)
 from 
kpi.management.commands.sync_kobocat_xforms import _sync_permissions from kpi.utils.object_permission import get_perm_ids_from_code_names @@ -94,9 +92,11 @@ def _sync_perms(self, **options): with kc_transaction_atomic(): kc_user_obj_perm_qs = ( - KobocatUserObjectPermission.objects.filter( - object_pk=asset.deployment.xform_id - ).exclude(user_id=asset.owner_id) + UserObjectPermission.objects.using( + settings.OPENROSA_DB_ALIAS + ) + .filter(object_pk=asset.deployment.xform_id) + .exclude(user_id=asset.owner_id) ) if kc_user_obj_perm_qs.exists(): if self._verbosity >= 1: diff --git a/kpi/management/commands/sync_kobocat_xforms.py b/kpi/management/commands/sync_kobocat_xforms.py index f38e79f683..89e47c8ad5 100644 --- a/kpi/management/commands/sync_kobocat_xforms.py +++ b/kpi/management/commands/sync_kobocat_xforms.py @@ -14,6 +14,7 @@ from django.core.management import call_command from django.core.management.base import BaseCommand from django.db import transaction +from guardian.models import UserObjectPermission from formpack.utils.xls_to_ss_structure import xlsx_to_dicts from pyxform import xls2json_backends from rest_framework.authtoken.models import Token @@ -21,11 +22,7 @@ from kobo.apps.kobo_auth.shortcuts import User from kpi.constants import PERM_FROM_KC_ONLY from kpi.utils.log import logging -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatPermission, - KobocatUserObjectPermission, - KobocatXForm, -) +from kobo.apps.openrosa.apps.logger.models.xform import XForm from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend from kpi.models import Asset, ObjectPermission from kpi.utils.object_permission import get_anonymous_user @@ -40,17 +37,21 @@ ASSET_CT = ContentType.objects.get_for_model(Asset) FROM_KC_ONLY_PERMISSION = Permission.objects.get( content_type=ASSET_CT, codename=PERM_FROM_KC_ONLY) -XFORM_CT = KobocatXForm.get_content_type() +XFORM_CT = XForm.get_content_type() ANONYMOUS_USER = get_anonymous_user() # Replace codenames with Permission PKs, remembering the codenames permission_map_copy = dict(PERMISSIONS_MAP) KPI_PKS_TO_CODENAMES = {} for kc_codename, kpi_codename in permission_map_copy.items(): - kc_perm_pk = KobocatPermission.objects.get( - content_type=XFORM_CT, codename=kc_codename).pk + kc_perm_pk = ( + Permission.objects.using(settings.OPENROSA_DB_ALIAS) + .get(content_type=XFORM_CT, codename=kc_codename) + .pk + ) kpi_perm_pk = Permission.objects.get( - content_type=ASSET_CT, codename=kpi_codename).pk + content_type=ASSET_CT, codename=kpi_codename + ).pk del PERMISSIONS_MAP[kc_codename] @@ -318,11 +319,15 @@ def _sync_permissions(asset, xform): return [] # Get all applicable KC permissions set for this xform - xform_user_perms = KobocatUserObjectPermission.objects.filter( - permission_id__in=PERMISSIONS_MAP.keys(), - content_type=XFORM_CT, - object_pk=xform.pk - ).values_list('user', 'permission') + xform_user_perms = ( + UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS) + .filter( + permission_id__in=PERMISSIONS_MAP.keys(), + content_type=XFORM_CT, + object_pk=xform.pk, + ) + .values_list('user', 'permission') + ) if not xform_user_perms and not asset.pk: # Nothing to do @@ -473,9 +478,9 @@ def handle(self, *args, **options): sync_kobocat_form_media = options.get('sync_kobocat_form_media') verbosity = options.get('verbosity') users = User.objects.all() - # Do a basic query just to make sure the KobocatXForm model is + # Do a basic query just to make sure the XForm model is # loaded - if not 
KobocatXForm.objects.exists(): + if not XForm.objects.exists(): return self._print_str('%d total users' % users.count()) # A specific user or everyone? @@ -503,8 +508,8 @@ def handle(self, *args, **options): xform_uuids_to_asset_pks[backend_response['uuid']] = \ existing_survey.pk - # KobocatXForm has a foreign key on KobocatUser, not on User - xforms = KobocatXForm.objects.filter(user_id=user.pk).all() + # XForm has a foreign key on KobocatUser, not on User + xforms = XForm.objects.filter(user_id=user.pk).all() for xform in xforms: try: with transaction.atomic(): diff --git a/kpi/serializers/v2/service_usage.py b/kpi/serializers/v2/service_usage.py index 105be87c70..30758a3a78 100644 --- a/kpi/serializers/v2/service_usage.py +++ b/kpi/serializers/v2/service_usage.py @@ -6,14 +6,14 @@ from rest_framework.fields import empty from kobo.apps.kobo_auth.shortcuts import User +from kobo.apps.openrosa.apps.logger.models import ( + DailyXFormSubmissionCounter as KobocatDailyXFormSubmissionCounter, + XForm as KobocatXForm, +) from kobo.apps.organizations.models import Organization from kobo.apps.organizations.utils import organization_month_start, organization_year_start from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES from kobo.apps.trackers.models import NLPUsageCounter -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatXForm, - KobocatDailyXFormSubmissionCounter, -) from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend from kpi.models.asset import Asset diff --git a/kpi/signals.py b/kpi/signals.py index 1543a155e5..d3ad80f9d2 100644 --- a/kpi/signals.py +++ b/kpi/signals.py @@ -10,9 +10,6 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.hook.models.hook import Hook from kpi.constants import PERM_ADD_SUBMISSIONS -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatUser, -) from kpi.deployment_backends.kc_access.utils import ( grant_kc_model_level_perms, diff --git a/kpi/tests/api/v2/test_api_attachments.py b/kpi/tests/api/v2/test_api_attachments.py index 3ceb4ae8e9..0f3c891cf7 100644 --- a/kpi/tests/api/v2/test_api_attachments.py +++ b/kpi/tests/api/v2/test_api_attachments.py @@ -95,7 +95,6 @@ def test_convert_mp4_to_mp3(self): ), querystring=query_dict.urlencode() ) - response = self.client.get(url) assert response.status_code == status.HTTP_200_OK assert response['Content-Type'] == 'audio/mpeg' diff --git a/kpi/tests/api/v2/test_api_paired_data.py b/kpi/tests/api/v2/test_api_paired_data.py index 5ca5121623..bf4fc5821f 100644 --- a/kpi/tests/api/v2/test_api_paired_data.py +++ b/kpi/tests/api/v2/test_api_paired_data.py @@ -1,6 +1,6 @@ # coding: utf-8 import unittest -from mock import patch, MagicMock +from mock import patch from django.urls import reverse from rest_framework import status @@ -390,13 +390,11 @@ def test_get_external_with_no_auth(self): # When owner's destination asset does not require any authentications, # everybody can see their data self.client.logout() - with patch( - 'kpi.deployment_backends.backends.MockDeploymentBackend.xform', - MagicMock(), - ) as xf_mock: - xf_mock.require_auth = False - response = self.client.get(self.external_xml_url) - self.assertEqual(response.status_code, status.HTTP_200_OK) + xform = self.destination_asset.deployment.xform + xform.require_auth = False + xform.save(update_fields=['require_auth']) + response = self.client.get(self.external_xml_url) + self.assertEqual(response.status_code, status.HTTP_200_OK) @unittest.skip(reason='Skip until mock back end 
supports XML submissions') def test_get_external_with_changed_source_fields(self): diff --git a/kpi/tests/utils/mock.py b/kpi/tests/utils/mock.py index 631c364f6a..1400228e93 100644 --- a/kpi/tests/utils/mock.py +++ b/kpi/tests/utils/mock.py @@ -9,9 +9,14 @@ from django.conf import settings from django.core.files import File + from django.core.files.storage import default_storage from rest_framework import status +from kobo.apps.openrosa.apps.logger.models.attachment import ( + Attachment, + upload_to, +) from kobo.apps.openrosa.libs.utils.image_tools import ( get_optimized_image_path, resize, @@ -107,83 +112,3 @@ def enketo_view_instance_response(request): } headers = {} return status.HTTP_201_CREATED, headers, json.dumps(resp_body) - - -class MockAttachment(AudioTranscodingMixin): - """ - Mock object to simulate KobocatAttachment. - Relationship with ReadOnlyKobocatInstance is ignored but could be implemented - - TODO Remove this class and use `Attachment` model everywhere in tests - """ - def __init__(self, pk: int, filename: str, mimetype: str = None, **kwargs): - - self.id = pk # To mimic Django model instances - self.pk = pk - - # Unit test `test_thumbnail_creation_on_demand()` is using real `Attachment` - # objects while other tests are using `MockAttachment` objects. - # If an Attachment object exists, let's assume unit test is using real - # Attachment objects. Otherwise, use MockAttachment. - from kobo.apps.openrosa.apps.logger.models import Attachment # Avoid circular import - - attachment_object = Attachment.objects.filter(pk=pk).first() - if attachment_object: - self.media_file = attachment_object.media_file - self.media_file_size = attachment_object.media_file_size - self.media_file_basename = attachment_object.media_file_basename - else: - basename = os.path.basename(filename) - file_ = os.path.join( - settings.BASE_DIR, - 'kpi', - 'tests', - basename - ) - self.media_file = File(open(file_, 'rb'), basename) - self.media_file.path = file_ - self.media_file_size = os.path.getsize(file_) - self.media_file_basename = basename - - self.content = self.media_file.read() - - if not mimetype: - self.mimetype, _ = guess_type(file_) - else: - self.mimetype = mimetype - - def __exit__(self, exc_type, exc_val, exc_tb): - self.media_file.close() - - @property - def absolute_path(self): - """ - Return the absolute path on local file system of the attachment. - Otherwise, return the AWS url (e.g. https://...) 
- """ - if isinstance(default_kobocat_storage, KobocatFileSystemStorage): - return self.media_file.path - - return self.media_file.url - - def protected_path( - self, format_: Optional[str] = None, suffix: Optional[str] = None - ) -> str: - if format_ == 'mp3': - extension = '.mp3' - with NamedTemporaryFile(suffix=extension) as f: - self.content = self.get_transcoded_audio(format_) - return f.name - else: - if suffix and self.mimetype.startswith('image/'): - optimized_image_path = get_optimized_image_path( - self.media_file.name, suffix - ) - if not default_storage.exists(optimized_image_path): - resize(self.media_file.name) - if isinstance(default_kobocat_storage, KobocatFileSystemStorage): - return default_kobocat_storage.path(optimized_image_path) - else: - return default_kobocat_storage.url(optimized_image_path) - else: - return self.absolute_path diff --git a/kpi/utils/database.py b/kpi/utils/database.py index 77bf0e70a8..6b1e624b53 100644 --- a/kpi/utils/database.py +++ b/kpi/utils/database.py @@ -1,6 +1,12 @@ import threading from functools import wraps +from django.conf import settings +from django.db import ( + connections, + models, +) + thread_local = threading.local() @@ -26,3 +32,38 @@ def inner(*args, **kwargs): def get_thread_local(attr, default=None): return getattr(thread_local, attr, None) or default + + +def update_autofield_sequence( + model: models.Model, using: str = settings.OPENROSA_DB_ALIAS +): + """ + Fixes the PostgreSQL sequence for the first (and only?) `AutoField` on + `model`, à la `manage.py sqlsequencereset` + """ + # Updating sequences on fresh environments fails because the only user + # in the DB is django-guardian AnonymousUser and `max(pk)` returns -1. + # Error: + # > setval: value -1 is out of bounds for sequence + # Using abs() and testing if max(pk) equals -1, leaves the sequence alone. 
+ sql_template = ( + "SELECT setval(" + " pg_get_serial_sequence('{table}','{column}'), " + " abs(coalesce(max({column}), 1)), " + " max({column}) IS NOT null and max({column}) != -1" + ") " + "FROM {table};" + ) + autofield = None + for f in model._meta.get_fields(): + if isinstance(f, models.AutoField): + autofield = f + break + if not autofield: + return + query = sql_template.format( + table=model._meta.db_table, column=autofield.column + ) + connection = connections[using] + with connection.cursor() as cursor: + cursor.execute(query) diff --git a/kpi/utils/project_view_exports.py b/kpi/utils/project_view_exports.py index 96044fca44..f92df149ed 100644 --- a/kpi/utils/project_view_exports.py +++ b/kpi/utils/project_view_exports.py @@ -2,14 +2,15 @@ from __future__ import annotations import csv from io import StringIO +from typing import Union from django.conf import settings from django.db.models import Count, F, Q from django.db.models.query import QuerySet from kobo.apps.kobo_auth.shortcuts import User +from kobo.apps.openrosa.apps.logger.models.xform import XForm as KobocatXForm from kpi.constants import ASSET_TYPE_SURVEY -from kpi.deployment_backends.kc_access.shadow_models import KobocatXForm from kpi.models import Asset from kpi.utils.project_views import get_region_for_view diff --git a/kpi/views/v2/attachment.py b/kpi/views/v2/attachment.py index 75aa81887f..b35c1dd4d0 100644 --- a/kpi/views/v2/attachment.py +++ b/kpi/views/v2/attachment.py @@ -147,7 +147,7 @@ def _get_response( else None ) return Response( - attachment.content, + attachment.media_file, content_type=content_type, ) From fb05fb725f6cf2f7373464632bb6d58e7e7bf346 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 16 Jul 2024 16:16:23 -0400 Subject: [PATCH 004/119] Remove import shadow model aliases --- hub/admin/extend_user.py | 6 ++-- hub/models/extra_user_detail.py | 10 +++--- kobo/apps/accounts/mfa/models.py | 10 +++--- .../openrosa/apps/main/models/user_profile.py | 35 +++++++++++++------ kobo/apps/project_ownership/utils.py | 8 ++--- kobo/apps/superuser_stats/models.py | 6 ++-- kobo/apps/superuser_stats/tasks.py | 30 ++++++++-------- kobo/apps/trackers/submission_utils.py | 8 ++--- kobo/apps/trash_bin/models/project.py | 6 ++-- kpi/deployment_backends/base_backend.py | 5 --- .../kc_access/shadow_models.py | 23 ------------ kpi/deployment_backends/kobocat_backend.py | 4 --- kpi/serializers/v2/service_usage.py | 8 ++--- kpi/utils/project_view_exports.py | 4 +-- 14 files changed, 68 insertions(+), 95 deletions(-) diff --git a/hub/admin/extend_user.py b/hub/admin/extend_user.py index ca545a624f..0ee48c965a 100644 --- a/hub/admin/extend_user.py +++ b/hub/admin/extend_user.py @@ -21,9 +21,7 @@ USERNAME_INVALID_MESSAGE, username_validators, ) -from kobo.apps.openrosa.apps.logger.models import ( - MonthlyXFormSubmissionCounter as KobocatMonthlyXFormSubmissionCounter, -) +from kobo.apps.openrosa.apps.logger.models import MonthlyXFormSubmissionCounter from kobo.apps.organizations.models import OrganizationUser from kobo.apps.trash_bin.exceptions import TrashIntegrityError from kobo.apps.trash_bin.models.account import AccountTrash @@ -259,7 +257,7 @@ def monthly_submission_count(self, obj): displayed in the Django admin user changelist page """ today = timezone.now().date() - instances = KobocatMonthlyXFormSubmissionCounter.objects.filter( + instances = MonthlyXFormSubmissionCounter.objects.filter( user_id=obj.id, year=today.year, month=today.month, diff --git a/hub/models/extra_user_detail.py 
b/hub/models/extra_user_detail.py index 3c4213aa23..990f091910 100644 --- a/hub/models/extra_user_detail.py +++ b/hub/models/extra_user_detail.py @@ -1,9 +1,7 @@ from django.conf import settings from django.db import models -from kobo.apps.openrosa.apps.main.models import ( - UserProfile as KobocatUserProfile, -) +from kobo.apps.openrosa.apps.main.models import UserProfile from kpi.fields import KpiUidField from kpi.mixins import StandardizeSearchableFieldMixin @@ -45,9 +43,9 @@ def save( update_fields=update_fields, ) - # Sync `validated_password` field to `KobocatUserProfile` only when + # Sync `validated_password` field to `UserProfile` only when # this object is updated to avoid a race condition and an IntegrityError - # when trying to save `KobocatUserProfile` object whereas the related + # when trying to save `UserProfile` object whereas the related # `KobocatUser` object has not been created yet. if ( not settings.TESTING @@ -57,7 +55,7 @@ def save( or (update_fields and 'validated_password' in update_fields) ) ): - KobocatUserProfile.set_password_details( + UserProfile.set_password_details( self.user.id, self.validated_password, ) diff --git a/kobo/apps/accounts/mfa/models.py b/kobo/apps/accounts/mfa/models.py index 6c822a0e16..0891a24a03 100644 --- a/kobo/apps/accounts/mfa/models.py +++ b/kobo/apps/accounts/mfa/models.py @@ -8,9 +8,7 @@ MFAMethodAdmin as TrenchMFAMethodAdmin, ) -from kobo.apps.openrosa.apps.main.models import ( - UserProfile as KobocatUserProfile, -) +from kobo.apps.openrosa.apps.main.models import UserProfile class MfaAvailableToUser(models.Model): @@ -74,7 +72,7 @@ def save( Update user's profile in KoBoCAT database. """ if not settings.TESTING and not created: - KobocatUserProfile.set_mfa_status( + UserProfile.set_mfa_status( user_id=self.user.pk, is_active=self.is_active ) @@ -83,10 +81,10 @@ def delete(self, using=None, keep_parents=False): super().delete(using, keep_parents) """ - Update user's profile in KoBoCAT database. + Update user's profile in KoboCAT database. 
""" if not settings.TESTING: - KobocatUserProfile.set_mfa_status( + UserProfile.set_mfa_status( user_id=user_id, is_active=False ) diff --git a/kobo/apps/openrosa/apps/main/models/user_profile.py b/kobo/apps/openrosa/apps/main/models/user_profile.py index 37a191370b..fb36140aa3 100644 --- a/kobo/apps/openrosa/apps/main/models/user_profile.py +++ b/kobo/apps/openrosa/apps/main/models/user_profile.py @@ -35,6 +35,13 @@ class UserProfile(models.Model): is_mfa_active = LazyDefaultBooleanField(default=False) validated_password = models.BooleanField(default=True) + class Meta: + app_label = 'main' + permissions = ( + ('can_add_xform', "Can add/upload an xform to user profile"), + ('view_profile', "Can view user profile"), + ) + def __str__(self): return '%s[%s]' % (self.name, self.user.username) @@ -46,17 +53,25 @@ def gravatar(self): def gravatar_exists(self): return gravatar_exists(self.user) - @property - def twitter_clean(self): - if self.twitter.startswith("@"): - return self.twitter[1:] - return self.twitter + @classmethod + def set_mfa_status(cls, user_id: int, is_active: bool): + user_profile, created = cls.objects.get_or_create(user_id=user_id) + user_profile.is_mfa_active = int(is_active) + user_profile.save(update_fields=['is_mfa_active']) - class Meta: - app_label = 'main' - permissions = ( - ('can_add_xform', "Can add/upload an xform to user profile"), - ('view_profile', "Can view user profile"), + @classmethod + def set_password_details( + cls, + user_id: int, + validated: bool, + ): + """ + Update the kobocat user's password_change_date and validated_password fields + """ + user_profile, created = cls.objects.get_or_create(user_id=user_id) + user_profile.validated_password = validated + user_profile.save( + update_fields=['validated_password'] ) diff --git a/kobo/apps/project_ownership/utils.py b/kobo/apps/project_ownership/utils.py index eec2855850..f3db74104c 100644 --- a/kobo/apps/project_ownership/utils.py +++ b/kobo/apps/project_ownership/utils.py @@ -4,8 +4,8 @@ from django.apps import apps from django.utils import timezone -from kobo.apps.openrosa.apps.logger.models import Attachment as KobocatAttachment -from kobo.apps.openrosa.apps.main.models import MetaData as KobocatMetadata +from kobo.apps.openrosa.apps.logger.models import Attachment +from kobo.apps.openrosa.apps.main.models import MetaData from kpi.models.asset import AssetFile from .models.choices import TransferStatusChoices, TransferStatusTypeChoices from .exceptions import AsyncTaskException @@ -53,7 +53,7 @@ def move_attachments(transfer: 'project_ownership.Transfer'): _mark_task_as_successful(transfer, async_task_type) return - attachments = KobocatAttachment.all_objects.filter( + attachments = Attachment.all_objects.filter( instance_id__in=submission_ids ).exclude(media_file__startswith=f'{transfer.asset.owner.username}/') @@ -95,7 +95,7 @@ def move_media_files(transfer: 'project_ownership.Transfer'): if transfer.asset.has_deployment: kc_files = { kc_file.file_hash: kc_file - for kc_file in KobocatMetadata.objects.filter( + for kc_file in MetaData.objects.filter( xform_id=transfer.asset.deployment.xform.pk ) } diff --git a/kobo/apps/superuser_stats/models.py b/kobo/apps/superuser_stats/models.py index 61ab97cf74..6c8f824cc8 100644 --- a/kobo/apps/superuser_stats/models.py +++ b/kobo/apps/superuser_stats/models.py @@ -1,9 +1,9 @@ from kobo.apps.openrosa.apps.logger.models import ( - MonthlyXFormSubmissionCounter as KobocatMonthlyXFormSubmissionCounter, + MonthlyXFormSubmissionCounter ) -class 
SuperuserStatsModel(KobocatMonthlyXFormSubmissionCounter): +class SuperuserStatsModel(MonthlyXFormSubmissionCounter): """ Spoiler: Kludgy! @@ -11,7 +11,7 @@ class SuperuserStatsModel(KobocatMonthlyXFormSubmissionCounter): the superuser section in Django Admin. Django needs a model to register an admin model, so it extends a shadow model (as a proxy) to avoid creating new migrations. - It extends `KobocatMonthlyXFormSubmissionCounter` but it could have + It extends `MonthlyXFormSubmissionCounter` but it could have been anyone of the (shadow) models since we do not add/update/delete objects from the admin interface. The HTML template only lists the available reports. """ diff --git a/kobo/apps/superuser_stats/tasks.py b/kobo/apps/superuser_stats/tasks.py index 06ab534fc7..f858fc6b96 100644 --- a/kobo/apps/superuser_stats/tasks.py +++ b/kobo/apps/superuser_stats/tasks.py @@ -32,13 +32,11 @@ from kobo.static_lists import COUNTRIES from kpi.constants import ASSET_TYPE_SURVEY from kobo.apps.openrosa.apps.logger.models import ( - Instance as ReadOnlyKobocatInstance, - MonthlyXFormSubmissionCounter as KobocatMonthlyXFormSubmissionCounter, - XForm as KobocatXForm, -) -from kobo.apps.openrosa.apps.main.models import ( - UserProfile as KobocatUserProfile, + Instance, + MonthlyXFormSubmissionCounter, + XForm, ) +from kobo.apps.openrosa.apps.main.models import UserProfile from kpi.models.asset import Asset, AssetDeploymentStatus @@ -63,7 +61,7 @@ def get_row_for_country(code_: str, label_: str): ) # Doing it this way because this report is focused on crises in # very specific time frames - instances_count = ReadOnlyKobocatInstance.objects.filter( + instances_count = Instance.objects.filter( xform_id__in=list(xform_ids), date_created__date__range=(start_date, end_date), ).count() @@ -112,7 +110,7 @@ def generate_continued_usage_report(output_filename: str, end_date: str): date_created__date__range=(twelve_months_time, end_date), ) submissions_count = ( - KobocatMonthlyXFormSubmissionCounter.objects.annotate( + MonthlyXFormSubmissionCounter.objects.annotate( date=Cast( Concat(F('year'), Value('-'), F('month'), Value('-'), 1), DateField(), @@ -202,7 +200,7 @@ def generate_domain_report(output_filename: str, start_date: str, end_date: str) # get a count of the submissions domain_submissions = { - domain: KobocatMonthlyXFormSubmissionCounter.objects.annotate( + domain: MonthlyXFormSubmissionCounter.objects.annotate( date=Cast( Concat(F('year'), Value('-'), F('month'), Value('-'), 1), DateField(), @@ -272,11 +270,11 @@ def generate_forms_count_by_submission_range(output_filename: str): today = datetime.today() date_ = today - relativedelta(years=1) - no_submissions = KobocatXForm.objects.filter( + no_submissions = XForm.objects.filter( date_created__date__gte=date_, num_of_submissions=0 ) - queryset = ReadOnlyKobocatInstance.objects.values( + queryset = Instance.objects.values( 'xform_id' ).filter( date_created__date__gte=date_, @@ -300,7 +298,7 @@ def generate_forms_count_by_submission_range(output_filename: str): @shared_task def generate_media_storage_report(output_filename: str): - attachments = KobocatUserProfile.objects.all().values( + attachments = UserProfile.objects.all().values( 'user__username', 'attachment_storage_bytes', ) @@ -369,8 +367,8 @@ def get_row_for_user(u: 'kobo_auth.User') -> list: row_ = [] try: - profile = KobocatUserProfile.objects.get(user_id=u.pk) - except KobocatUserProfile.DoesNotExist: + profile = UserProfile.objects.get(user_id=u.pk) + except UserProfile.DoesNotExist: profile 
= None try: @@ -408,7 +406,7 @@ def get_row_for_user(u: 'kobo_auth.User') -> list: else: row_.append('') - row_.append(KobocatXForm.objects.filter(user=u).count()) + row_.append(XForm.objects.filter(user=u).count()) if profile: row_.append(profile.num_of_submissions) @@ -477,7 +475,7 @@ def generate_user_statistics_report( # Get records from SubmissionCounter records = ( - KobocatMonthlyXFormSubmissionCounter.objects.annotate( + MonthlyXFormSubmissionCounter.objects.annotate( date=Cast( Concat(F('year'), Value('-'), F('month'), Value('-'), 1), DateField(), diff --git a/kobo/apps/trackers/submission_utils.py b/kobo/apps/trackers/submission_utils.py index 28bb219499..a47118a28e 100644 --- a/kobo/apps/trackers/submission_utils.py +++ b/kobo/apps/trackers/submission_utils.py @@ -6,8 +6,8 @@ from model_bakery import baker from kobo.apps.openrosa.apps.logger.models import ( - DailyXFormSubmissionCounter as KobocatDailyXFormSubmissionCounter, - XForm as KobocatXForm, + DailyXFormSubmissionCounter, + XForm, ) from kpi.models import Asset from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE @@ -61,7 +61,7 @@ def expected_file_size(submissions: int = 1): def update_xform_counters( - asset: Asset, xform: KobocatXForm = None, submissions: int = 1 + asset: Asset, xform: XForm = None, submissions: int = 1 ): """ Create/update the daily submission counter and the shadow xform we use to query it @@ -103,7 +103,7 @@ def update_xform_counters( ) xform.save() - counter = KobocatDailyXFormSubmissionCounter.objects.filter( + counter = DailyXFormSubmissionCounter.objects.filter( date=today.date(), user_id=asset.owner.id, ).first() diff --git a/kobo/apps/trash_bin/models/project.py b/kobo/apps/trash_bin/models/project.py index c3b52f3599..e0ee2354a7 100644 --- a/kobo/apps/trash_bin/models/project.py +++ b/kobo/apps/trash_bin/models/project.py @@ -4,9 +4,7 @@ from django.db import models, transaction from django.utils.timezone import now -from kobo.apps.openrosa.apps.logger.models import ( - XForm as KobocatXForm, -) +from kobo.apps.openrosa.apps.logger.models import XForm from kobo.apps.project_ownership.models import ( Invite, InviteStatusChoices, @@ -92,7 +90,7 @@ def toggle_asset_statuses( ).update(status=InviteStatusChoices.CANCELLED) if not settings.TESTING: - kc_updated = KobocatXForm.objects.filter( + kc_updated = XForm.objects.filter( **kc_filter_params ).update(**kc_update_params) assert updated >= kc_updated diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index 9afb004403..825e663235 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -13,7 +13,6 @@ from bson import json_util from django.conf import settings -from django.core.files.storage import default_storage from django.db.models.query import QuerySet from django.utils import timezone from django.utils.translation import gettext_lazy as t @@ -749,10 +748,6 @@ def version(self): def version_id(self): return self.get_data('version') - @property - def _open_rosa_server_storage(self): - return default_storage - def _get_metadata_queryset(self, file_type: str) -> Union[QuerySet, list]: """ Returns a list of objects, or a QuerySet to pass to Celery to diff --git a/kpi/deployment_backends/kc_access/shadow_models.py b/kpi/deployment_backends/kc_access/shadow_models.py index f78bb6d418..d810adbf07 100644 --- a/kpi/deployment_backends/kc_access/shadow_models.py +++ b/kpi/deployment_backends/kc_access/shadow_models.py @@ -524,29 +524,6 @@ def 
set_password_details( ) -class KobocatToken(ShadowModel): - - key = models.CharField("Key", max_length=40, primary_key=True) - user = models.OneToOneField(KobocatUser, - related_name='auth_token', - on_delete=models.CASCADE, verbose_name="User") - created = models.DateTimeField("Created", auto_now_add=True) - - class Meta(ShadowModel.Meta): - db_table = "authtoken_token" - - @classmethod - def sync(cls, auth_token): - try: - # Token use a One-to-One relationship on User. - # Thus, we can retrieve tokens from users' id. - kc_auth_token = cls.objects.get(user_id=auth_token.user_id) - except KobocatToken.DoesNotExist: - kc_auth_token = cls(pk=auth_token.pk, user_id=auth_token.user_id) - - kc_auth_token.save() - - class KobocatXForm(ShadowModel): class Meta(ShadowModel.Meta): diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py index a9b0984457..3b07760d48 100644 --- a/kpi/deployment_backends/kobocat_backend.py +++ b/kpi/deployment_backends/kobocat_backend.py @@ -1407,10 +1407,6 @@ def _last_submission_time(self): return last_submission_time( xform_id_string=id_string, user_id=self.asset.owner.pk) - @property - def _open_rosa_server_storage(self): - return default_kobocat_storage - def __delete_kc_metadata( self, kc_file_: dict, file_: Union[AssetFile, PairedData] = None ): diff --git a/kpi/serializers/v2/service_usage.py b/kpi/serializers/v2/service_usage.py index 30758a3a78..09388d0d61 100644 --- a/kpi/serializers/v2/service_usage.py +++ b/kpi/serializers/v2/service_usage.py @@ -7,8 +7,8 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.models import ( - DailyXFormSubmissionCounter as KobocatDailyXFormSubmissionCounter, - XForm as KobocatXForm, + DailyXFormSubmissionCounter, + XForm, ) from kobo.apps.organizations.models import Organization from kobo.apps.organizations.utils import organization_month_start, organization_year_start @@ -237,7 +237,7 @@ def _get_storage_usage(self): Users are represented by their ids with `self._user_ids` """ - xforms = KobocatXForm.objects.only('attachment_storage_bytes', 'id').exclude( + xforms = XForm.objects.only('attachment_storage_bytes', 'id').exclude( pending_delete=True ).filter(self._user_id_query) @@ -253,7 +253,7 @@ def _get_submission_counters(self, month_filter, year_filter): Users are represented by their ids with `self._user_ids` """ - submission_count = KobocatDailyXFormSubmissionCounter.objects.only( + submission_count = DailyXFormSubmissionCounter.objects.only( 'counter', 'user_id' ).filter(self._user_id_query).aggregate( all_time=Coalesce(Sum('counter'), 0), diff --git a/kpi/utils/project_view_exports.py b/kpi/utils/project_view_exports.py index f92df149ed..5655f4496f 100644 --- a/kpi/utils/project_view_exports.py +++ b/kpi/utils/project_view_exports.py @@ -9,7 +9,7 @@ from django.db.models.query import QuerySet from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.models.xform import XForm as KobocatXForm +from kobo.apps.openrosa.apps.logger.models.xform import XForm from kpi.constants import ASSET_TYPE_SURVEY from kpi.models import Asset from kpi.utils.project_views import get_region_for_view @@ -118,7 +118,7 @@ def get_q(countries: list[str], export_type: str) -> QuerySet: def get_submission_count(xform_id: int) -> int: result = ( - KobocatXForm.objects.values('num_of_submissions') + XForm.objects.values('num_of_submissions') .filter(pk=xform_id) .first() ) From 3a8d00f7c4adc6c357fa36d556b94489542d26c3 Mon Sep 17 00:00:00 2001 
From: Olivier Leger Date: Thu, 18 Jul 2024 11:13:45 -0400 Subject: [PATCH 005/119] Remove debug print --- kpi/deployment_backends/mock_backend.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 19c7e1025b..75fa7d580e 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -329,8 +329,6 @@ def get_attachment( is_good_file = int(attachment['id']) == int(attachment_id) if is_good_file: - print("ATTACHMENT:" + attachment['filename'], flush=True) - print('attachment', attachment) return self._get_attachment_object( attachment_id=attachment['id'], submission_xml=submission_xml, From 4fa415bdb51ad2818dc41826d6cc90a1e06fb5bc Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 18 Jul 2024 14:24:20 -0400 Subject: [PATCH 006/119] WIP - New OpenRosa deployment backend --- kobo/apps/trash_bin/utils.py | 9 +- kobo/settings/base.py | 3 +- kpi/deployment_backends/backends.py | 2 + kpi/deployment_backends/base_backend.py | 18 +- kpi/deployment_backends/kobocat_backend.py | 7 - kpi/deployment_backends/mixin.py | 1 - kpi/deployment_backends/mock_backend.py | 2 +- kpi/deployment_backends/openrosa_backend.py | 1505 +++++++++++++++++++ kpi/models/paired_data.py | 2 - kpi/serializers/v2/deployment.py | 10 +- kpi/signals.py | 2 +- kpi/tasks.py | 12 +- kpi/tests/api/v2/test_api_submissions.py | 16 +- kpi/views/v2/data.py | 16 +- 14 files changed, 1549 insertions(+), 56 deletions(-) create mode 100644 kpi/deployment_backends/openrosa_backend.py diff --git a/kobo/apps/trash_bin/utils.py b/kobo/apps/trash_bin/utils.py index 4f2e96fb22..c5b7d22487 100644 --- a/kobo/apps/trash_bin/utils.py +++ b/kobo/apps/trash_bin/utils.py @@ -287,11 +287,6 @@ def replace_user_with_placeholder( def _delete_submissions(request_author: settings.AUTH_USER_MODEL, asset: 'kpi.Asset'): - ( - app_label, - model_name, - ) = asset.deployment.submission_model.get_app_label_and_model_name() - while True: audit_logs = [] submissions = list(asset.deployment.get_submissions( @@ -315,8 +310,8 @@ def _delete_submissions(request_author: settings.AUTH_USER_MODEL, asset: 'kpi.As submission_ids = [] for submission in submissions: audit_logs.append(AuditLog( - app_label=app_label, - model_name=model_name, + app_label='logger', + model_name='instance', object_id=submission['_id'], user=request_author, user_uid=request_author.extra_details.uid, diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 26d1ee8a0f..4e72e62a72 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -1012,7 +1012,7 @@ def __init__(self, *args, **kwargs): KOBOFORM_URL = os.environ.get('KOBOFORM_URL', 'http://kpi') if 'KOBOCAT_URL' in os.environ: - DEFAULT_DEPLOYMENT_BACKEND = 'kobocat' + DEFAULT_DEPLOYMENT_BACKEND = 'openrosa' else: DEFAULT_DEPLOYMENT_BACKEND = 'mock' @@ -1020,6 +1020,7 @@ def __init__(self, *args, **kwargs): ''' Stripe configuration intended for kf.kobotoolbox.org only, tracks usage limit exceptions ''' STRIPE_ENABLED = env.bool("STRIPE_ENABLED", False) + def dj_stripe_request_callback_method(): # This method exists because dj-stripe's documentation doesn't reflect reality. 
# It claims that DJSTRIPE_SUBSCRIBER_MODEL no longer needs a request callback but diff --git a/kpi/deployment_backends/backends.py b/kpi/deployment_backends/backends.py index 04e49705a4..8f972ba79e 100644 --- a/kpi/deployment_backends/backends.py +++ b/kpi/deployment_backends/backends.py @@ -1,8 +1,10 @@ # coding: utf-8 from .mock_backend import MockDeploymentBackend from .kobocat_backend import KobocatDeploymentBackend +from .openrosa_backend import OpenRosaDeploymentBackend DEPLOYMENT_BACKENDS = { 'mock': MockDeploymentBackend, 'kobocat': KobocatDeploymentBackend, + 'openrosa': OpenRosaDeploymentBackend, } diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index 825e663235..5131d8ac8a 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -184,7 +184,6 @@ def bulk_update_submissions( return self.prepare_bulk_update_response(kc_responses) - @abc.abstractmethod def calculated_submission_count(self, user: settings.AUTH_USER_MODEL, **kwargs): pass @@ -198,8 +197,11 @@ def connect(self, active=False): def form_uuid(self): pass + @staticmethod @abc.abstractmethod - def nlp_tracking_data(self, start_date: Optional[datetime.date] = None): + def nlp_tracking_data( + asset_ids: list[int], start_date: Optional[datetime.date] = None + ): pass def delete(self): @@ -446,11 +448,13 @@ def set_status(self, status): self.save_to_db({'status': status}) @abc.abstractmethod - def set_validation_status(self, - submission_id: int, - user: settings.AUTH_USER_MODEL, - data: dict, - method: str) -> dict: + def set_validation_status( + self, + submission_id: int, + user: settings.AUTH_USER_MODEL, + data: dict, + method: str, + ) -> dict: pass @abc.abstractmethod diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py index 3b07760d48..b67355abc6 100644 --- a/kpi/deployment_backends/kobocat_backend.py +++ b/kpi/deployment_backends/kobocat_backend.py @@ -1258,13 +1258,6 @@ def xform_id(self): def xform_id_string(self): return self.get_data('backend_response.id_string') - @property - def timestamp(self): - try: - return self.backend_response['date_modified'] - except KeyError: - return None - @staticmethod @contextmanager def suspend_submissions(user_ids: list[int]): diff --git a/kpi/deployment_backends/mixin.py b/kpi/deployment_backends/mixin.py index ae1690bf14..4a448106b3 100644 --- a/kpi/deployment_backends/mixin.py +++ b/kpi/deployment_backends/mixin.py @@ -20,7 +20,6 @@ def sync_media_files_async(self, always=True): file_type=AssetFile.FORM_MEDIA, synced_with_backend=False ).exists(): self.save(create_version=False, adjust_content=False) - # Not using .delay() due to circular import in tasks.py celery.current_app.send_task('kpi.tasks.sync_media_files', (self.uid,)) diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 75fa7d580e..0f36ac2bf3 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -105,7 +105,7 @@ def generate_uuid_for_form(): def form_uuid(self): return 'formhub-uuid' # to match existing tests - def nlp_tracking_data(self, start_date=None): + def nlp_tracking_data(asset_ids=None): """ Get the NLP tracking data since a specified date If no date is provided, get all-time data diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py new file mode 100644 index 0000000000..6bc8eda1aa --- /dev/null +++ 
b/kpi/deployment_backends/openrosa_backend.py @@ -0,0 +1,1505 @@ +from __future__ import annotations + +import io +import json +import re +from collections import defaultdict +from contextlib import contextmanager +from datetime import date, datetime +from typing import Generator, Optional, Union +from urllib.parse import urlparse +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + +import requests +import redis.exceptions +from defusedxml import ElementTree as DET +from django.conf import settings +from django.core.files import File +from django.core.files.base import ContentFile +from django.db.models import Sum, F +from django.db.models.functions import Coalesce +from django.db.models.query import QuerySet +from django.utils import timezone +from django.utils.translation import gettext_lazy as t +from django_redis import get_redis_connection +from kobo_service_account.utils import get_request_headers +from rest_framework import status + +from kobo.apps.openrosa.apps.logger.models import ( + Attachment, + DailyXFormSubmissionCounter, + MonthlyXFormSubmissionCounter, + Instance, + XForm, +) +from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile +from kobo.apps.openrosa.libs.utils.logger_tools import publish_xls_form +from kobo.apps.subsequences.utils import stream_with_extras +from kobo.apps.trackers.models import NLPUsageCounter +from kpi.constants import ( + SUBMISSION_FORMAT_TYPE_JSON, + SUBMISSION_FORMAT_TYPE_XML, + PERM_FROM_KC_ONLY, + PERM_CHANGE_SUBMISSIONS, + PERM_DELETE_SUBMISSIONS, + PERM_PARTIAL_SUBMISSIONS, + PERM_VALIDATE_SUBMISSIONS, + PERM_VIEW_SUBMISSIONS, +) +from kpi.deployment_backends.kc_access.storage import ( + default_kobocat_storage as default_storage, +) +from kpi.exceptions import ( + AttachmentNotFoundException, + InvalidXFormException, + KobocatCommunicationError, + SubmissionIntegrityError, + SubmissionNotFoundException, + XPathNotFoundException, +) +from kpi.interfaces.sync_backend_media import SyncBackendMediaInterface +from kpi.models.asset_file import AssetFile +from kpi.models.object_permission import ObjectPermission +from kpi.models.paired_data import PairedData +from kpi.utils.django_orm_helper import UpdateJSONFieldAttributes +from kpi.utils.log import logging +from kpi.utils.mongo_helper import MongoHelper +from kpi.utils.object_permission import get_database_user +from kpi.utils.permissions import is_user_anonymous +from kpi.utils.xml import fromstring_preserve_root_xmlns, xml_tostring +from .base_backend import BaseDeploymentBackend +from .kc_access.utils import ( + assign_applicable_kc_permissions, + kc_transaction_atomic, + last_submission_time +) +from ..exceptions import ( + BadFormatException, + KobocatDeploymentException, + KobocatDuplicateSubmissionException, +) + + +class OpenRosaDeploymentBackend(BaseDeploymentBackend): + """ + Used to deploy a project into KoboCAT. + """ + + SYNCED_DATA_FILE_TYPES = { + AssetFile.FORM_MEDIA: 'media', + AssetFile.PAIRED_DATA: 'paired_data', + } + + def __init__(self, asset): + super().__init__(asset) + self._xform = None + + @property + def attachment_storage_bytes(self): + try: + return self.xform.attachment_storage_bytes + except InvalidXFormException: + return 0 + + def bulk_assign_mapped_perms(self): + """ + Bulk assign all KoBoCAT permissions related to KPI permissions. + Useful to assign permissions retroactively upon deployment. + Beware: it only adds permissions, it does not remove or sync permissions. 
+ """ + users_with_perms = self.asset.get_users_with_perms(attach_perms=True) + + # if only the owner has permissions, no need to go further + if ( + len(users_with_perms) == 1 + and list(users_with_perms)[0].id == self.asset.owner_id + ): + return + + with kc_transaction_atomic(): + for user, perms in users_with_perms.items(): + if user.id == self.asset.owner_id: + continue + assign_applicable_kc_permissions(self.asset, user, perms) + + def calculated_submission_count( + self, user: settings.AUTH_USER_MODEL, **kwargs + ) -> int: + params = self.validate_submission_list_params( + user, validate_count=True, **kwargs + ) + return MongoHelper.get_count(self.mongo_userform_id, **params) + + def connect(self, active=False): + """ + `POST` initial survey content to KoBoCAT and create a new project. + Store results in deployment data. + CAUTION: Does not save deployment data to the database! + """ + xlsx_io = self.asset.to_xlsx_io( + versioned=True, append={ + 'settings': { + 'id_string': self.asset.uid, + 'form_title': self.asset.name, + } + } + ) + xlsx_file = ContentFile(xlsx_io.read(), name=f'{self.asset.uid}.xlsx') + + with kc_transaction_atomic(): + self._xform = publish_xls_form(xlsx_file, self.asset.owner) + self._xform.downloadable = active + self._xform.has_kpi_hooks = self.asset.has_active_hooks + self._xform.kpi_asset_uid = self.asset.uid + self._xform.save( + update_fields=['downloadable', 'has_kpi_hooks', 'kpi_asset_uid'] + ) + + self.store_data( + { + 'backend': 'openrosa', + 'active': active, + 'backend_response': { + 'formid': self._xform.pk, + 'uuid': self._xform.uuid, + 'id_string': self._xform.id_string, + 'kpi_asset_uid': self.asset.uid, + }, + 'version': self.asset.version_id, + } + ) + + @property + def form_uuid(self): + try: + return self.backend_response['uuid'] + except KeyError: + logging.warning( + 'OpenRosa backend response has no `uuid`', exc_info=True + ) + return None + + @staticmethod + def format_openrosa_datetime(dt: Optional[datetime] = None) -> str: + """ + Format a given datetime object or generate a new timestamp matching the + OpenRosa datetime formatting + """ + if dt is None: + dt = datetime.now(tz=ZoneInfo('UTC')) + + # Awkward check, but it's prescribed by + # https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive + if dt.tzinfo is None or dt.tzinfo.utcoffset(None) is None: + raise ValueError('An offset-aware datetime is required') + return dt.isoformat('T', 'milliseconds') + + def delete(self): + """ + WARNING! Deletes all submitted data! + """ + try: + self._xform.delete() + except XForm.DoesNotExist: + pass + + super().delete() + + # FIXME + def delete_submission( + self, submission_id: int, user: settings.AUTH_USER_MODEL + ) -> dict: + """ + Delete a submission through KoBoCAT proxy + + It returns a dictionary which can used as Response object arguments + """ + + self.validate_access_with_partial_perms( + user=user, + perm=PERM_DELETE_SUBMISSIONS, + submission_ids=[submission_id] + ) + + Instance.objects.filter(pk=submission_id).delete() + + # FIXME + return 1/0 + + # FIXME + def delete_submissions( + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs + ) -> dict: + """ + Bulk delete provided submissions. 
+ + `data` should contain the submission ids or the query to get the subset + of submissions to delete + Example: + {"submission_ids": [1, 2, 3]} + or + {"query": {"Question": "response"} + """ + + submission_ids = self.validate_access_with_partial_perms( + user=user, + perm=PERM_DELETE_SUBMISSIONS, + submission_ids=data['submission_ids'], + query=data['query'], + ) + + # If `submission_ids` is not empty, user has partial permissions. + # Otherwise, they have full access. + if submission_ids: + # Remove query from `data` because all the submission ids have been + # already retrieved + data.pop('query', None) + data['submission_ids'] = submission_ids + + kc_url = self.submission_list_url + kc_request = requests.Request(method='DELETE', url=kc_url, json=data) + kc_response = self.__kobocat_proxy_request(kc_request, user) + + drf_response = self.__prepare_as_drf_response_signature(kc_response) + return drf_response + + # FIXME + def duplicate_submission( + self, submission_id: int, user: 'settings.AUTH_USER_MODEL' + ) -> dict: + """ + Duplicates a single submission proxied through KoBoCAT. The submission + with the given `submission_id` is duplicated and the `start`, `end` and + `instanceID` parameters of the submission are reset before being posted + to KoBoCAT. + + Returns a dict with message response from KoBoCAT and uuid of created + submission if successful + + """ + + self.validate_access_with_partial_perms( + user=user, + perm=PERM_CHANGE_SUBMISSIONS, + submission_ids=[submission_id], + ) + + submission = self.get_submission( + submission_id, + user=user, + format_type=SUBMISSION_FORMAT_TYPE_XML, + ) + + # Get attachments for the duplicated submission if there are any + attachment_objects = Attachment.objects.filter( + instance_id=submission_id + ) + attachments = ( + {a.media_file_basename: a.media_file for a in attachment_objects} + if attachment_objects + else None + ) + + # parse XML string to ET object + xml_parsed = fromstring_preserve_root_xmlns(submission) + + # attempt to update XML fields for duplicate submission. Note that + # `start` and `end` are not guaranteed to be included in the XML object + _uuid, uuid_formatted = self.generate_new_instance_id() + date_formatted = self.format_openrosa_datetime() + for date_field in ('start', 'end'): + element = xml_parsed.find(date_field) + # Even if the element is found, `bool(element)` is `False`. How + # very un-Pythonic! + if element is not None: + element.text = date_formatted + # Rely on `meta/instanceID` being present. If it's absent, something is + # fishy enough to warrant raising an exception instead of continuing + # silently + xml_parsed.find(self.SUBMISSION_CURRENT_UUID_XPATH).text = ( + uuid_formatted + ) + + # + + kc_response = self.store_submission( + user, xml_tostring(xml_parsed), _uuid, attachments + ) + if kc_response.status_code == status.HTTP_201_CREATED: + return next(self.get_submissions(user, query={'_uuid': _uuid})) + else: + raise KobocatDuplicateSubmissionException + + # FIXME + def edit_submission( + self, + xml_submission_file: File, + user: settings.AUTH_USER_MODEL, + attachments: dict = None, + ): + """ + Edit a submission through KoBoCAT proxy on behalf of `user`. 
+ Attachments can be uploaded by passing a dictionary (name, File object) + + The returned Response should be in XML (expected format by Enketo Express) + """ + submission_xml = xml_submission_file.read() + try: + xml_root = fromstring_preserve_root_xmlns(submission_xml) + except DET.ParseError: + raise SubmissionIntegrityError( + t('Your submission XML is malformed.') + ) + try: + deprecated_uuid = xml_root.find( + self.SUBMISSION_DEPRECATED_UUID_XPATH + ).text + xform_uuid = xml_root.find(self.FORM_UUID_XPATH).text + except AttributeError: + raise SubmissionIntegrityError( + t('Your submission XML is missing critical elements.') + ) + # Remove UUID prefix + deprecated_uuid = deprecated_uuid[len('uuid:'):] + try: + instance = Instance.objects.get( + uuid=deprecated_uuid, + xform__uuid=xform_uuid, + xform__kpi_asset_uid=self.asset.uid, + ) + except Instance.DoesNotExist: + raise SubmissionIntegrityError( + t( + 'The submission you attempted to edit could not be found, ' + 'or you do not have access to it.' + ) + ) + + # Validate write access for users with partial permissions + self.validate_access_with_partial_perms( + user=user, + perm=PERM_CHANGE_SUBMISSIONS, + submission_ids=[instance.pk] + ) + + # Set the In-Memory file’s current position to 0 before passing it to + # Request. + xml_submission_file.seek(0) + files = {'xml_submission_file': xml_submission_file} + + # Combine all files altogether + if attachments: + files.update(attachments) + + kc_request = requests.Request( + method='POST', url=self.submission_url, files=files + ) + kc_response = self.__kobocat_proxy_request(kc_request, user) + return self.__prepare_as_drf_response_signature( + kc_response, expected_response_format='xml' + ) + + @property + def enketo_id(self): + if not (enketo_id := self.get_data('enketo_id')): + self.get_enketo_survey_links() + enketo_id = self.get_data('enketo_id') + return enketo_id + + @staticmethod + def normalize_internal_url(url: str) -> str: + """ + Normalize url to ensure KOBOCAT_INTERNAL_URL is used + """ + parsed_url = urlparse(url) + return f'{settings.KOBOCAT_INTERNAL_URL}{parsed_url.path}' + + def get_attachment( + self, + submission_id_or_uuid: Union[int, str], + user: settings.AUTH_USER_MODEL, + attachment_id: Optional[int] = None, + xpath: Optional[str] = None, + ) -> Attachment: + """ + Return an object which can be retrieved by its primary key or by XPath. + An exception is raised when the submission or the attachment is not found. + """ + submission_id = None + submission_uuid = None + try: + submission_id = int(submission_id_or_uuid) + except ValueError: + submission_uuid = submission_id_or_uuid + if submission_uuid: + # `_uuid` is the legacy identifier that changes (per OpenRosa spec) + # after every edit; `meta/rootUuid` remains consistent across + # edits. prefer the latter when fetching by UUID. 
+            candidates = list(
+                self.get_submissions(
+                    user,
+                    query={
+                        '$or': [
+                            {'meta/rootUuid': submission_uuid},
+                            {'_uuid': submission_uuid},
+                        ]
+                    },
+                    fields=['_id', 'meta/rootUuid', '_uuid'],
+                )
+            )
+            if not candidates:
+                raise SubmissionNotFoundException
+            for submission in candidates:
+                if submission.get('meta/rootUuid') == submission_uuid:
+                    submission_id = submission['_id']
+                    break
+            else:
+                # no submissions with matching `meta/rootUuid` were found;
+                # get the "first" result, despite there being no order
+                # specified, just for consistency with previous code
+                submission_id = candidates[0]['_id']
+
+        submission_xml = self.get_submission(
+            submission_id, user, format_type=SUBMISSION_FORMAT_TYPE_XML
+        )
+        if not submission_xml:
+            raise SubmissionNotFoundException
+
+        if xpath:
+            submission_root = fromstring_preserve_root_xmlns(submission_xml)
+            element = submission_root.find(xpath)
+            if element is None:
+                raise XPathNotFoundException
+            attachment_filename = element.text
+            filters = {
+                'media_file_basename': attachment_filename,
+            }
+        else:
+            filters = {
+                'pk': attachment_id,
+            }
+
+        filters['instance__id'] = submission_id
+        # Ensure the attachment actually belongs to this project!
+        filters['instance__xform_id'] = self.xform_id
+
+        try:
+            attachment = Attachment.objects.get(**filters)
+        except Attachment.DoesNotExist:
+            raise AttachmentNotFoundException
+
+        return attachment
+
+    def get_attachment_objects_from_dict(
+        self, submission: dict
+    ) -> Union[QuerySet, list]:
+
+        # First test that there are attachments to avoid a call to the DB for
+        # nothing
+        if not submission.get('_attachments'):
+            return []
+
+        # Get filenames from DB because Mongo does not contain the
+        # original basename.
+        # EE expects the original basename before Django renames it and
+        # stores it in Mongo
+        # E.g.:
+        # - XML filename: Screenshot 2022-01-19 222028-13_45_57.jpg
+        # - Mongo: Screenshot_2022-01-19_222028-13_45_57.jpg
+
+        return Attachment.objects.filter(instance_id=submission['_id'])
+
+    def get_daily_counts(
+        self, user: settings.AUTH_USER_MODEL, timeframe: tuple[date, date]
+    ) -> dict:
+
+        user = get_database_user(user)
+
+        if user != self.asset.owner and self.asset.has_perm(
+            user, PERM_PARTIAL_SUBMISSIONS
+        ):
+            # We cannot use cached values from daily counter when user has
+            # partial permissions. We need to use MongoDB aggregation engine
+            # to retrieve the correct value according to user's permissions.
+ permission_filters = self.asset.get_filters_for_partial_perm( + user.pk, perm=PERM_VIEW_SUBMISSIONS + ) + + if not permission_filters: + return {} + + query = { + '_userform_id': self.mongo_userform_id, + '_submission_time': { + '$gte': f'{timeframe[0]}', + '$lte': f'{timeframe[1]}T23:59:59' + } + } + + query = MongoHelper.get_permission_filters_query( + query, permission_filters + ) + + documents = settings.MONGO_DB.instances.aggregate([ + { + '$match': query, + }, + { + '$group': { + '_id': { + '$dateToString': { + 'format': '%Y-%m-%d', + 'date': { + '$dateFromString': { + 'format': "%Y-%m-%dT%H:%M:%S", + 'dateString': "$_submission_time" + } + } + } + }, + 'count': {'$sum': 1} + } + } + ]) + return {doc['_id']: doc['count'] for doc in documents} + + # Trivial case, user has 'view_permissions' + daily_counts = ( + DailyXFormSubmissionCounter.objects.values( + 'date', 'counter' + ).filter( + xform_id=self.xform_id, + date__range=timeframe, + ) + ) + return { + str(count['date']): count['counter'] for count in daily_counts + } + + def get_data_download_links(self): + exports_base_url = '/'.join(( + settings.KOBOCAT_URL.rstrip('/'), + self.asset.owner.username, + 'exports', + self.xform.id_string + )) + reports_base_url = '/'.join(( + settings.KOBOCAT_URL.rstrip('/'), + self.asset.owner.username, + 'reports', + self.xform.id_string + )) + links = { + # To be displayed in iframes + 'xls_legacy': '/'.join((exports_base_url, 'xls/')), + 'csv_legacy': '/'.join((exports_base_url, 'csv/')), + 'zip_legacy': '/'.join((exports_base_url, 'zip/')), + 'kml_legacy': '/'.join((exports_base_url, 'kml/')), + # For GET requests that return files directly + 'xls': '/'.join((reports_base_url, 'export.xlsx')), + 'csv': '/'.join((reports_base_url, 'export.csv')), + } + return links + + def get_enketo_survey_links(self): + if not self.get_data('backend_response'): + return {} + + data = { + 'server_url': '{}/{}'.format( + settings.KOBOCAT_URL.rstrip('/'), + self.asset.owner.username + ), + 'form_id': self.xform.id_string + } + + try: + response = requests.post( + f'{settings.ENKETO_URL}/{settings.ENKETO_SURVEY_ENDPOINT}', + # bare tuple implies basic auth + auth=(settings.ENKETO_API_KEY, ''), + data=data + ) + response.raise_for_status() + except requests.exceptions.RequestException: + # Don't 500 the entire asset view if Enketo is unreachable + logging.error( + 'Failed to retrieve links from Enketo', exc_info=True) + return {} + try: + links = response.json() + except ValueError: + logging.error('Received invalid JSON from Enketo', exc_info=True) + return {} + + try: + enketo_id = links.pop('enketo_id') + except KeyError: + logging.error( + 'Invalid response from Enketo: `enketo_id` is not found', + exc_info=True, + ) + return {} + + stored_enketo_id = self.get_data('enketo_id') + if stored_enketo_id != enketo_id: + if stored_enketo_id: + logging.warning( + f'Enketo ID has changed from {stored_enketo_id} to {enketo_id}' + ) + self.save_to_db({'enketo_id': enketo_id}) + + if self.xform.require_auth: + # Unfortunately, EE creates unique ID based on OpenRosa server URL. + # Thus, we need to always generated the ID with the same URL + # (i.e.: with username) to be retro-compatible and then, + # overwrite the OpenRosa server URL again. 
+ self.set_enketo_open_rosa_server( + require_auth=True, enketo_id=enketo_id + ) + + for discard in ('enketo_id', 'code', 'preview_iframe_url'): + try: + del links[discard] + except KeyError: + pass + return links + + def get_orphan_postgres_submissions(self) -> Optional[QuerySet, bool]: + """ + Return a queryset of all submissions still present in PostgreSQL + database related to `self.xform`. + Return False if one submission still exists in MongoDB at + least. + Otherwise, if `self.xform` does not exist (anymore), return None + """ + all_submissions = self.get_submissions( + user=self.asset.owner, + fields=['_id'], + skip_count=True, + ) + try: + next(all_submissions) + except StopIteration: + pass + else: + return False + + try: + return Instance.objects.filter(xform_id=self.xform_id) + except InvalidXFormException: + return None + + # FIXME Where this method is needed + def get_submission_detail_url(self, submission_id: int) -> str: + url = f'{self.submission_list_url}/{submission_id}' + return url + + # FIXME where this method is needed + def get_submission_validation_status_url(self, submission_id: int) -> str: + url = '{detail_url}/validation_status'.format( + detail_url=self.get_submission_detail_url(submission_id) + ) + return url + + def get_submissions( + self, + user: settings.AUTH_USER_MODEL, + format_type: str = SUBMISSION_FORMAT_TYPE_JSON, + submission_ids: list = list, + request: Optional['rest_framework.request.Request'] = None, + **mongo_query_params + ) -> Union[Generator[dict, None, None], list]: + """ + Retrieve submissions that `user` is allowed to access. + + The format `format_type` can be either: + - 'json' (See `kpi.constants.SUBMISSION_FORMAT_TYPE_JSON`) + - 'xml' (See `kpi.constants.SUBMISSION_FORMAT_TYPE_XML`) + + Results can be filtered by submission ids. Moreover MongoDB filters can + be passed through `query` to narrow down the results. + + If `user` has no access to these submissions or no matches are found, + an empty generator is returned. + + If `format_type` is 'json', a generator of dictionaries is returned. + Otherwise, if `format_type` is 'xml', a generator of strings is returned. + + If `request` is provided, submission attachments url are rewritten to + point to KPI (instead of KoBoCAT). 
+ See `BaseDeploymentBackend._rewrite_json_attachment_urls()` + """ + + mongo_query_params['submission_ids'] = submission_ids + params = self.validate_submission_list_params( + user, format_type=format_type, **mongo_query_params + ) + + if format_type == SUBMISSION_FORMAT_TYPE_JSON: + submissions = self.__get_submissions_in_json(request, **params) + elif format_type == SUBMISSION_FORMAT_TYPE_XML: + submissions = self.__get_submissions_in_xml(**params) + else: + raise BadFormatException( + "The format {} is not supported".format(format_type) + ) + return submissions + + # FIXME + def get_validation_status( + self, submission_id: int, user: settings.AUTH_USER_MODEL + ) -> dict: + url = self.get_submission_validation_status_url(submission_id) + kc_request = requests.Request(method='GET', url=url) + kc_response = self.__kobocat_proxy_request(kc_request, user) + + return self.__prepare_as_drf_response_signature(kc_response) + + @property + def mongo_userform_id(self): + return '{}_{}'.format(self.asset.owner.username, self.xform_id_string) + + @staticmethod + def nlp_tracking_data( + asset_ids: list[int], start_date: Optional[datetime.date] = None + ): + """ + Get the NLP tracking data since a specified date + If no date is provided, get all-time data + """ + filter_args = {} + if start_date: + filter_args = {'date__gte': start_date} + try: + nlp_tracking = ( + NLPUsageCounter.objects.only('total_asr_seconds', 'total_mt_characters') + .filter( + asset_id__in=asset_ids, + **filter_args + ).aggregate( + total_nlp_asr_seconds=Coalesce(Sum('total_asr_seconds'), 0), + total_nlp_mt_characters=Coalesce(Sum('total_mt_characters'), 0), + ) + ) + except NLPUsageCounter.DoesNotExist: + return { + 'total_nlp_asr_seconds': 0, + 'total_nlp_mt_characters': 0, + } + else: + return nlp_tracking + + def redeploy(self, active=None): + """ + Replace (overwrite) the deployment, and + optionally changing whether the deployment is active. + CAUTION: Does not save deployment data to the database! + """ + if active is None: + active = self.active + + id_string = self.xform.id_string + xlsx_io = self.asset.to_xlsx_io( + versioned=True, append={ + 'settings': { + 'id_string': id_string, + 'form_title': self.asset.name, + } + } + ) + xlsx_file = ContentFile(xlsx_io.read(), name=f'{self.asset.uid}.xlsx') + + with kc_transaction_atomic(): + XForm.objects.filter(pk=self.xform.id).update( + downloadable=active, + title=self.asset.name, + has_kpi_hooks=self.asset.has_active_hooks, + ) + self.xform.downloadable = active + self.xform.title = self.asset.name + self.xform.has_kpi_hooks = self.asset.has_active_hooks + + publish_xls_form(xlsx_file, self.asset.owner, self.xform.id_string) + + + # Do not call save it, asset (and its deployment) is saved right + # after calling this method in `DeployableMixin.deploy()` + self.store_data( + { + 'backend': 'openrosa', + 'active': active, + 'backend_response': { + 'formid': self.xform.pk, + 'uuid': self.xform.uuid, + 'id_string': self.xform.id_string, + 'kpi_asset_uid': self.asset.uid, + }, + 'version': self.asset.version_id, + } + ) + + def remove_from_kc_only_flag( + self, specific_user: Union[int, settings.AUTH_USER_MODEL] = None + ): + """ + Removes `from_kc_only` flag for ALL USERS unless `specific_user` is + provided + + Args: + specific_user (int, User): User object or pk + """ + # This flag lets us know that permission assignments in KPI exist + # only because they were copied from KoBoCAT (by `sync_from_kobocat`). 
+ # As soon as permissions are assigned through KPI, this flag must be + # removed + # + # This method is here instead of `ObjectPermissionMixin` because + # it's specific to KoBoCat as backend. + + # TODO: Remove this method after kobotoolbox/kobocat#642 + + filters = { + 'permission__codename': PERM_FROM_KC_ONLY, + 'asset_id': self.asset.id, + } + if specific_user is not None: + try: + user_id = specific_user.pk + except AttributeError: + user_id = specific_user + filters['user_id'] = user_id + + ObjectPermission.objects.filter(**filters).delete() + + def rename_enketo_id_key(self, previous_owner_username: str): + parsed_url = urlparse(settings.KOBOCAT_URL) + domain_name = parsed_url.netloc + asset_uid = self.asset.uid + enketo_redis_client = get_redis_connection('enketo_redis_main') + + try: + enketo_redis_client.rename( + src=f'or:{domain_name}/{previous_owner_username},{asset_uid}', + dst=f'or:{domain_name}/{self.asset.owner.username},{asset_uid}' + ) + except redis.exceptions.ResponseError: + # original does not exist, weird but don't raise a 500 for that + pass + + @staticmethod + def prepare_bulk_update_response(kc_responses: list) -> dict: + """ + Formatting the response to allow for partial successes to be seen + more explicitly. + + Args: + kc_responses (list): A list containing dictionaries with keys of + `_uuid` from the newly generated uuid and `response`, the response + object received from KoBoCAT + + Returns: + dict: formatted dict to be passed to a Response object and sent to + the client + """ + + OPEN_ROSA_XML_MESSAGE = '{http://openrosa.org/http/response}message' + + # Unfortunately, the response message from OpenRosa is in XML format, + # so it needs to be parsed before extracting the text + results = [] + for response in kc_responses: + message = t('Something went wrong') + try: + xml_parsed = fromstring_preserve_root_xmlns( + response['response'].content + ) + except DET.ParseError: + pass + else: + message_el = xml_parsed.find(OPEN_ROSA_XML_MESSAGE) + if message_el is not None and message_el.text.strip(): + message = message_el.text + + results.append( + { + 'uuid': response['uuid'], + 'status_code': response['response'].status_code, + 'message': message, + } + ) + + total_update_attempts = len(results) + total_successes = [result['status_code'] for result in results].count( + status.HTTP_201_CREATED + ) + + return { + 'status': status.HTTP_200_OK + if total_successes > 0 + # FIXME: If KoboCAT returns something unexpected, like a 404 or a + # 500, then 400 is not the right response to send to the client + else status.HTTP_400_BAD_REQUEST, + 'data': { + 'count': total_update_attempts, + 'successes': total_successes, + 'failures': total_update_attempts - total_successes, + 'results': results, + }, + } + + def set_active(self, active): + """ + Set deployment as active or not. + Store results in deployment data + """ + # Use `queryset.update()` over `model.save()` because we don't need to + # run the logic of the `model.save()` method and we don't need signals + # to be called. + XForm.objects.filter(pk=self.xform_id).update(downloadable=active) + self.xform.downloadable = active + self.save_to_db({'active': active}) + + def set_asset_uid(self, force: bool = False) -> bool: + """ + Link KoBoCAT `XForm` back to its corresponding KPI `Asset` by + populating the `kpi_asset_uid` field (use KoBoCAT proxy to PATCH XForm). + Useful when a form is created from the legacy upload form. + Store results in deployment data. 
+
+        It returns `True` only if `XForm.kpi_asset_uid` field is updated
+        during this call, otherwise `False`.
+        """
+        is_synchronized = not (
+            force or
+            self.backend_response.get('kpi_asset_uid', None) is None
+        )
+        if is_synchronized:
+            return False
+
+        # Use `queryset.update()` over `model.save()` because we don't need to
+        # run the logic of the `model.save()` method and we don't need signals
+        # to be called.
+        XForm.objects.filter(pk=self.xform_id).update(
+            kpi_asset_uid=self.asset.uid
+        )
+        self.xform.kpi_asset_uid = self.asset.uid
+        self.backend_response['kpi_asset_uid'] = self.asset.uid
+        self.store_data({'backend_response': self.backend_response})
+        return True
+
+    def set_enketo_open_rosa_server(
+        self, require_auth: bool, enketo_id: str = None
+    ):
+        # Kobocat handles Open Rosa requests with different accesses.
+        # - Authenticated access, https://[kc]
+        # - Anonymous access, https://[kc]/username
+        # Enketo generates its unique ID based on the server URL.
+        # Thus, if the project requires authentication, we need to update Redis
+        # directly to keep the same ID and let Enketo submit data to the correct
+        # endpoint
+        if not enketo_id:
+            enketo_id = self.enketo_id
+
+        server_url = settings.KOBOCAT_URL.rstrip('/')
+        if not require_auth:
+            server_url = f'{server_url}/{self.asset.owner.username}'
+
+        enketo_redis_client = get_redis_connection('enketo_redis_main')
+        enketo_redis_client.hset(
+            f'id:{enketo_id}',
+            'openRosaServer',
+            server_url,
+        )
+
+    def set_has_kpi_hooks(self):
+        """
+        `PATCH` `has_kpi_hooks` boolean of related KoBoCAT XForm.
+        It lets KoBoCAT know whether it needs to notify KPI
+        each time a submission comes in.
+
+        Store results in deployment data
+        """
+        # Use `queryset.update()` over `model.save()` because we don't need to
+        # run the logic of the `model.save()` method and we don't need signals
+        # to be called.
+        XForm.objects.filter(pk=self.xform_id).update(
+            kpi_asset_uid=self.asset.uid,
+            has_kpi_hooks=self.asset.has_active_hooks
+        )
+        self.xform.kpi_asset_uid = self.asset.uid
+        self.xform.has_active_hooks = self.asset.has_active_hooks
+
+        self.backend_response['kpi_asset_uid'] = self.asset.uid
+        self.store_data({'backend_response': self.backend_response})
+
+    def set_validation_status(
+        self,
+        submission_id: int,
+        user: settings.AUTH_USER_MODEL,
+        data: dict,
+        method: str,
+    ) -> dict:
+        """
+        Update validation status through KoBoCAT proxy,
+        authenticated by `user`'s API token.
+        If `method` is `DELETE`, the status is reset to `None`
+
+        It returns a dictionary which can be used as Response object arguments
+        """
+
+        self.validate_access_with_partial_perms(
+            user=user,
+            perm=PERM_VALIDATE_SUBMISSIONS,
+            submission_ids=[submission_id],
+        )
+
+        kc_request_params = {
+            'method': method,
+            'url': self.get_submission_validation_status_url(submission_id),
+        }
+
+        if method == 'PATCH':
+            kc_request_params.update({'json': data})
+
+        kc_request = requests.Request(**kc_request_params)
+        kc_response = self.__kobocat_proxy_request(kc_request, user)
+        return self.__prepare_as_drf_response_signature(kc_response)
+
+    def set_validation_statuses(
+        self, user: settings.AUTH_USER_MODEL, data: dict
+    ) -> dict:
+        """
+        Bulk update validation status for provided submissions through
+        KoBoCAT proxy, authenticated by `user`'s API token.
+
+        `data` should contain either the submission ids or the query to
+        retrieve the subset of submissions chosen by the user.
+        If none of them are provided, all the submissions are selected
+        Examples:
+            {"submission_ids": [1, 2, 3]}
+            {"query":{"_validation_status.uid":"validation_status_not_approved"}}
+        """
+        submission_ids = self.validate_access_with_partial_perms(
+            user=user,
+            perm=PERM_VALIDATE_SUBMISSIONS,
+            submission_ids=data['submission_ids'],
+            query=data['query'],
+        )
+
+        # If `submission_ids` is not empty, user has partial permissions.
+        # Otherwise, they have full access.
+        if submission_ids:
+            # Remove query from `data` because all the submission ids have been
+            # already retrieved
+            data.pop('query', None)
+            data['submission_ids'] = submission_ids
+
+        # `PATCH` KC even if KPI receives `DELETE`
+        url = self.submission_list_url
+        kc_request = requests.Request(method='PATCH', url=url, json=data)
+        kc_response = self.__kobocat_proxy_request(kc_request, user)
+        return self.__prepare_as_drf_response_signature(kc_response)
+
+    # DEPRECATED
+    def store_submission(
+        self, user, xml_submission, submission_uuid, attachments=None
+    ):
+        file_tuple = (submission_uuid, io.StringIO(xml_submission))
+        files = {'xml_submission_file': file_tuple}
+        if attachments:
+            files.update(attachments)
+        kc_request = requests.Request(
+            method='POST', url=self.submission_url, files=files
+        )
+        kc_response = self.__kobocat_proxy_request(kc_request, user=user)
+        return kc_response
+
+    @property
+    def submission_count(self):
+        try:
+            return self.xform.num_of_submissions
+        except InvalidXFormException:
+            return 0
+
+    def submission_count_since_date(self, start_date=None):
+        try:
+            xform_id = self.xform_id
+        except InvalidXFormException:
+            return 0
+
+        today = timezone.now().date()
+        filter_args = {
+            'xform_id': xform_id,
+        }
+        if start_date:
+            filter_args['date__range'] = [start_date, today]
+        try:
+            # Note: this is replicating the functionality that was formerly in
+            # `current_month_submission_count`. `current_month_submission_count`
+            # didn't account for partial permissions, and this doesn't either
+            total_submissions = DailyXFormSubmissionCounter.objects.only(
+                'date', 'counter'
+            ).filter(**filter_args).aggregate(count_sum=Coalesce(Sum('counter'), 0))
+        except DailyXFormSubmissionCounter.DoesNotExist:
+            return 0
+        else:
+            return total_submissions['count_sum']
+
+    @property
+    def submission_list_url(self):
+        url = '{kc_base}/api/v1/data/{formid}'.format(
+            kc_base=settings.KOBOCAT_INTERNAL_URL,
+            formid=self.backend_response['formid']
+        )
+        return url
+
+    @property
+    def submission_model(self):
+        return Instance
+
+    @property
+    def submission_url(self) -> str:
+        # Use internal host to secure calls to KoboCAT API,
+        # kobo-service-account can restrict requests per host.
+ url = '{kc_base}/submission'.format( + kc_base=settings.KOBOCAT_INTERNAL_URL, + ) + return url + + def sync_media_files(self, file_type: str = AssetFile.FORM_MEDIA): + + metadata_files = defaultdict(dict) + + # Build a list of KoboCAT metadata to compare with KPI + for metadata in MetaData.objects.filter( + xform_id=self.xform_id, + data_type=self.SYNCED_DATA_FILE_TYPES[file_type], + ).values(): + metadata_files[metadata['data_value']] = { + 'pk': metadata['id'], + 'md5': metadata['file_hash'], + 'from_kpi': metadata['from_kpi'], + } + + metadata_filenames = metadata_files.keys() + + queryset = self._get_metadata_queryset(file_type=file_type) + + for media_file in queryset: + + backend_media_id = media_file.backend_media_id + + # File does not exist in KC + if backend_media_id not in metadata_filenames: + if media_file.deleted_at is None: + # New file + self._save_openrosa_metadata(media_file) + else: + # Orphan, delete it + media_file.delete(force=True) + continue + + # Existing file + if backend_media_id in metadata_filenames: + metadata_file = metadata_files[backend_media_id] + if media_file.deleted_at is None: + # If md5 differs, we need to re-upload it. + if media_file.md5_hash != metadata_file['md5']: + if media_file.file_type == AssetFile.PAIRED_DATA: + self._update_kc_metadata_hash( + media_file, metadata_file['pk'] + ) + else: + self._delete_openrosa_metadata(metadata_file) + self._save_openrosa_metadata(media_file) + elif metadata_file['from_kpi']: + self._delete_openrosa_metadata(metadata_file, media_file) + else: + # Remote file has been uploaded directly to KC. We + # cannot delete it, but we need to vacuum KPI. + media_file.delete(force=True) + # Skip deletion of key corresponding to `backend_media_id` + # in `metadata_files` to avoid unique constraint failure in case + # user deleted + # and re-uploaded the same file in a row between + # two deployments + # Example: + # - User uploads file1.jpg (pk == 1) + # - User deletes file1.jpg (pk == 1) + # - User re-uploads file1.jpg (pk == 2) + # Next time, 'file1.jpg' is encountered in this loop, + # it would try to re-upload to KC if its hash differs + # from KC version and would fail because 'file1.jpg' + # already exists in KC db. + continue + + # Remove current filename from `metadata_files`. 
+            # All files which will remain in this dict (after this loop)
+            # will be considered obsolete and will be deleted
+            del metadata_files[backend_media_id]
+
+        # Remove KoboCAT orphan files previously uploaded through KPI
+        for metadata_file in metadata_files.values():
+            if metadata_file['from_kpi']:
+                self._delete_openrosa_metadata(metadata_file)
+
+    @property
+    def xform(self):
+
+        if self._xform is not None:
+            return self._xform
+
+        pk = self.backend_response['formid']
+        xform = (
+            XForm.objects.filter(pk=pk)
+            .only(
+                'user__username',
+                'id_string',
+                'num_of_submissions',
+                'attachment_storage_bytes',
+                'require_auth',
+                'uuid',
+            )
+            .select_related(
+                'user'
+            )  # Avoid extra query to validate username below
+            .first()
+        )
+
+        if not (
+            xform
+            and xform.user.username == self.asset.owner.username
+            and xform.id_string == self.xform_id_string
+        ):
+            raise InvalidXFormException(
+                'Deployment links to an unexpected KoboCAT XForm'
+            )
+        self._xform = xform
+        return self._xform
+
+    @property
+    def xform_id(self):
+        return self.xform.pk
+
+    @property
+    def xform_id_string(self):
+        return self.get_data('backend_response.id_string')
+
+    @staticmethod
+    @contextmanager
+    def suspend_submissions(user_ids: list[int]):
+        UserProfile.objects.filter(
+            user_id__in=user_ids
+        ).update(
+            metadata=UpdateJSONFieldAttributes(
+                'metadata',
+                updates={'submissions_suspended': True},
+            ),
+        )
+        try:
+            yield
+        finally:
+            UserProfile.objects.filter(
+                user_id__in=user_ids
+            ).update(
+                metadata=UpdateJSONFieldAttributes(
+                    'metadata',
+                    updates={'submissions_suspended': False},
+                ),
+            )
+
+    def transfer_submissions_ownership(
+        self, previous_owner_username: str
+    ) -> bool:
+
+        results = settings.MONGO_DB.instances.update_many(
+            {'_userform_id': f'{previous_owner_username}_{self.xform_id_string}'},
+            {
+                '$set': {
+                    '_userform_id': self.mongo_userform_id
+                }
+            },
+        )
+
+        return (
+            results.matched_count == 0 or
+            (
+                results.matched_count > 0
+                and results.matched_count == results.modified_count
+            )
+        )
+
+    def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'):
+
+        NLPUsageCounter.objects.filter(
+            asset=self.asset, user=self.asset.owner
+        ).update(user=new_owner)
+        DailyXFormSubmissionCounter.objects.filter(
+            xform=self.xform, user_id=self.asset.owner.pk
+        ).update(user=new_owner)
+        MonthlyXFormSubmissionCounter.objects.filter(
+            xform=self.xform, user_id=self.asset.owner.pk
+        ).update(user=new_owner)
+
+        UserProfile.objects.filter(user_id=self.asset.owner.pk).update(
+            attachment_storage_bytes=F('attachment_storage_bytes')
+            - self.xform.attachment_storage_bytes
+        )
+        UserProfile.objects.filter(user_id=new_owner.pk).update(
+            attachment_storage_bytes=F('attachment_storage_bytes')
+            + self.xform.attachment_storage_bytes
+        )
+
+    def _delete_openrosa_metadata(
+        self, metadata_file_: dict, file_: Union[AssetFile, PairedData] = None
+    ):
+        """
+        A simple utility to delete metadata in KoBoCAT.
+        If related KPI file is provided (i.e. `file_`), it is deleted too.
+ """ + # Delete MetaData object and its related file (on storage) + try: + metadata = MetaData.objects.get(pk=metadata_file_['id']) + except MetaData.DoesNotExist: + pass + else: + # Need to call signals + metadata.delete() + + if file_ is None: + return + + # Delete file in KPI if requested + file_.delete(force=True) + + def _last_submission_time(self): + id_string = self.xform.id_string + return last_submission_time( + xform_id_string=id_string, user_id=self.asset.owner.pk + ) + + def _save_openrosa_metadata(self, file_: SyncBackendMediaInterface): + """ + Create a MetaData object usable for (KoboCAT) v1 API, related to + AssetFile `file_`. + """ + metadata = { + 'data_value': file_.backend_media_id, + 'xform_id': self.xform_id, + 'data_type': self.SYNCED_DATA_FILE_TYPES[file_.file_type], + 'from_kpi': True, + 'data_filename': file_.filename, + 'data_file_type': file_.mimetype, + 'file_hash': file_.md5_hash, + } + + if not file_.is_remote_url: + metadata['data_file'] = file_.content + + MetaData.objects.create(**metadata) + + file_.synced_with_backend = True + file_.save(update_fields=['synced_with_backend']) + + def _update_kc_metadata_hash( + self, file_: SyncBackendMediaInterface, metadata_id: int + ): + """ + Update metadata object hash + """ + data = {'file_hash': file_.md5_hash} + # MetaData has no signals, use `filter().update()` instead of `.get()` + # and `.save(update_fields='...')` + MetaData.objects.filter(pk=metadata_id).update(**data) + file_.synced_with_backend = True + file_.save(update_fields=['synced_with_backend']) + + def __get_submissions_in_json( + self, + request: Optional['rest_framework.request.Request'] = None, + **params + ) -> Generator[dict, None, None]: + """ + Retrieve submissions directly from Mongo. + Submissions can be filtered with `params`. + """ + # Apply a default sort of _id to prevent unpredictable natural sort + if not params.get('sort'): + params['sort'] = {'_id': 1} + mongo_cursor, total_count = MongoHelper.get_instances( + self.mongo_userform_id, **params) + + # Python-only attribute used by `kpi.views.v2.data.DataViewSet.list()` + self.current_submission_count = total_count + + add_supplemental_details_to_query = self.asset.has_advanced_features + + fields = params.get('fields', []) + if len(fields) > 0 and '_uuid' not in fields: + # skip the query if submission '_uuid' is not even q'd from mongo + add_supplemental_details_to_query = False + + if add_supplemental_details_to_query: + mongo_cursor = stream_with_extras(mongo_cursor, self.asset) + + return ( + self._rewrite_json_attachment_urls( + MongoHelper.to_readable_dict(submission), + request, + ) + for submission in mongo_cursor + ) + + def __get_submissions_in_xml( + self, + **params + ) -> Generator[str, None, None]: + """ + Retrieve submissions directly from PostgreSQL. + Submissions can be filtered with `params`. + """ + + mongo_filters = ['query', 'permission_filters'] + use_mongo = any(mongo_filter in mongo_filters for mongo_filter in params + if params.get(mongo_filter) is not None) + + if use_mongo: + # We use Mongo to retrieve matching instances. 
+ params['fields'] = ['_id'] + # Force `sort` by `_id` for Mongo + # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` + params['sort'] = {'_id': 1} + submissions, count = MongoHelper.get_instances( + self.mongo_userform_id, **params + ) + submission_ids = [ + submission.get('_id') + for submission in submissions + ] + self.current_submission_count = count + + queryset = Instance.objects.filter(xform_id=self.xform_id) + + if len(submission_ids) > 0 or use_mongo: + queryset = queryset.filter(id__in=submission_ids) + + # Python-only attribute used by `kpi.views.v2.data.DataViewSet.list()` + if not use_mongo: + self.current_submission_count = queryset.count() + + # Force Sort by id + # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` + queryset = queryset.order_by('id') + + # When using Mongo, data is already paginated, + # no need to do it with PostgreSQL too. + if not use_mongo: + offset = params.get('start') + limit = offset + params.get('limit') + queryset = queryset[offset:limit] + + return (lazy_instance.xml for lazy_instance in queryset) diff --git a/kpi/models/paired_data.py b/kpi/models/paired_data.py index a49bcf7eb8..5487e89f8d 100644 --- a/kpi/models/paired_data.py +++ b/kpi/models/paired_data.py @@ -211,8 +211,6 @@ def md5_hash(self): f'{str(time.time())}.{self.backend_media_id}', prefix=True ) + '-time' - return self.asset_file.md5_hash - @property def is_remote_url(self): """ diff --git a/kpi/serializers/v2/deployment.py b/kpi/serializers/v2/deployment.py index 956568c228..e140800ca9 100644 --- a/kpi/serializers/v2/deployment.py +++ b/kpi/serializers/v2/deployment.py @@ -28,13 +28,15 @@ def create(self, validated_data): asset = self.context['asset'] self._raise_unless_current_version(asset, validated_data) # if no backend is provided, use the installation's default backend - backend_id = validated_data.get('backend', - settings.DEFAULT_DEPLOYMENT_BACKEND) + backend_id = validated_data.get( + 'backend', settings.DEFAULT_DEPLOYMENT_BACKEND + ) # `asset.deploy()` deploys the latest version and updates that versions' # 'deployed' boolean value - asset.deploy(backend=backend_id, - active=validated_data.get('active', False)) + asset.deploy( + backend=backend_id, active=validated_data.get('active', False) + ) return asset.deployment def update(self, instance, validated_data): diff --git a/kpi/signals.py b/kpi/signals.py index d3ad80f9d2..90245bbee6 100644 --- a/kpi/signals.py +++ b/kpi/signals.py @@ -77,7 +77,7 @@ def tag_uid_post_save(sender, instance, created, raw, **kwargs): @receiver(post_save, sender=Hook) def update_kc_xform_has_kpi_hooks(sender, instance, **kwargs): """ - Updates KoBoCAT XForm instance as soon as Asset.Hook list is updated. + Updates KoboCAT XForm instance as soon as Asset.Hook list is updated. 
""" asset = instance.asset if asset.has_deployment: diff --git a/kpi/tasks.py b/kpi/tasks.py index 3006d1fa0f..28b5d19697 100644 --- a/kpi/tasks.py +++ b/kpi/tasks.py @@ -1,10 +1,13 @@ # coding: utf-8 +import time + import constance import requests from django.conf import settings from django.core.mail import send_mail from django.core.management import call_command + from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.markdownx_uploader.tasks import remove_unused_markdown_files from kobo.celery import celery_app @@ -73,7 +76,14 @@ def sync_kobocat_xforms( @celery_app.task def sync_media_files(asset_uid): - asset = Asset.objects.get(uid=asset_uid) + asset = Asset.objects.defer('content').get(uid=asset_uid) + if not asset.has_deployment: + # 🙈 Race condition: Celery task starts too fast and does not see + # the deployment data, even if asset has been saved prior to call this + # task + time.sleep(1) + asset.refresh_from_db(fields=['_deployment_data']) + asset.deployment.sync_media_files() diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 5a98d72f11..9a1c0409dd 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -206,12 +206,8 @@ def test_audit_log_on_bulk_delete(self): self.asset.owner, fields=['_id'] ) ] - ( - app_label, - model_name, - ) = self.asset.deployment.submission_model.get_app_label_and_model_name() audit_log_count = AuditLog.objects.filter( - user=self.someuser, app_label=app_label, model_name=model_name + user=self.someuser, app_label='logger', model_name='instance' ).count() # No submissions have been deleted yet assert audit_log_count == 0 @@ -221,7 +217,7 @@ def test_audit_log_on_bulk_delete(self): # All submissions have been deleted and should be logged deleted_submission_ids = AuditLog.objects.values_list( 'pk', flat=True - ).filter(user=self.someuser, app_label=app_label, model_name=model_name) + ).filter(user=self.someuser, app_label='logger', model_name='instance') assert len(expected_submission_ids) > 0 assert sorted(expected_submission_ids), sorted(deleted_submission_ids) @@ -783,12 +779,8 @@ def test_audit_log_on_delete(self): deleted. 
""" submission = self.submissions_submitted_by_someuser[0] - ( - app_label, - model_name, - ) = self.asset.deployment.submission_model.get_app_label_and_model_name() audit_log_count = AuditLog.objects.filter( - user=self.someuser, app_label=app_label, model_name=model_name + user=self.someuser, app_label='logger', model_name='instance' ).count() # No submissions have been deleted yet assert audit_log_count == 0 @@ -798,7 +790,7 @@ def test_audit_log_on_delete(self): # All submissions have been deleted and should be logged deleted_submission_ids = AuditLog.objects.values_list( 'pk', flat=True - ).filter(user=self.someuser, app_label=app_label, model_name=model_name) + ).filter(user=self.someuser, app_label='logger', model_name='instance') assert len(deleted_submission_ids) > 0 assert [submission['_id']], deleted_submission_ids diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 402f541d09..918f7d2281 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -355,14 +355,10 @@ def bulk(self, request, *args, **kwargs): query=data['query'], fields=['_id', '_uuid'] ) - ( - app_label, - model_name, - ) = deployment.submission_model.get_app_label_and_model_name() for submission in submissions: audit_logs.append(AuditLog( - app_label=app_label, - model_name=model_name, + app_label='logger', + model_name='instance', object_id=submission['_id'], user=request.user, user_uid=request.user.extra_details.uid, @@ -399,13 +395,9 @@ def destroy(self, request, pk, *args, **kwargs): ) if json_response['status'] == status.HTTP_204_NO_CONTENT: - ( - app_label, - model_name, - ) = deployment.submission_model.get_app_label_and_model_name() AuditLog.objects.create( - app_label=app_label, - model_name=model_name, + app_label='logger', + model_name='instance', object_id=pk, user=request.user, metadata={ From f1c3f15465349de763e34782ebabb7ee5e3e1431 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 18 Jul 2024 19:21:47 -0400 Subject: [PATCH 007/119] WIP - OpenRosa backend --- kobo/apps/openrosa/apps/api/exceptions.py | 2 +- .../apps/api/viewsets/data_viewset.py | 169 +++--------------- kobo/apps/openrosa/apps/logger/exceptions.py | 12 +- .../0030_backfill_lost_monthly_counters.py | 4 +- .../0031_remove_null_user_daily_counters.py | 4 +- .../openrosa/apps/logger/utils/__init__.py | 3 + .../logger/{utils.py => utils/counters.py} | 1 + .../apps/logger/utils/database_query.py | 90 ++++++++++ .../openrosa/apps/logger/utils/instance.py | 69 +++++++ kobo/apps/openrosa/libs/utils/logger_tools.py | 33 ++-- kpi/deployment_backends/base_backend.py | 13 +- kpi/deployment_backends/kobocat_backend.py | 10 +- kpi/deployment_backends/mock_backend.py | 4 +- kpi/deployment_backends/openrosa_backend.py | 119 ++++++------ kpi/utils/files.py | 12 ++ kpi/views/v2/asset_snapshot.py | 2 +- kpi/views/v2/data.py | 17 +- 17 files changed, 323 insertions(+), 241 deletions(-) create mode 100644 kobo/apps/openrosa/apps/logger/utils/__init__.py rename kobo/apps/openrosa/apps/logger/{utils.py => utils/counters.py} (99%) create mode 100644 kobo/apps/openrosa/apps/logger/utils/database_query.py create mode 100644 kobo/apps/openrosa/apps/logger/utils/instance.py create mode 100644 kpi/utils/files.py diff --git a/kobo/apps/openrosa/apps/api/exceptions.py b/kobo/apps/openrosa/apps/api/exceptions.py index 26bdab1ffd..460ae1e544 100644 --- a/kobo/apps/openrosa/apps/api/exceptions.py +++ b/kobo/apps/openrosa/apps/api/exceptions.py @@ -13,7 +13,7 @@ class LegacyAPIException(APIException): default_code = 'legacy_api_exception' -class 
NoConfirmationProvidedException(APIException): +class NoConfirmationProvidedAPIException(APIException): status_code = HTTP_400_BAD_REQUEST default_detail = t('No confirmation provided') diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index acccae48e1..79a85a5fa6 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -18,7 +18,7 @@ from rest_framework.settings import api_settings from kobo.apps.openrosa.apps.api.exceptions import ( - NoConfirmationProvidedException, + NoConfirmationProvidedAPIException, ) from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import ( custom_response_handler, @@ -29,16 +29,20 @@ get_validation_status, remove_validation_status_from_instance, ) +from kobo.apps.openrosa.apps.logger.exceptions import ( + BuildDbQueriesAttributeError, + BuildDbQueriesBadArgumentError, + BuildDbQueriesNoConfirmationProvidedError, +) from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.apps.logger.models.instance import ( Instance, ) -from kobo.apps.openrosa.apps.logger.signals import ( - nullify_exports_time_of_last_submission, - update_xform_submission_count_delete, +from kobo.apps.openrosa.apps.logger.utils import ( + build_db_queries, + delete_instances, ) from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance -from kobo.apps.openrosa.apps.viewer.signals import remove_from_mongo from kobo.apps.openrosa.libs.renderers import renderers from kobo.apps.openrosa.libs.mixins.anonymous_user_public_forms_mixin import ( AnonymousUserPublicFormsMixin, @@ -419,57 +423,28 @@ def bulk_delete(self, request, *args, **kwargs): Bulk delete instances """ xform = self.get_object() - postgres_query, mongo_query = self.__build_db_queries(xform, request.data) - - # Disconnect signals to speed-up bulk deletion - pre_delete.disconnect(remove_from_mongo, sender=ParsedInstance) - post_delete.disconnect( - nullify_exports_time_of_last_submission, sender=Instance, - dispatch_uid='nullify_exports_time_of_last_submission', - ) - post_delete.disconnect( - update_xform_submission_count_delete, sender=Instance, - dispatch_uid='update_xform_submission_count_delete', - ) try: - # Delete Postgres & Mongo - all_count, results = Instance.objects.filter(**postgres_query).delete() - identifier = f'{Instance._meta.app_label}.Instance' - try: - deleted_records_count = results[identifier] - except KeyError: - # PostgreSQL did not delete any Instance objects. Keep going in case - # they are still present in MongoDB. 
- logging.warning('Instance objects cannot be found') - deleted_records_count = 0 - - ParsedInstance.bulk_delete(mongo_query) - - # Update xform like signals would do if it was as single object deletion - nullify_exports_time_of_last_submission(sender=Instance, instance=xform) - update_xform_submission_count_delete( - sender=Instance, - instance=xform, value=deleted_records_count - ) - finally: - # Pre_delete signal needs to be re-enabled for parsed instance - pre_delete.connect(remove_from_mongo, sender=ParsedInstance) - post_delete.connect( - nullify_exports_time_of_last_submission, - sender=Instance, - dispatch_uid='nullify_exports_time_of_last_submission', - ) - post_delete.connect( - update_xform_submission_count_delete, - sender=Instance, - dispatch_uid='update_xform_submission_count_delete', + deleted_records_count = delete_instances(xform, request.data) + except BuildDbQueriesBadArgumentError: + raise ValidationError({ + 'payload': t("`query` and `instance_ids` can't be used together") + }) + except BuildDbQueriesAttributeError: + raise ValidationError( + {'payload': t('Invalid `query` or `submission_ids` params')} ) + except BuildDbQueriesNoConfirmationProvidedError: + raise NoConfirmationProvidedAPIException() - return Response({ - 'detail': t('{} submissions have been deleted').format( - deleted_records_count) - }, status.HTTP_200_OK) + return Response( + { + 'detail': t('{} submissions have been deleted').format( + deleted_records_count + ) + }, + status.HTTP_200_OK, + ) def bulk_validation_status(self, request, *args, **kwargs): @@ -488,8 +463,7 @@ def bulk_validation_status(self, request, *args, **kwargs): new_validation_status_uid, xform, real_user.username ) - postgres_query, mongo_query = self.__build_db_queries(xform, - request.data) + postgres_query, mongo_query = build_db_queries(xform, request.data) # Update Postgres & Mongo updated_records_count = Instance.objects.filter( @@ -741,90 +715,3 @@ def list(self, request, *args, **kwargs): return res return custom_response_handler(request, xform, query, export_type) - - @staticmethod - def __build_db_queries(xform_, request_data): - - """ - Gets instance ids based on the request payload. - Useful to narrow down set of instances for bulk actions - - Args: - xform_ (XForm) - request_data (dict) - - Returns: - tuple(, ): PostgreSQL filters, Mongo filters. - They are meant to be used respectively with Django Queryset - and PyMongo query. 
- - """ - - mongo_query = ParsedInstance.get_base_query(xform_.user.username, - xform_.id_string) - postgres_query = {'xform_id': xform_.id} - instance_ids = None - # Remove empty values - payload = { - key_: value_ for key_, value_ in request_data.items() if value_ - } - ################################################### - # Submissions can be retrieve in 3 different ways # - ################################################### - # First of all, - # users cannot send `query` and `submission_ids` in POST/PATCH request - # - if all(key_ in payload for key_ in ('query', 'submission_ids')): - raise ValidationError({ - 'payload': t("`query` and `instance_ids` can't be used together") - }) - - # First scenario / Get submissions based on user's query - try: - query = payload['query'] - except KeyError: - pass - else: - try: - query.update(mongo_query) # Overrides `_userform_id` if exists - except AttributeError: - raise ValidationError({ - 'payload': t('Invalid query: %(query)s') - % {'query': json.dumps(query)} - }) - - query_kwargs = { - 'query': json.dumps(query), - 'fields': '["_id"]' - } - - cursor = ParsedInstance.query_mongo_no_paging(**query_kwargs) - instance_ids = [record.get('_id') for record in list(cursor)] - - # Second scenario / Get submissions based on list of ids - try: - submission_ids = payload['submission_ids'] - except KeyError: - pass - else: - try: - # Use int() to test if list of integers is valid. - instance_ids = [int(submission_id) - for submission_id in submission_ids] - except ValueError: - raise ValidationError({ - 'payload': t('Invalid submission ids: %(submission_ids)s') - % {'submission_ids': - json.dumps(payload['submission_ids'])} - }) - - if instance_ids is not None: - # Narrow down queries with list of ids. - postgres_query.update({'id__in': instance_ids}) - mongo_query.update({'_id': {'$in': instance_ids}}) - elif payload.get('confirm', False) is not True: - # Third scenario / get all submissions in form, - # but confirmation param must be among payload - raise NoConfirmationProvidedException() - - return postgres_query, mongo_query diff --git a/kobo/apps/openrosa/apps/logger/exceptions.py b/kobo/apps/openrosa/apps/logger/exceptions.py index 74b31a19bd..79db6172fe 100644 --- a/kobo/apps/openrosa/apps/logger/exceptions.py +++ b/kobo/apps/openrosa/apps/logger/exceptions.py @@ -1,5 +1,13 @@ -# coding: utf-8 -from django.utils.translation import gettext as t +class BuildDbQueriesAttributeError(Exception): + pass + + +class BuildDbQueriesBadArgumentError(Exception): + pass + + +class BuildDbQueriesNoConfirmationProvidedError(Exception): + pass class DuplicateUUIDError(Exception): diff --git a/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py b/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py index 08b1492386..ad495779f2 100644 --- a/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py +++ b/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py @@ -6,7 +6,9 @@ from django.db.models.functions import ExtractYear, ExtractMonth from django.utils import timezone -from kobo.apps.openrosa.apps.logger.utils import delete_null_user_daily_counters +from kobo.apps.openrosa.apps.logger.utils.counters import ( + delete_null_user_daily_counters, +) def populate_missing_monthly_counters(apps, schema_editor): diff --git a/kobo/apps/openrosa/apps/logger/migrations/0031_remove_null_user_daily_counters.py 
b/kobo/apps/openrosa/apps/logger/migrations/0031_remove_null_user_daily_counters.py index 557d20a344..db961696b9 100644 --- a/kobo/apps/openrosa/apps/logger/migrations/0031_remove_null_user_daily_counters.py +++ b/kobo/apps/openrosa/apps/logger/migrations/0031_remove_null_user_daily_counters.py @@ -1,7 +1,9 @@ from django.conf import settings from django.db import migrations -from kobo.apps.openrosa.apps.logger.utils import delete_null_user_daily_counters +from kobo.apps.openrosa.apps.logger.utils.counters import ( + delete_null_user_daily_counters, +) class Migration(migrations.Migration): diff --git a/kobo/apps/openrosa/apps/logger/utils/__init__.py b/kobo/apps/openrosa/apps/logger/utils/__init__.py new file mode 100644 index 0000000000..40c609529a --- /dev/null +++ b/kobo/apps/openrosa/apps/logger/utils/__init__.py @@ -0,0 +1,3 @@ +from .counters import delete_null_user_daily_counters +from .database_query import build_db_queries +from .instance import delete_instances diff --git a/kobo/apps/openrosa/apps/logger/utils.py b/kobo/apps/openrosa/apps/logger/utils/counters.py similarity index 99% rename from kobo/apps/openrosa/apps/logger/utils.py rename to kobo/apps/openrosa/apps/logger/utils/counters.py index 4aaf1b1e9d..c76dbf42d4 100644 --- a/kobo/apps/openrosa/apps/logger/utils.py +++ b/kobo/apps/openrosa/apps/logger/utils/counters.py @@ -1,3 +1,4 @@ + def delete_null_user_daily_counters(apps, *args): """ Find any DailyXFormCounters without a user, assign them to a user if we can, otherwise delete them diff --git a/kobo/apps/openrosa/apps/logger/utils/database_query.py b/kobo/apps/openrosa/apps/logger/utils/database_query.py new file mode 100644 index 0000000000..b8a81db872 --- /dev/null +++ b/kobo/apps/openrosa/apps/logger/utils/database_query.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +import json + +from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance +from ..exceptions import ( + BuildDbQueriesAttributeError, + BuildDbQueriesBadArgumentError, + BuildDbQueriesNoConfirmationProvidedError, +) +from ..models.xform import XForm + + +def build_db_queries(xform: XForm, request_data: dict) -> tuple[dict, dict]: + + """ + Gets instance ids based on the request payload. + Useful to narrow down set of instances for bulk actions + + Args: + xform (XForm) + request_data (dict) + + Returns: + tuple(, ): PostgreSQL filters, Mongo filters. + They are meant to be used respectively with Django Queryset + and PyMongo query. 
+
+
+    """
+
+    mongo_query = ParsedInstance.get_base_query(
+        xform.user.username, xform.id_string
+    )
+    postgres_query = {'xform_id': xform.id}
+    instance_ids = None
+    # Remove empty values
+    payload = {
+        key_: value_ for key_, value_ in request_data.items() if value_
+    }
+    ####################################################
+    # Submissions can be retrieved in 3 different ways #
+    ####################################################
+    # First of all,
+    # users cannot send `query` and `submission_ids` in POST/PATCH request
+    #
+    if all(key_ in payload for key_ in ('query', 'submission_ids')):
+        raise BuildDbQueriesBadArgumentError
+
+    # First scenario / Get submissions based on user's query
+    try:
+        query = payload['query']
+    except KeyError:
+        pass
+    else:
+        try:
+            query.update(mongo_query)  # Overrides `_userform_id` if exists
+        except AttributeError:
+            raise BuildDbQueriesAttributeError
+
+        query_kwargs = {
+            'query': json.dumps(query),
+            'fields': '["_id"]'
+        }
+
+        cursor = ParsedInstance.query_mongo_no_paging(**query_kwargs)
+        instance_ids = [record.get('_id') for record in list(cursor)]
+
+    # Second scenario / Get submissions based on list of ids
+    try:
+        submission_ids = payload['submission_ids']
+    except KeyError:
+        pass
+    else:
+        try:
+            # Use int() to test if list of integers is valid.
+            instance_ids = [int(submission_id)
+                            for submission_id in submission_ids]
+        except ValueError:
+            raise BuildDbQueriesAttributeError
+
+    if instance_ids is not None:
+        # Narrow down queries with list of ids.
+        postgres_query.update({'id__in': instance_ids})
+        mongo_query.update({'_id': {'$in': instance_ids}})
+    elif payload.get('confirm', False) is not True:
+        # Third scenario / get all submissions in form,
+        # but confirmation param must be among payload
+        raise BuildDbQueriesNoConfirmationProvidedError
+
+    return postgres_query, mongo_query
diff --git a/kobo/apps/openrosa/apps/logger/utils/instance.py b/kobo/apps/openrosa/apps/logger/utils/instance.py
new file mode 100644
index 0000000000..39bcf54bc1
--- /dev/null
+++ b/kobo/apps/openrosa/apps/logger/utils/instance.py
@@ -0,0 +1,69 @@
+import logging
+
+from django.db.models.signals import pre_delete, post_delete
+from kobo.apps.openrosa.apps.logger.models.instance import (
+    Instance,
+)
+from kobo.apps.openrosa.apps.logger.signals import (
+    nullify_exports_time_of_last_submission,
+    update_xform_submission_count_delete,
+)
+from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance
+from kobo.apps.openrosa.apps.viewer.signals import remove_from_mongo
+
+
+from .database_query import build_db_queries
+from ..models.xform import XForm
+
+
+def delete_instances(xform: XForm, request_data: dict) -> int:
+
+    deleted_records_count = 0
+    postgres_query, mongo_query = build_db_queries(xform, request_data)
+
+    # Disconnect signals to speed-up bulk deletion
+    pre_delete.disconnect(remove_from_mongo, sender=ParsedInstance)
+    post_delete.disconnect(
+        nullify_exports_time_of_last_submission, sender=Instance,
+        dispatch_uid='nullify_exports_time_of_last_submission',
+    )
+    post_delete.disconnect(
+        update_xform_submission_count_delete, sender=Instance,
+        dispatch_uid='update_xform_submission_count_delete',
+    )
+
+    try:
+        # Delete Postgres & Mongo
+        all_count, results = Instance.objects.filter(**postgres_query).delete()
+        identifier = f'{Instance._meta.app_label}.Instance'
+        try:
+            deleted_records_count = results[identifier]
+        except KeyError:
+            # PostgreSQL did not delete any Instance objects. 
Keep going in case + # they are still present in MongoDB. + logging.warning('Instance objects cannot be found') + + ParsedInstance.bulk_delete(mongo_query) + + # Update xform like signals would do if it was as single object deletion + nullify_exports_time_of_last_submission(sender=Instance, instance=xform) + update_xform_submission_count_delete( + sender=Instance, + instance=xform, + value=deleted_records_count + ) + finally: + # Pre_delete signal needs to be re-enabled for parsed instance + pre_delete.connect(remove_from_mongo, sender=ParsedInstance) + post_delete.connect( + nullify_exports_time_of_last_submission, + sender=Instance, + dispatch_uid='nullify_exports_time_of_last_submission', + ) + post_delete.connect( + update_xform_submission_count_delete, + sender=Instance, + dispatch_uid='update_xform_submission_count_delete', + ) + + return deleted_records_count diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index 3d15d8d8b3..d04002dcca 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -7,6 +7,7 @@ import sys import traceback from datetime import date, datetime, timezone +from typing import Generator, Optional from xml.etree import ElementTree as ET from xml.parsers.expat import ExpatError try: @@ -17,6 +18,7 @@ from dict2xml import dict2xml from django.conf import settings from django.core.exceptions import ValidationError, PermissionDenied +from django.core.files.base import File from django.core.mail import mail_admins from django.db import IntegrityError, transaction from django.db.models import Q @@ -137,12 +139,12 @@ def check_edit_submission_permissions( @transaction.atomic # paranoia; redundant since `ATOMIC_REQUESTS` set to `True` def create_instance( username: str, - xml_file: str, - media_files: list['django.core.files.uploadedfile.UploadedFile'], + xml_file: File, + media_files: Generator[File], status: str = 'submitted_via_web', uuid: str = None, date_created_override: datetime = None, - request: 'rest_framework.request.Request' = None, + request: Optional['rest_framework.request.Request'] = None, ) -> Instance: """ Submission cases: @@ -524,7 +526,13 @@ def response_with_mimetype_and_name( return response -def safe_create_instance(username, xml_file, media_files, uuid, request): +def safe_create_instance( + username, + xml_file, + media_files, + uuid: Optional[str] = None, + request: Optional['rest_framework.request.Request'] = None, +): """Create an instance and catch exceptions. :returns: A list [error, instance] where error is None if there was no @@ -534,7 +542,8 @@ def safe_create_instance(username, xml_file, media_files, uuid, request): try: instance = create_instance( - username, xml_file, media_files, uuid=uuid, request=request) + username, xml_file, media_files, uuid=uuid, request=request + ) except InstanceInvalidUserError: error = OpenRosaResponseBadRequest(t("Username or ID required.")) except InstanceEmptyError: @@ -570,7 +579,7 @@ def safe_create_instance(username, xml_file, media_files, uuid, request): def save_attachments( instance: Instance, - media_files: list['django.core.files.uploadedfile.UploadedFile'], + media_files: Generator[File], defer_counting: bool = False, ) -> tuple[list[Attachment], list[Attachment]]: """ @@ -584,15 +593,19 @@ def save_attachments( which avoids locking any rows in `logger_xform` or `main_userprofile`. 
""" new_attachments = [] + for f in media_files: - attachment_filename = generate_attachment_filename(instance, f.name) + attachment_filename = generate_attachment_filename( + instance, os.path.basename(f.name) + ) existing_attachment = Attachment.objects.filter( instance=instance, media_file=attachment_filename, mimetype=f.content_type, ).first() - if existing_attachment and (existing_attachment.file_hash == - hash_attachment_contents(f.read())): + if existing_attachment and ( + existing_attachment.file_hash == hash_attachment_contents(f.read()) + ): # We already have this attachment! continue f.seek(0) @@ -616,7 +629,7 @@ def save_submission( request: 'rest_framework.request.Request', xform: XForm, xml: str, - media_files: list['django.core.files.uploadedfile.UploadedFile'], + media_files: Generator[File], new_uuid: str, status: str, date_created_override: datetime, diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index 5131d8ac8a..aa9686748b 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -208,16 +208,20 @@ def delete(self): self.asset._deployment_data.clear() # noqa @abc.abstractmethod - def delete_submission(self, submission_id: int, user: settings.AUTH_USER_MODEL) -> dict: + def delete_submission( + self, submission_id: int, user: settings.AUTH_USER_MODEL + ) -> dict: pass @abc.abstractmethod - def delete_submissions(self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs) -> dict: + def delete_submissions( + self, data: dict, user: settings.AUTH_USER_MODEL + ) -> dict: pass @abc.abstractmethod def duplicate_submission( - self, submission_id: int, user: settings.AUTH_USER_MODEL + self, submission_id: int, request: 'rest_framework.request.Request', ) -> dict: pass @@ -476,7 +480,6 @@ def store_data(self, values: dict): def stored_data_key(self): return self.__stored_data_key - @property @abc.abstractmethod def store_submission( self, user, xml_submission, submission_uuid, attachments=None @@ -665,7 +668,7 @@ def validate_access_with_partial_perms( perm: str, submission_ids: list = [], query: dict = {}, - ) -> list: + ) -> Optional[list]: """ Validate whether `user` is allowed to perform write actions on submissions with the permission `perm`. diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py index b67355abc6..e31f04018f 100644 --- a/kpi/deployment_backends/kobocat_backend.py +++ b/kpi/deployment_backends/kobocat_backend.py @@ -338,7 +338,7 @@ def delete_submissions(self, data: dict, user: settings.AUTH_USER_MODEL) -> dict return drf_response def duplicate_submission( - self, submission_id: int, user: 'settings.AUTH_USER_MODEL' + self, submission_id: int, request: 'rest_framework.request.Request', ) -> dict: """ Duplicates a single submission proxied through KoBoCAT. 
The submission @@ -350,7 +350,7 @@ def duplicate_submission( submission if successful """ - + user = request.user self.validate_access_with_partial_perms( user=user, perm=PERM_CHANGE_SUBMISSIONS, @@ -404,7 +404,7 @@ def duplicate_submission( def edit_submission( self, xml_submission_file: File, - user: settings.AUTH_USER_MODEL, + request: 'rest_framework.request.Request', attachments: dict = None, ): """ @@ -413,6 +413,7 @@ def edit_submission( The returned Response should be in XML (expected format by Enketo Express) """ + user = request.user submission_xml = xml_submission_file.read() try: xml_root = fromstring_preserve_root_xmlns(submission_xml) @@ -465,9 +466,10 @@ def edit_submission( method='POST', url=self.submission_url, files=files ) kc_response = self.__kobocat_proxy_request(kc_request, user) - return self.__prepare_as_drf_response_signature( + prepared_response = self.__prepare_as_drf_response_signature( kc_response, expected_response_format='xml' ) + return prepared_response @property def enketo_id(self): diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 0f36ac2bf3..3a292cfa91 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -217,11 +217,11 @@ def delete_submissions( } def duplicate_submission( - self, submission_id: int, user: settings.AUTH_USER_MODEL + self, submission_id: int, request: 'rest_framework.request.Request', ) -> dict: # TODO: Make this operate on XML somehow and reuse code from # KobocatDeploymentBackend, to catch issues like #3054 - + user = request.user self.validate_access_with_partial_perms( user=user, perm=PERM_CHANGE_SUBMISSIONS, diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 6bc8eda1aa..b38c5987bb 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -1,8 +1,5 @@ from __future__ import annotations -import io -import json -import re from collections import defaultdict from contextlib import contextmanager from datetime import date, datetime @@ -25,7 +22,6 @@ from django.utils import timezone from django.utils.translation import gettext_lazy as t from django_redis import get_redis_connection -from kobo_service_account.utils import get_request_headers from rest_framework import status from kobo.apps.openrosa.apps.logger.models import ( @@ -36,7 +32,8 @@ XForm, ) from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile -from kobo.apps.openrosa.libs.utils.logger_tools import publish_xls_form +from kobo.apps.openrosa.apps.logger.utils.instance import delete_instances +from kobo.apps.openrosa.libs.utils.logger_tools import safe_create_instance, publish_xls_form from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter from kpi.constants import ( @@ -49,13 +46,9 @@ PERM_VALIDATE_SUBMISSIONS, PERM_VIEW_SUBMISSIONS, ) -from kpi.deployment_backends.kc_access.storage import ( - default_kobocat_storage as default_storage, -) from kpi.exceptions import ( AttachmentNotFoundException, InvalidXFormException, - KobocatCommunicationError, SubmissionIntegrityError, SubmissionNotFoundException, XPathNotFoundException, @@ -65,10 +58,10 @@ from kpi.models.object_permission import ObjectPermission from kpi.models.paired_data import PairedData from kpi.utils.django_orm_helper import UpdateJSONFieldAttributes +from kpi.utils.files import ExtendedContentFile from kpi.utils.log import logging from 
kpi.utils.mongo_helper import MongoHelper from kpi.utils.object_permission import get_database_user -from kpi.utils.permissions import is_user_anonymous from kpi.utils.xml import fromstring_preserve_root_xmlns, xml_tostring from .base_backend import BaseDeploymentBackend from .kc_access.utils import ( @@ -78,8 +71,6 @@ ) from ..exceptions import ( BadFormatException, - KobocatDeploymentException, - KobocatDuplicateSubmissionException, ) @@ -208,7 +199,6 @@ def delete(self): super().delete() - # FIXME def delete_submission( self, submission_id: int, user: settings.AUTH_USER_MODEL ) -> dict: @@ -226,12 +216,13 @@ def delete_submission( Instance.objects.filter(pk=submission_id).delete() - # FIXME - return 1/0 + return { + 'content_type': 'application/json', + 'status': status.HTTP_204_NO_CONTENT, + } - # FIXME def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs + self, data: dict, user: settings.AUTH_USER_MODEL ) -> dict: """ Bulk delete provided submissions. @@ -243,7 +234,6 @@ def delete_submissions( or {"query": {"Question": "response"} """ - submission_ids = self.validate_access_with_partial_perms( user=user, perm=PERM_DELETE_SUBMISSIONS, @@ -259,16 +249,16 @@ def delete_submissions( data.pop('query', None) data['submission_ids'] = submission_ids - kc_url = self.submission_list_url - kc_request = requests.Request(method='DELETE', url=kc_url, json=data) - kc_response = self.__kobocat_proxy_request(kc_request, user) + deleted_count = delete_instances(self.xform, data) - drf_response = self.__prepare_as_drf_response_signature(kc_response) - return drf_response + return { + 'data': {'detail': f'{deleted_count} submissions have been deleted'}, + 'content_type': 'application/json', + 'status': status.HTTP_200_OK, + } - # FIXME def duplicate_submission( - self, submission_id: int, user: 'settings.AUTH_USER_MODEL' + self, submission_id: int, request: 'rest_framework.request.Request', ) -> dict: """ Duplicates a single submission proxied through KoBoCAT. 
The submission @@ -280,7 +270,7 @@ def duplicate_submission( submission if successful """ - + user = request.user self.validate_access_with_partial_perms( user=user, perm=PERM_CHANGE_SUBMISSIONS, @@ -294,14 +284,14 @@ def duplicate_submission( ) # Get attachments for the duplicated submission if there are any - attachment_objects = Attachment.objects.filter( + attachments = [] + if attachment_objects := Attachment.objects.filter( instance_id=submission_id - ) - attachments = ( - {a.media_file_basename: a.media_file for a in attachment_objects} - if attachment_objects - else None - ) + ): + attachments = ( + ExtendedContentFile(a.media_file.read(), name=a.media_file_basename) + for a in attachment_objects + ) # parse XML string to ET object xml_parsed = fromstring_preserve_root_xmlns(submission) @@ -323,21 +313,22 @@ def duplicate_submission( uuid_formatted ) - # - - kc_response = self.store_submission( - user, xml_tostring(xml_parsed), _uuid, attachments + safe_create_instance( + username=user.username, + xml_file=ContentFile(xml_tostring(xml_parsed)), + media_files=attachments, + uuid=_uuid, + request=request, + ) + return self._rewrite_json_attachment_urls( + next(self.get_submissions(user, query={'_uuid': _uuid})), request ) - if kc_response.status_code == status.HTTP_201_CREATED: - return next(self.get_submissions(user, query={'_uuid': _uuid})) - else: - raise KobocatDuplicateSubmissionException # FIXME def edit_submission( self, xml_submission_file: File, - user: settings.AUTH_USER_MODEL, + request: 'rest_framework.request.Request', attachments: dict = None, ): """ @@ -346,6 +337,8 @@ def edit_submission( The returned Response should be in XML (expected format by Enketo Express) """ + user = request.user + submission_xml = xml_submission_file.read() try: xml_root = fromstring_preserve_root_xmlns(submission_xml) @@ -364,6 +357,7 @@ def edit_submission( ) # Remove UUID prefix deprecated_uuid = deprecated_uuid[len('uuid:'):] + try: instance = Instance.objects.get( uuid=deprecated_uuid, @@ -388,20 +382,20 @@ def edit_submission( # Set the In-Memory file’s current position to 0 before passing it to # Request. 
xml_submission_file.seek(0) - files = {'xml_submission_file': xml_submission_file} - # Combine all files altogether - if attachments: - files.update(attachments) - - kc_request = requests.Request( - method='POST', url=self.submission_url, files=files - ) - kc_response = self.__kobocat_proxy_request(kc_request, user) - return self.__prepare_as_drf_response_signature( - kc_response, expected_response_format='xml' + safe_create_instance( + username=user.username, + xml_file=xml_submission_file, + media_files=attachments if attachments else [], + request=request, ) + return { + 'headers': {}, + 'content_type': 'text/xml; charset=utf-8', + 'status': status.HTTP_201_CREATED, + } + @property def enketo_id(self): if not (enketo_id := self.get_data('enketo_id')): @@ -709,7 +703,7 @@ def get_submissions( self, user: settings.AUTH_USER_MODEL, format_type: str = SUBMISSION_FORMAT_TYPE_JSON, - submission_ids: list = list, + submission_ids: list = None, request: Optional['rest_framework.request.Request'] = None, **mongo_query_params ) -> Union[Generator[dict, None, None], list]: @@ -734,7 +728,9 @@ def get_submissions( See `BaseDeploymentBackend._rewrite_json_attachment_urls()` """ - mongo_query_params['submission_ids'] = submission_ids + mongo_query_params['submission_ids'] = ( + submission_ids if submission_ids else [] + ) params = self.validate_submission_list_params( user, format_type=format_type, **mongo_query_params ) @@ -825,7 +821,6 @@ def redeploy(self, active=None): publish_xls_form(xlsx_file, self.asset.owner, self.xform.id_string) - # Do not call save it, asset (and its deployment) is saved right # after calling this method in `DeployableMixin.deploy()` self.store_data( @@ -1104,19 +1099,11 @@ def set_validation_statuses( kc_response = self.__kobocat_proxy_request(kc_request, user) return self.__prepare_as_drf_response_signature(kc_response) - # DEPRECATED + # @Todo DEPRECATED - to be removed def store_submission( self, user, xml_submission, submission_uuid, attachments=None ): - file_tuple = (submission_uuid, io.StringIO(xml_submission)) - files = {'xml_submission_file': file_tuple} - if attachments: - files.update(attachments) - kc_request = requests.Request( - method='POST', url=self.submission_url, files=files - ) - kc_response = self.__kobocat_proxy_request(kc_request, user=user) - return kc_response + pass @property def submission_count(self): diff --git a/kpi/utils/files.py b/kpi/utils/files.py new file mode 100644 index 0000000000..adb911ee1e --- /dev/null +++ b/kpi/utils/files.py @@ -0,0 +1,12 @@ +import os +from mimetypes import guess_type + +from django.core.files.base import ContentFile + + +class ExtendedContentFile(ContentFile): + + @property + def content_type(self): + mimetype, _ = guess_type(os.path.basename(self.name)) + return mimetype diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py index d3036c7031..1edf0ca550 100644 --- a/kpi/views/v2/asset_snapshot.py +++ b/kpi/views/v2/asset_snapshot.py @@ -242,7 +242,7 @@ def submission(self, request, *args, **kwargs): try: xml_response = asset_snapshot.asset.deployment.edit_submission( - xml_submission_file, request.user, attachments + xml_submission_file, request, attachments ) except SubmissionIntegrityError as e: raise serializers.ValidationError(str(e)) diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 918f7d2281..75adf4cdd2 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -56,8 +56,9 @@ from kpi.serializers.v2.data import DataBulkActionsValidator -class 
DataViewSet(AssetNestedObjectViewsetMixin, NestedViewSetMixin, - viewsets.GenericViewSet): +class DataViewSet( + AssetNestedObjectViewsetMixin, NestedViewSetMixin, viewsets.GenericViewSet +): """ ## List of submissions for a specific asset @@ -463,10 +464,12 @@ def list(self, request, *args, **kwargs): ) try: - submissions = deployment.get_submissions(request.user, - format_type=format_type, - request=request, - **filters) + submissions = deployment.get_submissions( + request.user, + format_type=format_type, + request=request, + **filters + ) except OperationFailure as err: message = str(err) # Don't show just any raw exception message out of fear of data leaking @@ -549,7 +552,7 @@ def duplicate(self, request, pk, *args, **kwargs): # Coerce to int because back end only finds matches with same type submission_id = positive_int(pk) duplicate_response = deployment.duplicate_submission( - submission_id=submission_id, user=request.user + submission_id=submission_id, request=request ) return Response(duplicate_response, status=status.HTTP_201_CREATED) From 23b9178817414cfdc2a5ecb54b31d7ecd45778b4 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 23 Jul 2024 13:21:15 -0400 Subject: [PATCH 008/119] WIP openrosa backend - validation status --- kobo/apps/openrosa/apps/api/tools.py | 52 +------- .../apps/api/viewsets/data_viewset.py | 8 +- .../apps/openrosa/apps/logger/models/xform.py | 19 --- .../openrosa/apps/logger/utils/instance.py | 50 +++++++- kobo/settings/base.py | 22 +--- kpi/deployment_backends/openrosa_backend.py | 117 ++++++++++++++---- 6 files changed, 152 insertions(+), 116 deletions(-) diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py index 209b3fabef..4f93cc290b 100644 --- a/kobo/apps/openrosa/apps/api/tools.py +++ b/kobo/apps/openrosa/apps/api/tools.py @@ -15,7 +15,7 @@ HttpResponseRedirect, ) from django.utils.translation import gettext as t -from kobo_service_account.utils import get_real_user, get_request_headers +from kobo_service_account.utils import get_request_headers from rest_framework import exceptions from rest_framework.request import Request from taggit.forms import TagField @@ -136,56 +136,6 @@ class TagForm(forms.Form): instance.save() -def add_validation_status_to_instance( - request: Request, instance: 'Instance' -) -> bool: - """ - Save instance validation status if it is valid. - To be valid, it has to belong to XForm validation statuses - """ - validation_status_uid = request.data.get('validation_status.uid') - success = False - - # Payload must contain validation_status property. - if validation_status_uid: - real_user = get_real_user(request) - validation_status = get_validation_status( - validation_status_uid, instance.xform, real_user.username - ) - if validation_status: - instance.validation_status = validation_status - instance.save() - success = instance.parsed_instance.update_mongo(asynchronous=False) - - return success - - -def get_validation_status(validation_status_uid, asset, username): - # Validate validation_status value It must belong to asset statuses. 
- available_statuses = {status.get("uid"): status - for status in asset.settings.get("validation_statuses")} - - validation_status = {} - - if validation_status_uid in available_statuses.keys(): - available_status = available_statuses.get(validation_status_uid) - validation_status = { - "timestamp": int(time.time()), - "uid": validation_status_uid, - "by_whom": username, - "color": available_status.get("color"), - "label": available_status.get("label") - } - - return validation_status - - -def remove_validation_status_from_instance(instance): - instance.validation_status = {} - instance.save() - return instance.parsed_instance.update_mongo(asynchronous=False) - - def get_media_file_response( metadata: MetaData, request: Request = None ) -> HttpResponse: diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index 79a85a5fa6..c8e1484370 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -25,6 +25,8 @@ ) from kobo.apps.openrosa.apps.api.tools import ( add_tags_to_instance, +) +from kobo.apps.openrosa.apps.logger.utils.instance import ( add_validation_status_to_instance, get_validation_status, remove_validation_status_from_instance, @@ -562,9 +564,13 @@ def validation_status(self, request, *args, **kwargs): data = {} if request.method != 'GET': + username = get_real_user(request).username + validation_status_uid = request.data.get('validation_status.uid') if ( request.method == 'PATCH' - and not add_validation_status_to_instance(request, instance) + and not add_validation_status_to_instance( + username, validation_status_uid, instance + ) ): http_status = status.HTTP_400_BAD_REQUEST elif request.method == 'DELETE': diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index c0c68ab6a2..c7fd168a59 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -302,25 +302,6 @@ def _xls_file_io(self): else: return BytesIO(ff.read()) - @property - def settings(self): - """ - Mimic Asset settings. 
- :return: Object - """ - # As soon as we need to add custom validation statuses in Asset settings, - # validation in add_validation_status_to_instance - # (kobocat/kobo.apps.openrosa/apps/api/tools.py) should still work - default_validation_statuses = getattr(settings, "DEFAULT_VALIDATION_STATUSES", []) - - # Later purpose, default_validation_statuses could be merged with a custom validation statuses dict - # for example: - # self._validation_statuses.update(default_validation_statuses) - - return { - "validation_statuses": default_validation_statuses - } - @property def xml_with_disclaimer(self): return XMLFormWithDisclaimer(self).get_object().xml diff --git a/kobo/apps/openrosa/apps/logger/utils/instance.py b/kobo/apps/openrosa/apps/logger/utils/instance.py index 39bcf54bc1..a1de6bebc7 100644 --- a/kobo/apps/openrosa/apps/logger/utils/instance.py +++ b/kobo/apps/openrosa/apps/logger/utils/instance.py @@ -1,9 +1,9 @@ import logging +import time +from django.conf import settings from django.db.models.signals import pre_delete, post_delete -from kobo.apps.openrosa.apps.logger.models.instance import ( - Instance, -) + from kobo.apps.openrosa.apps.logger.signals import ( nullify_exports_time_of_last_submission, update_xform_submission_count_delete, @@ -14,6 +14,30 @@ from .database_query import build_db_queries from ..models.xform import XForm +from ..models.instance import Instance + + +def add_validation_status_to_instance( + username: str, validation_status_uid: str, instance: Instance +) -> bool: + """ + Save instance validation status if it is valid. + To be valid, it has to belong to XForm validation statuses + """ + success = False + + # Payload must contain validation_status property. + if validation_status_uid: + + validation_status = get_validation_status( + validation_status_uid, username + ) + if validation_status: + instance.validation_status = validation_status + instance.save(update_fields=['validation_status']) + success = instance.parsed_instance.update_mongo(asynchronous=False) + + return success def delete_instances(xform: XForm, request_data: dict) -> int: @@ -67,3 +91,23 @@ def delete_instances(xform: XForm, request_data: dict) -> int: ) return deleted_records_count + + +def get_validation_status(validation_status_uid: str, username: str) -> dict: + try: + label = settings.DEFAULT_VALIDATION_STATUSES[validation_status_uid] + except KeyError: + return {} + + return { + 'timestamp': int(time.time()), + 'uid': validation_status_uid, + 'by_whom': username, + 'label': label, + } + + +def remove_validation_status_from_instance(instance: Instance) -> bool: + instance.validation_status = {} + instance.save(update_fields=['validation_status']) + return instance.parsed_instance.update_mongo(asynchronous=False) diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 4e72e62a72..855b80a50f 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -1693,23 +1693,11 @@ def dj_stripe_request_callback_method(): os.environ.get('SUPPORT_BRIEFCASE_SUBMISSION_DATE') != 'True' ) -DEFAULT_VALIDATION_STATUSES = [ - { - 'uid': 'validation_status_not_approved', - 'color': '#ff0000', - 'label': 'Not Approved' - }, - { - 'uid': 'validation_status_approved', - 'color': '#00ff00', - 'label': 'Approved' - }, - { - 'uid': 'validation_status_on_hold', - 'color': '#0000ff', - 'label': 'On Hold' - }, -] +DEFAULT_VALIDATION_STATUSES = { + 'validation_status_not_approved': 'Not Approved', + 'validation_status_approved': 'Approved', + 'validation_status_on_hold': 'On Hold', +} THUMB_CONF = 
{ 'large': 1280, diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index b38c5987bb..c2f2533b30 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -31,6 +31,10 @@ Instance, XForm, ) +from kobo.apps.openrosa.apps.logger.utils.instance import ( + add_validation_status_to_instance, + remove_validation_status_from_instance, +) from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile from kobo.apps.openrosa.apps.logger.utils.instance import delete_instances from kobo.apps.openrosa.libs.utils.logger_tools import safe_create_instance, publish_xls_form @@ -324,7 +328,6 @@ def duplicate_submission( next(self.get_submissions(user, query={'_uuid': _uuid})), request ) - # FIXME def edit_submission( self, xml_submission_file: File, @@ -687,12 +690,10 @@ def get_orphan_postgres_submissions(self) -> Optional[QuerySet, bool]: except InvalidXFormException: return None - # FIXME Where this method is needed def get_submission_detail_url(self, submission_id: int) -> str: url = f'{self.submission_list_url}/{submission_id}' return url - # FIXME where this method is needed def get_submission_validation_status_url(self, submission_id: int) -> str: url = '{detail_url}/validation_status'.format( detail_url=self.get_submission_detail_url(submission_id) @@ -745,15 +746,29 @@ def get_submissions( ) return submissions - # FIXME def get_validation_status( self, submission_id: int, user: settings.AUTH_USER_MODEL ) -> dict: - url = self.get_submission_validation_status_url(submission_id) - kc_request = requests.Request(method='GET', url=url) - kc_response = self.__kobocat_proxy_request(kc_request, user) + submission = self.get_submission( + submission_id, user, fields=['_validation_status'] + ) - return self.__prepare_as_drf_response_signature(kc_response) + # TODO simplify response when KobocatDeploymentBackend + # and MockDeploymentBackend are gone + if not submission: + return { + 'content_type': 'application/json', + 'status': status.HTTP_404_NOT_FOUND, + 'data': { + 'detail': f'No submission found with ID: {submission_id}' + } + } + + return { + 'data': submission['_validation_status'], + 'content_type': 'application/json', + 'status': status.HTTP_200_OK, + } @property def mongo_userform_id(self): @@ -1039,8 +1054,7 @@ def set_validation_status( method: str, ) -> dict: """ - Update validation status through KoBoCAT proxy, - authenticated by `user`'s API token. + Update validation status. 
If `method` is `DELETE`, the status is reset to `None` It returns a dictionary which can used as Response object arguments @@ -1052,24 +1066,59 @@ def set_validation_status( submission_ids=[submission_id], ) - kc_request_params = { - 'method': method, - 'url': self.get_submission_validation_status_url(submission_id), - } + # TODO simplify response when KobocatDeploymentBackend + # and MockDeploymentBackend are gone + try: + instance = Instance.objects.only( + 'validation_status', 'date_modified' + ).get(pk=submission_id) + except Instance.DoesNotExist: + return { + 'content_type': 'application/json', + 'status': status.HTTP_404_NOT_FOUND, + 'data': { + 'detail': f'No submission found with ID: {submission_id}' + } + } - if method == 'PATCH': - kc_request_params.update({'json': data}) + if method == 'DELETE': + if remove_validation_status_from_instance(instance): + return { + 'content_type': 'application/json', + 'status': status.HTTP_204_NO_CONTENT, + } + else: + return { + 'content_type': 'application/json', + 'status': status.HTTP_500_INTERNAL_SERVER_ERROR, + 'data': { + 'detail': 'Could not update MongoDB' + } + } - kc_request = requests.Request(**kc_request_params) - kc_response = self.__kobocat_proxy_request(kc_request, user) - return self.__prepare_as_drf_response_signature(kc_response) + validation_status_uid = data.get('validation_status.uid') + + if not add_validation_status_to_instance( + user.username, validation_status_uid, instance + ): + return { + 'content_type': 'application/json', + 'status': status.HTTP_400_BAD_REQUEST, + 'data': { + 'detail': f'Invalid validation status: `{validation_status_uid}`' + } + } + return { + 'data': instance.validation_status, + 'content_type': 'application/json', + 'status': status.HTTP_200_OK, + } def set_validation_statuses( self, user: settings.AUTH_USER_MODEL, data: dict ) -> dict: """ - Bulk update validation status for provided submissions through - KoBoCAT proxy, authenticated by `user`'s API token. + Bulk update validation status. `data` should contain either the submission ids or the query to retrieve the subset of submissions chosen by then user. @@ -1086,20 +1135,38 @@ def set_validation_statuses( ) # If `submission_ids` is not empty, user has partial permissions. - # Otherwise, they have have full access. + # Otherwise, they have full access. + if submission_ids: + # Remove query from `data` because all the submission ids have been + # already retrieved + data.pop('query', None) + data['submission_ids'] = submission_ids + + + + # If `submission_ids` is not empty, user has partial permissions. + # Otherwise, they have full access. 
if submission_ids: # Remove query from `data` because all the submission ids have been # already retrieved data.pop('query', None) data['submission_ids'] = submission_ids + deleted_count = delete_instances(self.xform, data) + + return { + 'data': {'detail': f'{deleted_count} submissions have been deleted'}, + 'content_type': 'application/json', + 'status': status.HTTP_200_OK, + } + # `PATCH` KC even if KPI receives `DELETE` url = self.submission_list_url kc_request = requests.Request(method='PATCH', url=url, json=data) kc_response = self.__kobocat_proxy_request(kc_request, user) return self.__prepare_as_drf_response_signature(kc_response) - # @Todo DEPRECATED - to be removed + # @Todo DEPRECATED - to be removed when KobocatDeploymentBackend is gone def store_submission( self, user, xml_submission, submission_uuid, attachments=None ): @@ -1334,11 +1401,11 @@ def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'): UserProfile.objects.filter(user_id=self.asset.owner.pk).update( attachment_storage_bytes=F('attachment_storage_bytes') - - self.xform.attachment_storage_bytes + - self.xform.attachment_storage_bytes ) UserProfile.objects.filter(user_id=self.asset.owner.pk).update( attachment_storage_bytes=F('attachment_storage_bytes') - + self.xform.attachment_storage_bytes + + self.xform.attachment_storage_bytes ) def _delete_openrosa_metadata( From 3ffc222a7dab300f24d2656b15777c56e0429ae8 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 23 Jul 2024 14:55:18 -0400 Subject: [PATCH 009/119] Add missing env var to GitHub CI --- .github/workflows/pytest.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 5bc93e6559..57d0e699c8 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -15,11 +15,15 @@ jobs: DJANGO_LANGUAGE_CODES: "ar cs de-DE en es fr hi ku pl pt tr zh-hans" DJANGO_SECRET_KEY: notSecretJustForTestingYep DJANGO_SETTINGS_MODULE: kobo.settings.testing - REDIS_SESSION_URL: redis://localhost:6379 - SERVICE_ACCOUNT_BACKEND_URL: redis://localhost:6379/4 + REDIS_SESSION_URL: redis://localhost:6379/2 + SERVICE_ACCOUNT_BACKEND_URL: redis://localhost:6379/6 CACHE_URL: redis://localhost:6379/3 ENKETO_REDIS_MAIN_URL: redis://localhost:6379/0 + CELERY_BROKER_URL: redis://localhost:6379/1 KOBOCAT_MEDIA_ROOT: /tmp/test_media + KOBOCAT_URL: http://kobocat + KOBOCAT_INTERNAL_URL: http://kobocat + KOBOFORM_URL: http://kpi strategy: matrix: python-version: ['3.8', '3.10'] From aa0b0a2668c26e5f9e2e4f811a34c579fd329112 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 30 Jul 2024 09:29:18 -0400 Subject: [PATCH 010/119] Support bulk validation status update and edit submission --- .../apps/api/viewsets/data_viewset.py | 54 +++++++++---------- kobo/apps/openrosa/apps/logger/exceptions.py | 4 ++ .../openrosa/apps/logger/utils/__init__.py | 1 + .../openrosa/apps/logger/utils/instance.py | 27 ++++++++++ kpi/deployment_backends/openrosa_backend.py | 48 ++++++++--------- kpi/views/v2/asset_snapshot.py | 6 ++- 6 files changed, 83 insertions(+), 57 deletions(-) diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index c8e1484370..46ef0320e1 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -1,10 +1,6 @@ -# coding: utf-8 -import logging -import json from typing import Union from django.db.models import Q -from django.db.models.signals 
import pre_delete, post_delete from django.http import Http404 from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as t @@ -28,23 +24,20 @@ ) from kobo.apps.openrosa.apps.logger.utils.instance import ( add_validation_status_to_instance, - get_validation_status, + delete_instances, remove_validation_status_from_instance, + set_instance_validation_statuses, ) from kobo.apps.openrosa.apps.logger.exceptions import ( BuildDbQueriesAttributeError, BuildDbQueriesBadArgumentError, BuildDbQueriesNoConfirmationProvidedError, + MissingValidationStatusPayloadError, ) from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.apps.logger.models.instance import ( Instance, ) -from kobo.apps.openrosa.apps.logger.utils import ( - build_db_queries, - delete_instances, -) -from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance from kobo.apps.openrosa.libs.renderers import renderers from kobo.apps.openrosa.libs.mixins.anonymous_user_public_forms_mixin import ( AnonymousUserPublicFormsMixin, @@ -451,33 +444,36 @@ def bulk_delete(self, request, *args, **kwargs): def bulk_validation_status(self, request, *args, **kwargs): xform = self.get_object() + real_user = get_real_user(request) try: - new_validation_status_uid = request.data['validation_status.uid'] - except KeyError: + updated_records_count = set_instance_validation_statuses( + xform, request.data, real_user.username + ) + except BuildDbQueriesBadArgumentError: + raise ValidationError({ + 'payload': t("`query` and `instance_ids` can't be used together") + }) + except BuildDbQueriesAttributeError: + raise ValidationError( + {'payload': t('Invalid `query` or `submission_ids` params')} + ) + except BuildDbQueriesNoConfirmationProvidedError: + raise NoConfirmationProvidedAPIException() + except MissingValidationStatusPayloadError: raise ValidationError({ 'payload': t('No `validation_status.uid` provided') }) - # Create new validation_status object - real_user = get_real_user(request) - new_validation_status = get_validation_status( - new_validation_status_uid, xform, real_user.username + return Response( + { + 'detail': t('{} submissions have been updated').format( + updated_records_count + ) + }, + status.HTTP_200_OK, ) - postgres_query, mongo_query = build_db_queries(xform, request.data) - - # Update Postgres & Mongo - updated_records_count = Instance.objects.filter( - **postgres_query - ).update(validation_status=new_validation_status) - ParsedInstance.bulk_update_validation_statuses(mongo_query, - new_validation_status) - return Response({ - 'detail': t('{} submissions have been updated').format( - updated_records_count) - }, status.HTTP_200_OK) - def get_serializer_class(self): pk_lookup, dataid_lookup = self.lookup_fields pk = self.kwargs.get(pk_lookup) diff --git a/kobo/apps/openrosa/apps/logger/exceptions.py b/kobo/apps/openrosa/apps/logger/exceptions.py index 79db6172fe..d8b85c5c94 100644 --- a/kobo/apps/openrosa/apps/logger/exceptions.py +++ b/kobo/apps/openrosa/apps/logger/exceptions.py @@ -18,5 +18,9 @@ class FormInactiveError(Exception): pass +class MissingValidationStatusPayloadError(Exception): + pass + + class TemporarilyUnavailableError(Exception): pass diff --git a/kobo/apps/openrosa/apps/logger/utils/__init__.py b/kobo/apps/openrosa/apps/logger/utils/__init__.py index 40c609529a..54d1dd56ae 100644 --- a/kobo/apps/openrosa/apps/logger/utils/__init__.py +++ b/kobo/apps/openrosa/apps/logger/utils/__init__.py @@ -1,3 +1,4 @@ from .counters import 
delete_null_user_daily_counters from .database_query import build_db_queries from .instance import delete_instances +from .instance import set_instance_validation_statuses diff --git a/kobo/apps/openrosa/apps/logger/utils/instance.py b/kobo/apps/openrosa/apps/logger/utils/instance.py index a1de6bebc7..f54d87b0d4 100644 --- a/kobo/apps/openrosa/apps/logger/utils/instance.py +++ b/kobo/apps/openrosa/apps/logger/utils/instance.py @@ -13,6 +13,7 @@ from .database_query import build_db_queries +from ..exceptions import MissingValidationStatusPayloadError from ..models.xform import XForm from ..models.instance import Instance @@ -111,3 +112,29 @@ def remove_validation_status_from_instance(instance: Instance) -> bool: instance.validation_status = {} instance.save(update_fields=['validation_status']) return instance.parsed_instance.update_mongo(asynchronous=False) + + +def set_instance_validation_statuses( + xform: XForm, request_data: dict, request_username: str +) -> int: + + try: + new_validation_status_uid = request_data['validation_status.uid'] + except KeyError: + raise MissingValidationStatusPayloadError + + # Create new validation_status object + new_validation_status = get_validation_status( + new_validation_status_uid, request_username + ) + + postgres_query, mongo_query = build_db_queries(xform, request_data) + + # Update Postgres & Mongo + updated_records_count = Instance.objects.filter( + **postgres_query + ).update(validation_status=new_validation_status) + ParsedInstance.bulk_update_validation_statuses( + mongo_query, new_validation_status + ) + return updated_records_count diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index c2f2533b30..416625a3df 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -24,6 +24,7 @@ from django_redis import get_redis_connection from rest_framework import status +from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile from kobo.apps.openrosa.apps.logger.models import ( Attachment, DailyXFormSubmissionCounter, @@ -33,10 +34,10 @@ ) from kobo.apps.openrosa.apps.logger.utils.instance import ( add_validation_status_to_instance, + delete_instances, remove_validation_status_from_instance, + set_instance_validation_statuses, ) -from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile -from kobo.apps.openrosa.apps.logger.utils.instance import delete_instances from kobo.apps.openrosa.libs.utils.logger_tools import safe_create_instance, publish_xls_form from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter @@ -253,6 +254,7 @@ def delete_submissions( data.pop('query', None) data['submission_ids'] = submission_ids + # TODO handle errors deleted_count = delete_instances(self.xform, data) return { @@ -386,10 +388,17 @@ def edit_submission( # Request. 
xml_submission_file.seek(0) + # Retrieve only File objects to pass to `safe_create_instance` + # TODO remove those files as soon as the view sends request.FILES directly + # See TODO in kpi/views/v2/asset_snapshot.py::submission + media_files = ( + media_file for media_file in attachments.values() + ) + safe_create_instance( username=user.username, xml_file=xml_submission_file, - media_files=attachments if attachments else [], + media_files=media_files, request=request, ) @@ -948,11 +957,11 @@ def prepare_bulk_update_response(kc_responses: list) -> dict: ) return { - 'status': status.HTTP_200_OK - if total_successes > 0 - # FIXME: If KoboCAT returns something unexpected, like a 404 or a - # 500, then 400 is not the right response to send to the client - else status.HTTP_400_BAD_REQUEST, + 'status': ( + status.HTTP_200_OK + if total_successes > 0 + else status.HTTP_400_BAD_REQUEST + ), 'data': { 'count': total_update_attempts, 'successes': total_successes, @@ -1142,30 +1151,17 @@ def set_validation_statuses( data.pop('query', None) data['submission_ids'] = submission_ids - - - # If `submission_ids` is not empty, user has partial permissions. - # Otherwise, they have full access. - if submission_ids: - # Remove query from `data` because all the submission ids have been - # already retrieved - data.pop('query', None) - data['submission_ids'] = submission_ids - - deleted_count = delete_instances(self.xform, data) + # TODO handle errors + update_instances = set_instance_validation_statuses( + self.xform, data, user + ) return { - 'data': {'detail': f'{deleted_count} submissions have been deleted'}, + 'data': {'detail': f'{update_instances} submissions have been updated'}, 'content_type': 'application/json', 'status': status.HTTP_200_OK, } - # `PATCH` KC even if KPI receives `DELETE` - url = self.submission_list_url - kc_request = requests.Request(method='PATCH', url=url, json=data) - kc_response = self.__kobocat_proxy_request(kc_request, user) - return self.__prepare_as_drf_response_signature(kc_response) - # @Todo DEPRECATED - to be removed when KobocatDeploymentBackend is gone def store_submission( self, user, xml_submission, submission_uuid, attachments=None diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py index 1edf0ca550..30f942871d 100644 --- a/kpi/views/v2/asset_snapshot.py +++ b/kpi/views/v2/asset_snapshot.py @@ -232,11 +232,13 @@ def submission(self, request, *args, **kwargs): # Prepare attachments even if all files are present in `request.FILES` # (i.e.: submission XML and attachments) - attachments = None + attachments = {} # Remove 'xml_submission_file' since it is already handled request.FILES.pop('xml_submission_file') + + # TODO pass request.FILES to `edit_submission()` directly when + # KobocatBackendDeployment is gone if len(request.FILES): - attachments = {} for name, attachment in request.FILES.items(): attachments[name] = attachment From d8e7fb8d74f837867dfe5ebf75c5a82a9b8e2d10 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 30 Jul 2024 12:14:36 -0400 Subject: [PATCH 011/119] dummy commit to create first PR --- kpi/views/v2/asset_snapshot.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py index d3036c7031..e0b328795c 100644 --- a/kpi/views/v2/asset_snapshot.py +++ b/kpi/views/v2/asset_snapshot.py @@ -1,20 +1,14 @@ -# coding: utf-8 -import re import copy -from xml.dom import Node from typing import Optional import requests -from defusedxml import minidom from django.conf 
import settings -from django.db.models import Q, F from django.http import HttpResponseRedirect, Http404 from rest_framework import renderers, serializers from rest_framework.decorators import action from rest_framework.response import Response from rest_framework.reverse import reverse -from kobo.apps.form_disclaimer.models import FormDisclaimer from kpi.authentication import DigestAuthentication, EnketoSessionAuthentication from kpi.constants import PERM_VIEW_ASSET from kpi.exceptions import SubmissionIntegrityError From 8cede6c8d2fcfdd07ea316c0ca28707af9d23875 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 30 Jul 2024 14:48:32 -0400 Subject: [PATCH 012/119] Support bulk edit with OpenRosa backend --- kpi/deployment_backends/base_backend.py | 20 ++++--- kpi/deployment_backends/kobocat_backend.py | 2 +- kpi/deployment_backends/mock_backend.py | 2 +- kpi/deployment_backends/openrosa_backend.py | 61 ++++++++++----------- kpi/views/v2/data.py | 4 +- 5 files changed, 44 insertions(+), 45 deletions(-) diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index aa9686748b..0eaf773563 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -86,7 +86,7 @@ def bulk_assign_mapped_perms(self): pass def bulk_update_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: """ Allows for bulk updating (bulk editing) of submissions. A @@ -144,7 +144,7 @@ def bulk_update_submissions( ) } - kc_responses = [] + backend_responses = [] for submission in submissions: xml_parsed = fromstring_preserve_root_xmlns(submission) @@ -172,17 +172,19 @@ def bulk_update_submissions( for path, value in update_data.items(): edit_submission_xml(xml_parsed, path, value) - kc_response = self.store_submission( - user, xml_tostring(xml_parsed), _uuid + backend_response = self.store_submission( + user, + xml_tostring(xml_parsed), + _uuid, + request=kwargs.get('request'), ) - kc_responses.append( + backend_responses.append( { 'uuid': _uuid, - 'response': kc_response, + 'response': backend_response, } ) - - return self.prepare_bulk_update_response(kc_responses) + return self.prepare_bulk_update_response(backend_responses) @abc.abstractmethod def calculated_submission_count(self, user: settings.AUTH_USER_MODEL, **kwargs): @@ -482,7 +484,7 @@ def stored_data_key(self): @abc.abstractmethod def store_submission( - self, user, xml_submission, submission_uuid, attachments=None + self, user, xml_submission, submission_uuid, attachments=None, **kwargs ): pass diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py index e31f04018f..0a6c87a2f3 100644 --- a/kpi/deployment_backends/kobocat_backend.py +++ b/kpi/deployment_backends/kobocat_backend.py @@ -1104,7 +1104,7 @@ def set_validation_statuses( return self.__prepare_as_drf_response_signature(kc_response) def store_submission( - self, user, xml_submission, submission_uuid, attachments=None + self, user, xml_submission, submission_uuid, attachments=None, **kwargs ): file_tuple = (submission_uuid, io.StringIO(xml_submission)) files = {'xml_submission_file': file_tuple} diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 3a292cfa91..58e7d64efc 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -642,7 +642,7 @@ def set_validation_statuses( } def store_submission( - self, user, 
xml_submission, submission_uuid, attachments=None + self, user, xml_submission, submission_uuid, attachments=None, **kwargs ): """ Return a mock response without actually storing anything diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 416625a3df..7a4298864e 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -910,51 +910,35 @@ def rename_enketo_id_key(self, previous_owner_username: str): pass @staticmethod - def prepare_bulk_update_response(kc_responses: list) -> dict: + def prepare_bulk_update_response(backend_responses: list) -> dict: """ Formatting the response to allow for partial successes to be seen more explicitly. - - Args: - kc_responses (list): A list containing dictionaries with keys of - `_uuid` from the newly generated uuid and `response`, the response - object received from KoBoCAT - - Returns: - dict: formatted dict to be passed to a Response object and sent to - the client """ - OPEN_ROSA_XML_MESSAGE = '{http://openrosa.org/http/response}message' - - # Unfortunately, the response message from OpenRosa is in XML format, - # so it needs to be parsed before extracting the text results = [] - for response in kc_responses: + cpt_successes = 0 + for backend_response in backend_responses: + uuid = backend_response['uuid'] + error, instance = backend_response['response'] + message = t('Something went wrong') - try: - xml_parsed = fromstring_preserve_root_xmlns( - response['response'].content - ) - except DET.ParseError: - pass - else: - message_el = xml_parsed.find(OPEN_ROSA_XML_MESSAGE) - if message_el is not None and message_el.text.strip(): - message = message_el.text + status_code = status.HTTP_400_BAD_REQUEST + if not error: + cpt_successes += 1 + message = t('Successful submission') + status_code = status.HTTP_201_CREATED results.append( { - 'uuid': response['uuid'], - 'status_code': response['response'].status_code, + 'uuid': uuid, + 'status_code': status_code, 'message': message, } ) total_update_attempts = len(results) - total_successes = [result['status_code'] for result in results].count( - status.HTTP_201_CREATED - ) + total_successes = cpt_successes return { 'status': ( @@ -1162,11 +1146,22 @@ def set_validation_statuses( 'status': status.HTTP_200_OK, } - # @Todo DEPRECATED - to be removed when KobocatDeploymentBackend is gone def store_submission( - self, user, xml_submission, submission_uuid, attachments=None + self, user, xml_submission, submission_uuid, attachments=None, **kwargs ): - pass + media_files = [] + if attachments: + media_files = ( + media_file for media_file in attachments.values() + ) + + return safe_create_instance( + username=user.username, + xml_file=ContentFile(xml_submission), + media_files=media_files, + uuid=submission_uuid, + request=kwargs.get('request'), + ) @property def submission_count(self): diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 75adf4cdd2..cf4d22db41 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -371,7 +371,9 @@ def bulk(self, request, *args, **kwargs): )) # Send request to KC - json_response = action_(bulk_actions_validator.data, request.user) + json_response = action_( + bulk_actions_validator.data, request.user, request=request + ) # If requests has succeeded, let's log deletions (if any) if json_response['status'] == status.HTTP_200_OK and audit_logs: From 02017ed2fbb236252189c8081e89e438a502a519 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 30 Jul 2024 16:54:01 
-0400 Subject: [PATCH 013/119] Remove KobocatDeploymentBackend and shadow models --- .../openrosa/apps/main/models/user_profile.py | 51 + kobo/apps/shadow_model/__init__.py | 13 - kobo/settings/base.py | 2 +- kpi/constants.py | 5 +- kpi/deployment_backends/backends.py | 3 +- kpi/deployment_backends/base_backend.py | 2 +- .../kc_access/shadow_models.py | 604 ------ kpi/deployment_backends/kc_access/utils.py | 220 +-- kpi/deployment_backends/kobocat_backend.py | 1680 ----------------- kpi/deployment_backends/mock_backend.py | 2 +- kpi/deployment_backends/openrosa_backend.py | 8 +- kpi/management/commands/copy_kc_profile.py | 5 +- .../commands/sync_kobocat_xforms.py | 9 +- .../0011_explode_asset_deployments.py | 6 +- kpi/serializers/v2/service_usage.py | 4 +- kpi/views/v2/data.py | 2 +- 16 files changed, 137 insertions(+), 2479 deletions(-) delete mode 100644 kobo/apps/shadow_model/__init__.py delete mode 100644 kpi/deployment_backends/kc_access/shadow_models.py delete mode 100644 kpi/deployment_backends/kobocat_backend.py diff --git a/kobo/apps/openrosa/apps/main/models/user_profile.py b/kobo/apps/openrosa/apps/main/models/user_profile.py index fb36140aa3..7460ad11fb 100644 --- a/kobo/apps/openrosa/apps/main/models/user_profile.py +++ b/kobo/apps/openrosa/apps/main/models/user_profile.py @@ -1,7 +1,10 @@ # coding: utf-8 +import json + from django.conf import settings from django.db import models from guardian.conf import settings as guardian_settings +from rest_framework.authtoken.models import Token from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.fields import LazyDefaultBooleanField @@ -45,6 +48,54 @@ class Meta: def __str__(self): return '%s[%s]' % (self.name, self.user.username) + @classmethod + def to_dict(cls, user_id: int) -> dict: + """ + Retrieve all fields from the user's KC profile and return them in a + dictionary + """ + try: + profile_model = cls.objects.get(user_id=user_id) + # Use a dict instead of the object in case we enter the next exception. + # The response will return a json. + # We want the variable to have the same type in both cases. + except UserProfile.DoesNotExist: + token, _ = Token.objects.get_or_create(user_id=user_id) + profile_model, _ = cls.objects.get_or_create(user_id=user_id) + + profile = profile_model.__dict__ + + fields = [ + # Use a (kc_name, new_name) tuple to rename a field + 'name', + 'organization', + ('home_page', 'organization_website'), + ('description', 'bio'), + ('phonenumber', 'phone_number'), + 'address', + 'city', + 'country', + 'twitter', + 'metadata', + ] + + result = {} + + for field in fields: + + if isinstance(field, tuple): + kc_name, field = field + else: + kc_name = field + + value = profile.get(kc_name) + # When a field contains JSON (e.g. `metadata`), it gets loaded as a + # `dict`. Convert it back to a string representation + if isinstance(value, dict): + value = json.dumps(value) + result[field] = value + return result + @property def gravatar(self): return get_gravatar_img_link(self.user) diff --git a/kobo/apps/shadow_model/__init__.py b/kobo/apps/shadow_model/__init__.py deleted file mode 100644 index 73019b2e86..0000000000 --- a/kobo/apps/shadow_model/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# coding: utf-8 -from django.apps import AppConfig -from kpi.constants import SHADOW_MODEL_APP_LABEL - - -class ShadowModelAppConfig(AppConfig): - """ - This app is not in-use but needed because one of shadow models is registered - in Django Admin. 
- """ - name = 'kobo.apps.shadow_model' - verbose_name = 'KoboCAT data' - label = SHADOW_MODEL_APP_LABEL diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 805e3b3251..a9774c52cc 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -126,7 +126,7 @@ 'kobo.apps.external_integrations.ExternalIntegrationsAppConfig', 'markdownx', 'kobo.apps.help', - 'kobo.apps.shadow_model.ShadowModelAppConfig', + # 'kobo.apps.shadow_model.ShadowModelAppConfig', 'trench', 'kobo.apps.accounts.mfa.apps.MfaAppConfig', 'kobo.apps.languages.LanguageAppConfig', diff --git a/kpi/constants.py b/kpi/constants.py index faf95ff905..c098a6c6cb 100644 --- a/kpi/constants.py +++ b/kpi/constants.py @@ -61,14 +61,11 @@ ASSET_TYPE_TEMPLATE: [ASSET_TYPE_SURVEY, ASSET_TYPE_TEMPLATE] } -ASSET_TYPE_ARG_NAME = "asset_type" +ASSET_TYPE_ARG_NAME = 'asset_type' -# Main app label for shadow models. -SHADOW_MODEL_APP_LABEL = 'shadow_model' # List of app labels that need to read/write data from KoBoCAT database # Useful in `db_routers.py` SHADOW_MODEL_APP_LABELS = [ - SHADOW_MODEL_APP_LABEL, 'superuser_stats', ] diff --git a/kpi/deployment_backends/backends.py b/kpi/deployment_backends/backends.py index 8f972ba79e..1dd514e4fe 100644 --- a/kpi/deployment_backends/backends.py +++ b/kpi/deployment_backends/backends.py @@ -1,10 +1,9 @@ # coding: utf-8 from .mock_backend import MockDeploymentBackend -from .kobocat_backend import KobocatDeploymentBackend from .openrosa_backend import OpenRosaDeploymentBackend DEPLOYMENT_BACKENDS = { 'mock': MockDeploymentBackend, - 'kobocat': KobocatDeploymentBackend, + 'kobocat': OpenRosaDeploymentBackend, 'openrosa': OpenRosaDeploymentBackend, } diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index 0eaf773563..d2e6d88ac2 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -217,7 +217,7 @@ def delete_submission( @abc.abstractmethod def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: pass diff --git a/kpi/deployment_backends/kc_access/shadow_models.py b/kpi/deployment_backends/kc_access/shadow_models.py deleted file mode 100644 index d810adbf07..0000000000 --- a/kpi/deployment_backends/kc_access/shadow_models.py +++ /dev/null @@ -1,604 +0,0 @@ -# coding: utf-8 -from __future__ import annotations - -from typing import Optional -from urllib.parse import quote as urlquote - -from django.conf import settings -from django.contrib.contenttypes.fields import GenericForeignKey -from django.core import checks -from django.core.exceptions import FieldDoesNotExist -from django.core.files.base import ContentFile -from django.db import ( - ProgrammingError, - models, - transaction, -) -from django.utils import timezone - -from kobo.apps.openrosa.libs.utils.image_tools import ( - get_optimized_image_path, - resize, -) -from kpi.constants import SHADOW_MODEL_APP_LABEL -from kpi.deployment_backends.kc_access.storage import ( - default_kobocat_storage, -) -from kpi.exceptions import ( - BadContentTypeException, -) -from kpi.fields.file import ExtendedFileField -from kpi.mixins.audio_transcoding import AudioTranscodingMixin -from kpi.utils.hash import calculate_hash -from .storage import ( - get_kobocat_storage, - KobocatFileSystemStorage, -) - - -class ShadowModel(models.Model): - """ - Allows identification of writeable and read-only shadow models - """ - class Meta: - managed = False - abstract = True - # TODO 
find out why it raises a warning when user logs in. - # ``` - # RuntimeWarning: Model '...' was already registered. - # Reloading models is not advised as it can lead to inconsistencies, - # most notably with related models - # ``` - # Maybe because `SHADOW_MODEL_APP_LABEL` is not declared in - # `INSTALLED_APP` - # It's just used for `DefaultDatabaseRouter` conditions. - app_label = SHADOW_MODEL_APP_LABEL - - @classmethod - def get_app_label_and_model_name(cls) -> tuple[str, str]: - model_name_mapping = { - 'kobocatxform': ('logger', 'xform'), - 'readonlykobocatinstance': ('logger', 'instance'), - 'kobocatuserprofile': ('main', 'userprofile'), - 'kobocatuserobjectpermission': ('guardian', 'userobjectpermission'), - } - try: - return model_name_mapping[cls._meta.model_name] - except KeyError: - raise NotImplementedError - - @classmethod - def get_content_type(cls) -> KobocatContentType: - app_label, model_name = cls.get_app_label_and_model_name() - return KobocatContentType.objects.get( - app_label=app_label, model=model_name) - - -class KobocatAttachmentManager(models.Manager): - - def get_queryset(self): - return super().get_queryset().exclude(deleted_at__isnull=False) - - -class KobocatAttachment(ShadowModel, AudioTranscodingMixin): - - class Meta(ShadowModel.Meta): - db_table = 'logger_attachment' - - instance = models.ForeignKey( - 'superuser_stats.ReadOnlyKobocatInstance', - related_name='attachments', - on_delete=models.CASCADE, - ) - media_file = ExtendedFileField( - storage=get_kobocat_storage(), max_length=380, db_index=True - ) - media_file_basename = models.CharField( - max_length=260, null=True, blank=True, db_index=True) - # `PositiveIntegerField` will only accommodate 2 GiB, so we should consider - # `PositiveBigIntegerField` after upgrading to Django 3.1+ - media_file_size = models.PositiveIntegerField(blank=True, null=True) - mimetype = models.CharField( - max_length=100, null=False, blank=True, default='' - ) - deleted_at = models.DateTimeField(blank=True, null=True, db_index=True) - objects = KobocatAttachmentManager() - all_objects = models.Manager() - - @property - def absolute_mp3_path(self): - """ - Return the absolute path on local file system of the converted version of - attachment. Otherwise, return the AWS url (e.g. https://...) - """ - - kobocat_storage = get_kobocat_storage() - - if not kobocat_storage.exists(self.mp3_storage_path): - content = self.get_transcoded_audio('mp3') - kobocat_storage.save(self.mp3_storage_path, ContentFile(content)) - - if isinstance(kobocat_storage, KobocatFileSystemStorage): - return f'{self.media_file.path}.mp3' - - return kobocat_storage.url(self.mp3_storage_path) - - @property - def absolute_path(self): - """ - Return the absolute path on local file system of the attachment. - Otherwise, return the AWS url (e.g. https://...) - """ - if isinstance(get_kobocat_storage(), KobocatFileSystemStorage): - return self.media_file.path - - return self.media_file.url - - @property - def mp3_storage_path(self): - """ - Return the path of file after conversion. It is the exact same name, plus - the conversion audio format extension concatenated. 
- E.g: file.mp4 and file.mp4.mp3 - """ - return f'{self.storage_path}.mp3' - - def protected_path( - self, format_: Optional[str] = None, suffix: Optional[str] = None - ) -> str: - """ - Return path to be served as protected file served by NGINX - """ - if format_ == 'mp3': - attachment_file_path = self.absolute_mp3_path - else: - attachment_file_path = self.absolute_path - - optimized_image_path = None - if suffix and self.mimetype.startswith('image/'): - optimized_image_path = get_optimized_image_path( - self.media_file.name, suffix - ) - if not default_kobocat_storage.exists(optimized_image_path): - resize(self.media_file.name) - - if isinstance(get_kobocat_storage(), KobocatFileSystemStorage): - # Django normally sanitizes accented characters in file names during - # save on disk but some languages have extra letters - # (out of ASCII character set) and must be encoded to let NGINX serve - # them - if optimized_image_path: - attachment_file_path = default_kobocat_storage.path( - optimized_image_path - ) - protected_url = urlquote(attachment_file_path.replace( - settings.KOBOCAT_MEDIA_ROOT, '/protected') - ) - else: - # Double-encode the S3 URL to take advantage of NGINX's - # otherwise troublesome automatic decoding - if optimized_image_path: - attachment_file_path = default_kobocat_storage.url( - optimized_image_path - ) - protected_url = f'/protected-s3/{urlquote(attachment_file_path)}' - - return protected_url - - @property - def storage_path(self): - return str(self.media_file) - - -class KobocatContentType(ShadowModel): - """ - Minimal representation of Django 1.8's - contrib.contenttypes.models.ContentType - """ - app_label = models.CharField(max_length=100) - model = models.CharField('python model class name', max_length=100) - - class Meta(ShadowModel.Meta): - db_table = 'django_content_type' - unique_together = (('app_label', 'model'),) - - def __str__(self): - # Not as nice as the original, which returns a human-readable name - # complete with whitespace. That requires access to the Python model - # class, though - return self.model - - -class KobocatDailyXFormSubmissionCounter(ShadowModel): - - date = models.DateField() - user = models.ForeignKey( - 'shadow_model.KobocatUser', null=True, on_delete=models.CASCADE - ) - xform = models.ForeignKey( - 'shadow_model.KobocatXForm', - related_name='daily_counts', - null=True, - on_delete=models.CASCADE, - ) - counter = models.IntegerField(default=0) - - class Meta(ShadowModel.Meta): - db_table = 'logger_dailyxformsubmissioncounter' - unique_together = [['date', 'xform', 'user'], ['date', 'user']] - - -class KobocatGenericForeignKey(GenericForeignKey): - - def get_content_type(self, obj=None, id=None, using=None): - if obj is not None: - return KobocatContentType.objects.db_manager(obj._state.db).get_for_model( - obj, for_concrete_model=self.for_concrete_model) - elif id is not None: - return KobocatContentType.objects.db_manager(using).get_for_id(id) - else: - # This should never happen. I love comments like this, don't you? - raise Exception("Impossible arguments to GFK.get_content_type!") - - def get_forward_related_filter(self, obj): - """See corresponding method on RelatedField""" - return { - self.fk_field: obj.pk, - self.ct_field: KobocatContentType.objects.get_for_model(obj).pk, - } - - def _check_content_type_field(self): - try: - field = self.model._meta.get_field(self.ct_field) - except FieldDoesNotExist: - return [ - checks.Error( - "The GenericForeignKey content type references the " - "nonexistent field '%s.%s'." 
% ( - self.model._meta.object_name, self.ct_field - ), - obj=self, - id='contenttypes.E002', - ) - ] - else: - if not isinstance(field, models.ForeignKey): - return [ - checks.Error( - "'%s.%s' is not a ForeignKey." % ( - self.model._meta.object_name, self.ct_field - ), - hint=( - "GenericForeignKeys must use a ForeignKey to " - "'contenttypes.ContentType' as the 'content_type' field." - ), - obj=self, - id='contenttypes.E003', - ) - ] - elif field.remote_field.model != KobocatContentType: - return [ - checks.Error( - "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." - % (self.model._meta.object_name, self.ct_field), - hint=( - "GenericForeignKeys must use a ForeignKey to " - "'contenttypes.ContentType' as the 'content_type' field." - ), - obj=self, - id='contenttypes.E004', - ) - ] - else: - return [] - - -class KobocatMetadata(ShadowModel): - - MEDIA_FILES_TYPE = [ - 'media', - 'paired_data', - ] - - xform = models.ForeignKey('shadow_model.KobocatXForm', on_delete=models.CASCADE) - data_type = models.CharField(max_length=255) - data_value = models.CharField(max_length=255) - data_file = ExtendedFileField(storage=get_kobocat_storage(), blank=True, null=True) - data_file_type = models.CharField(max_length=255, blank=True, null=True) - file_hash = models.CharField(max_length=50, blank=True, null=True) - from_kpi = models.BooleanField(default=False) - data_filename = models.CharField(max_length=255, blank=True, null=True) - date_created = models.DateTimeField(default=timezone.now) - date_modified = models.DateTimeField(default=timezone.now) - - class Meta(ShadowModel.Meta): - db_table = 'main_metadata' - - -class KobocatMonthlyXFormSubmissionCounter(ShadowModel): - year = models.IntegerField() - month = models.IntegerField() - user = models.ForeignKey( - 'shadow_model.KobocatUser', - on_delete=models.CASCADE, - ) - xform = models.ForeignKey( - 'shadow_model.KobocatXForm', - related_name='monthly_counts', - null=True, - on_delete=models.SET_NULL, - ) - counter = models.IntegerField(default=0) - - class Meta(ShadowModel.Meta): - app_label = 'superuser_stats' - db_table = 'logger_monthlyxformsubmissioncounter' - verbose_name_plural = 'User Statistics' - - -class KobocatPermission(ShadowModel): - """ - Minimal representation of Django 1.8's contrib.auth.models.Permission - """ - name = models.CharField('name', max_length=255) - content_type = models.ForeignKey(KobocatContentType, on_delete=models.CASCADE) - codename = models.CharField('codename', max_length=100) - - class Meta(ShadowModel.Meta): - db_table = 'auth_permission' - unique_together = (('content_type', 'codename'),) - ordering = ('content_type__app_label', 'content_type__model', - 'codename') - - def __str__(self): - return "%s | %s | %s" % ( - str(self.content_type.app_label), - str(self.content_type), - str(self.name)) - - -class KobocatUser(ShadowModel): - - username = models.CharField('username', max_length=30, unique=True) - password = models.CharField('password', max_length=128) - last_login = models.DateTimeField('last login', blank=True, null=True) - is_superuser = models.BooleanField('superuser status', default=False) - first_name = models.CharField('first name', max_length=30, blank=True) - last_name = models.CharField('last name', max_length=150, blank=True) - email = models.EmailField('email address', blank=True) - is_staff = models.BooleanField('staff status', default=False) - is_active = models.BooleanField('active', default=True) - date_joined = models.DateTimeField('date joined', default=timezone.now) - - 
class Meta(ShadowModel.Meta): - db_table = 'auth_user' - - @classmethod - @transaction.atomic - def sync(cls, auth_user): - # NB: `KobocatUserObjectPermission` (and probably other things) depend - # upon PKs being synchronized between KPI and KoboCAT - kc_auth_user = cls.get_kc_user(auth_user) - kc_auth_user.password = auth_user.password - kc_auth_user.last_login = auth_user.last_login - kc_auth_user.is_superuser = auth_user.is_superuser - kc_auth_user.first_name = auth_user.first_name - kc_auth_user.last_name = auth_user.last_name - kc_auth_user.email = auth_user.email - kc_auth_user.is_staff = auth_user.is_staff - kc_auth_user.is_active = auth_user.is_active - kc_auth_user.date_joined = auth_user.date_joined - - kc_auth_user.save() - - # We've manually set a primary key, so `last_value` in the sequence - # `auth_user_id_seq` now lags behind `max(id)`. Fix it now! - update_autofield_sequence(cls) - - @classmethod - def get_kc_user(cls, auth_user: settings.AUTH_USER_MODEL) -> KobocatUser: - try: - kc_auth_user = cls.objects.get(pk=auth_user.pk) - assert kc_auth_user.username == auth_user.username - except KobocatUser.DoesNotExist: - kc_auth_user = cls(pk=auth_user.pk, username=auth_user.username) - - return kc_auth_user - - -class KobocatUserObjectPermission(ShadowModel): - """ - For the _sole purpose_ of letting us manipulate KoBoCAT - permissions, this comprises the following django-guardian classes - all condensed into one: - - * UserObjectPermission - * UserObjectPermissionBase - * BaseGenericObjectPermission - * BaseObjectPermission - - CAVEAT LECTOR: The django-guardian custom manager, - UserObjectPermissionManager, is NOT included! - """ - permission = models.ForeignKey(KobocatPermission, on_delete=models.CASCADE) - content_type = models.ForeignKey(KobocatContentType, on_delete=models.CASCADE) - object_pk = models.CharField('object ID', max_length=255) - content_object = KobocatGenericForeignKey(fk_field='object_pk') - user = models.ForeignKey(KobocatUser, on_delete=models.CASCADE) - - class Meta(ShadowModel.Meta): - db_table = 'guardian_userobjectpermission' - unique_together = ['user', 'permission', 'object_pk'] - - def __str__(self): - # `unicode(self.content_object)` fails when the object's model - # isn't known to this Django project. Let's use something more - # benign instead. 
- content_object_str = '{app_label}_{model} ({pk})'.format( - app_label=self.content_type.app_label, - model=self.content_type.model, - pk=self.object_pk) - return '%s | %s | %s' % ( - # unicode(self.content_object), - content_object_str, - str(getattr(self, 'user', False) or self.group), - str(self.permission.codename)) - - def save(self, *args, **kwargs): - content_type = KobocatContentType.objects.get_for_model( - self.content_object) - if content_type != self.permission.content_type: - raise BadContentTypeException( - f"Cannot persist permission not designed for this " - "class (permission's type is {self.permission.content_type} " - "and object's type is {content_type}") - return super().save(*args, **kwargs) - - -class KobocatUserPermission(ShadowModel): - """ Needed to assign model-level KoBoCAT permissions """ - user = models.ForeignKey('KobocatUser', db_column='user_id', - on_delete=models.CASCADE) - permission = models.ForeignKey('KobocatPermission', - db_column='permission_id', - on_delete=models.CASCADE) - - class Meta(ShadowModel.Meta): - db_table = 'auth_user_user_permissions' - - -class KobocatUserProfile(ShadowModel): - """ - From onadata/apps/main/models/user_profile.py - """ - class Meta(ShadowModel.Meta): - db_table = 'main_userprofile' - verbose_name = 'user profile' - verbose_name_plural = 'user profiles' - - # This field is required. - user = models.OneToOneField(KobocatUser, - related_name='profile', - on_delete=models.CASCADE) - - # Other fields here - name = models.CharField(max_length=255, blank=True) - city = models.CharField(max_length=255, blank=True) - country = models.CharField(max_length=2, blank=True) - organization = models.CharField(max_length=255, blank=True) - home_page = models.CharField(max_length=255, blank=True) - twitter = models.CharField(max_length=255, blank=True) - description = models.CharField(max_length=255, blank=True) - require_auth = models.BooleanField(default=True) - address = models.CharField(max_length=255, blank=True) - phonenumber = models.CharField(max_length=30, blank=True) - num_of_submissions = models.IntegerField(default=0) - attachment_storage_bytes = models.BigIntegerField(default=0) - metadata = models.JSONField(default=dict, blank=True) - # We need to cast `is_active` to an (positive small) integer because KoBoCAT - # is using `LazyBooleanField` which is an integer behind the scene. - # We do not want to port this class to KPI only for one line of code. 
- is_mfa_active = models.PositiveSmallIntegerField(default=False) - validated_password = models.BooleanField(default=False) - - @classmethod - def set_mfa_status(cls, user_id: int, is_active: bool): - - user_profile, created = cls.objects.get_or_create(user_id=user_id) - user_profile.is_mfa_active = int(is_active) - user_profile.save(update_fields=['is_mfa_active']) - - @classmethod - def set_password_details( - cls, - user_id: int, - validated: bool, - ): - """ - Update the kobocat user's password_change_date and validated_password fields - """ - user_profile, created = cls.objects.get_or_create(user_id=user_id) - user_profile.validated_password = validated - user_profile.save( - update_fields=['validated_password'] - ) - - -class KobocatXForm(ShadowModel): - - class Meta(ShadowModel.Meta): - db_table = 'logger_xform' - verbose_name = 'xform' - verbose_name_plural = 'xforms' - - XFORM_TITLE_LENGTH = 255 - xls = ExtendedFileField(null=True) - xml = models.TextField() - user = models.ForeignKey( - KobocatUser, related_name='xforms', null=True, on_delete=models.CASCADE - ) - shared = models.BooleanField(default=False) - shared_data = models.BooleanField(default=False) - downloadable = models.BooleanField(default=True) - id_string = models.SlugField() - title = models.CharField(max_length=XFORM_TITLE_LENGTH) - date_created = models.DateTimeField() - date_modified = models.DateTimeField() - uuid = models.CharField(max_length=32, default='') - last_submission_time = models.DateTimeField(blank=True, null=True) - num_of_submissions = models.IntegerField(default=0) - attachment_storage_bytes = models.BigIntegerField(default=0) - kpi_asset_uid = models.CharField(max_length=32, null=True) - pending_delete = models.BooleanField(default=False) - require_auth = models.BooleanField(default=True) - - @property - def md5_hash(self): - return calculate_hash(self.xml) - - @property - def prefixed_hash(self): - """ - Matches what's returned by the KC API - """ - - return "md5:%s" % self.md5_hash - - -class ReadOnlyModel(ShadowModel): - - read_only = True - - class Meta(ShadowModel.Meta): - abstract = True - - -class ReadOnlyKobocatInstance(ReadOnlyModel): - - class Meta(ReadOnlyModel.Meta): - app_label = 'superuser_stats' - db_table = 'logger_instance' - verbose_name = 'Submissions by Country' - verbose_name_plural = 'Submissions by Country' - - xml = models.TextField() - user = models.ForeignKey(KobocatUser, null=True, on_delete=models.CASCADE) - xform = models.ForeignKey(KobocatXForm, related_name='instances', - on_delete=models.CASCADE) - date_created = models.DateTimeField() - date_modified = models.DateTimeField() - deleted_at = models.DateTimeField(null=True, default=None) - status = models.CharField(max_length=20, - default='submitted_via_web') - uuid = models.CharField(max_length=249, default='') - - -def safe_kc_read(func): - def _wrapper(*args, **kwargs): - try: - return func(*args, **kwargs) - except ProgrammingError as e: - raise ProgrammingError( - 'kc_access error accessing kobocat tables: {}'.format(str(e)) - ) - return _wrapper diff --git a/kpi/deployment_backends/kc_access/utils.py b/kpi/deployment_backends/kc_access/utils.py index 47aa22a3fd..857be6c590 100644 --- a/kpi/deployment_backends/kc_access/utils.py +++ b/kpi/deployment_backends/kc_access/utils.py @@ -1,118 +1,29 @@ -# coding: utf-8 -import json import logging from contextlib import ContextDecorator from typing import Union -import requests from django.conf import settings -from django.contrib.auth.models import AnonymousUser +from 
django.contrib.auth.models import AnonymousUser, Permission +from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured -from django.db import transaction +from django.db import ProgrammingError, transaction from django.db.models import Model -from kobo_service_account.utils import get_request_headers -from rest_framework.authtoken.models import Token +from guardian.models import UserObjectPermission from kobo.apps.kobo_auth.shortcuts import User -from kpi.exceptions import KobocatProfileException from kpi.utils.log import logging from kpi.utils.permissions import is_user_anonymous -from .shadow_models import ( - safe_kc_read, - KobocatContentType, - KobocatPermission, - KobocatUser, - KobocatUserObjectPermission, - KobocatUserPermission, - KobocatUserProfile, - KobocatXForm, -) - - -def _trigger_kc_profile_creation(user): - """ - Get the user's profile via the KC API, causing KC to create a KC - UserProfile if none exists already - """ - url = settings.KOBOCAT_INTERNAL_URL + '/api/v1/user' - token, _ = Token.objects.get_or_create(user=user) - response = requests.get( - url, headers={'Authorization': 'Token ' + token.key}) - if not response.status_code == 200: - raise KobocatProfileException( - 'Bad HTTP status code `{}` when retrieving KoBoCAT user profile' - ' for `{}`.'.format(response.status_code, user.username)) - return response - - -@safe_kc_read -def instance_count(xform_id_string, user_id): - try: - return KobocatXForm.objects.only('num_of_submissions').get( - id_string=xform_id_string, - user_id=user_id - ).num_of_submissions - except KobocatXForm.DoesNotExist: - return 0 -@safe_kc_read -def last_submission_time(xform_id_string, user_id): - return KobocatXForm.objects.get( - user_id=user_id, id_string=xform_id_string - ).last_submission_time - - -@safe_kc_read -def get_kc_profile_data(user_id): - """ - Retrieve all fields from the user's KC profile and return them in a - dictionary - """ - try: - profile_model = KobocatUserProfile.objects.get(user_id=user_id) - # Use a dict instead of the object in case we enter the next exception. - # The response will return a json. - # We want the variable to have the same type in both cases. - profile = profile_model.__dict__ - except KobocatUserProfile.DoesNotExist: +def safe_kc_read(func): + def _wrapper(*args, **kwargs): try: - response = _trigger_kc_profile_creation(User.objects.get(pk=user_id)) - profile = response.json() - except KobocatProfileException: - logging.exception('Failed to create KoBoCAT user profile') - return {} - - fields = [ - # Use a (kc_name, new_name) tuple to rename a field - 'name', - 'organization', - ('home_page', 'organization_website'), - ('description', 'bio'), - ('phonenumber', 'phone_number'), - 'address', - 'city', - 'country', - 'twitter', - 'metadata', - ] - - result = {} - - for field in fields: - - if isinstance(field, tuple): - kc_name, field = field - else: - kc_name = field - - value = profile.get(kc_name) - # When a field contains JSON (e.g. `metadata`), it gets loaded as a - # `dict`. 
Convert it back to a string representation - if isinstance(value, dict): - value = json.dumps(value) - result[field] = value - return result + return func(*args, **kwargs) + except ProgrammingError as e: + raise ProgrammingError( + 'kc_access error accessing KoboCAT tables: {}'.format(str(e)) + ) + return _wrapper def _get_content_type_kwargs_for_related(obj): @@ -167,8 +78,9 @@ def _get_applicable_kc_permissions(obj, kpi_codenames): # This permission doesn't map to anything in KC continue content_type_kwargs = _get_content_type_kwargs_for_related(obj) - permissions = KobocatPermission.objects.filter( - codename__in=kc_codenames, **content_type_kwargs) + permissions = Permission.objects.using(settings.OPENROSA_DB_ALIAS).filter( + codename__in=kc_codenames, **content_type_kwargs + ) return permissions @@ -183,58 +95,43 @@ def _get_xform_id_for_asset(asset): raise -def grant_kc_model_level_perms(user): +def grant_kc_model_level_perms(user: 'kobo_auth.User'): """ Gives `user` unrestricted model-level access to everything listed in settings.KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES. Without this, actions on individual instances are immediately denied and object-level permissions are never considered. """ - if not isinstance(user, KobocatUser): - user = KobocatUser.objects.get(pk=user.pk) - content_types = [] for pair in settings.KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES: try: content_types.append( - KobocatContentType.objects.get( + ContentType.objects.using(settings.OPENROSA_DB_ALIAS).get( app_label=pair[0], model=pair[1] ) ) - except KobocatContentType.DoesNotExist: + except ContentType.DoesNotExist: # Consider raising `ImproperlyConfigured` here. Anyone running KPI # without KC should change # `KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES` appropriately in their # settings logging.error( - 'Could not find KoBoCAT content type for {}.{}'.format(*pair) + 'Could not find KoboCAT content type for {}.{}'.format(*pair) ) - permissions_to_assign = KobocatPermission.objects.filter( - content_type__in=content_types) + permissions_to_assign = Permission.objects.using( + settings.OPENROSA_DB_ALIAS + ).filter(content_type__in=content_types) if content_types and not permissions_to_assign.exists(): raise RuntimeError( - 'No KoBoCAT permissions found! You may need to run the Django ' - 'management command `migrate` in your KoBoCAT environment. ' + 'No KoboCAT permissions found! You may need to run the Django ' + 'management command `migrate` in your KoboCAT environment. ' 'Searched for content types {}.'.format(content_types) ) - # What KC permissions does this user already have? Getting the KC database - # column names right necessitated a custom M2M model, - # `KobocatUserPermission`, which means we can't use Django's tolerant - # `add()`. 
Prior to Django 2.2, there's no way to make `bulk_create()` - # ignore `IntegrityError`s, so we have to avoid duplication manually: - # https://docs.djangoproject.com/en/2.2/ref/models/querysets/#django.db.models.query.QuerySet.bulk_create - existing_user_perm_pks = KobocatUserPermission.objects.filter( - user=user - ).values_list('permission_id', flat=True) - - KobocatUserPermission.objects.bulk_create([ - KobocatUserPermission(user=user, permission=p) - for p in permissions_to_assign if p.pk not in existing_user_perm_pks - ]) + user.user_permissions.add(*permissions_to_assign) def set_kc_anonymous_permissions_xform_flags( @@ -242,7 +139,7 @@ def set_kc_anonymous_permissions_xform_flags( ): r""" Given a KPI object, one or more KPI permission codenames and the PK of - a KC `XForm`, assume the KPI permisisons have been assigned to or + a KC `XForm`, assume the KPI permissions have been assigned to or removed from the anonymous user. Then, modify any corresponding flags on the `XForm` accordingly. :param obj: Object with `KC_ANONYMOUS_PERMISSIONS_XFORM_FLAGS` @@ -277,7 +174,8 @@ def set_kc_anonymous_permissions_xform_flags( xform_updates.update(flags) # Write to the KC database - KobocatXForm.objects.filter(pk=xform_id).update(**xform_updates) + XForm = obj.deployment.xform.__class__ # noqa - avoid circular imports + XForm.objects.filter(pk=xform_id).update(**xform_updates) def assign_applicable_kc_permissions( @@ -315,21 +213,34 @@ def assign_applicable_kc_permissions( obj, kpi_codenames, xform_id ) - xform_content_type = KobocatContentType.objects.get( - **obj.KC_CONTENT_TYPE_KWARGS) + xform_content_type = ContentType.objects.using( + settings.OPENROSA_DB_ALIAS + ).get(**obj.KC_CONTENT_TYPE_KWARGS) - kc_permissions_already_assigned = KobocatUserObjectPermission.objects.filter( - user_id=user_id, permission__in=permissions, object_pk=xform_id, - ).values_list('permission__codename', flat=True) + kc_permissions_already_assigned = ( + UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS) + .filter( + user_id=user_id, + permission__in=permissions, + object_pk=xform_id, + ) + .values_list('permission__codename', flat=True) + ) permissions_to_create = [] for permission in permissions: if permission.codename in kc_permissions_already_assigned: continue - permissions_to_create.append(KobocatUserObjectPermission( - user_id=user_id, permission=permission, object_pk=xform_id, - content_type=xform_content_type - )) - KobocatUserObjectPermission.objects.bulk_create(permissions_to_create) + permissions_to_create.append( + UserObjectPermission( + user_id=user_id, + permission=permission, + object_pk=xform_id, + content_type=xform_content_type, + ) + ) + UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS).bulk_create( + permissions_to_create + ) def remove_applicable_kc_permissions( @@ -365,10 +276,11 @@ def remove_applicable_kc_permissions( if user_id == settings.ANONYMOUS_USER_ID: return set_kc_anonymous_permissions_xform_flags( - obj, kpi_codenames, xform_id, remove=True) + obj, kpi_codenames, xform_id, remove=True + ) content_type_kwargs = _get_content_type_kwargs_for_related(obj) - KobocatUserObjectPermission.objects.filter( + UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS).filter( user_id=user_id, permission__in=permissions, object_pk=xform_id, # `permission` has a FK to `ContentType`, but I'm paranoid **content_type_kwargs @@ -406,25 +318,23 @@ def reset_kc_permissions( raise NotImplementedError content_type_kwargs = _get_content_type_kwargs_for_related(obj) - 
KobocatUserObjectPermission.objects.filter( - user_id=user_id, object_pk=xform_id, + UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS).filter( + user_id=user_id, + object_pk=xform_id, # `permission` has a FK to `ContentType`, but I'm paranoid **content_type_kwargs ).delete() def delete_kc_user(username: str): - url = settings.KOBOCAT_INTERNAL_URL + f'/api/v1/users/{username}' - - response = requests.delete( - url, headers=get_request_headers(username) - ) - response.raise_for_status() + User.objects.using(settings.OPENROSA_DB_ALIAS).filter( + username=username + ).delete() -def kc_transaction_atomic(using='kobocat', *args, **kwargs): +def kc_transaction_atomic(using=settings.OPENROSA_DB_ALIAS, *args, **kwargs): """ - KoBoCAT database does not exist in testing environment. + KoboCAT database does not exist in testing environment. `transaction.atomic(using='kobocat') cannot be called without raising errors. This utility returns a context manager which does nothing if environment @@ -442,7 +352,7 @@ def __exit__(self, exc_type, exc_value, traceback): pass assert ( - callable(using) or using == 'kobocat' + callable(using) or using == settings.OPENROSA_DB_ALIAS ), "`kc_transaction_atomic` may only be used with the 'kobocat' database" if settings.TESTING: @@ -455,6 +365,6 @@ def __exit__(self, exc_type, exc_value, traceback): # Not in a testing environment; use the real `atomic` if callable(using): - return transaction.atomic('kobocat', *args, **kwargs)(using) + return transaction.atomic(settings.OPENROSA_DB_ALIAS, *args, **kwargs)(using) else: - return transaction.atomic('kobocat', *args, **kwargs) + return transaction.atomic(settings.OPENROSA_DB_ALIAS, *args, **kwargs) diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py deleted file mode 100644 index 0a6c87a2f3..0000000000 --- a/kpi/deployment_backends/kobocat_backend.py +++ /dev/null @@ -1,1680 +0,0 @@ -from __future__ import annotations - -import io -import json -import re -from collections import defaultdict -from contextlib import contextmanager -from datetime import date, datetime -from typing import Generator, Optional, Union -from urllib.parse import urlparse -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo - -import requests -import redis.exceptions -from defusedxml import ElementTree as DET -from django.conf import settings -from django.core.exceptions import ImproperlyConfigured -from django.core.files import File -from django.db.models import Sum, F -from django.db.models.functions import Coalesce -from django.db.models.query import QuerySet -from django.utils import timezone -from django.utils.translation import gettext_lazy as t -from django_redis import get_redis_connection -from kobo_service_account.utils import get_request_headers -from rest_framework import status - -from kobo.apps.subsequences.utils import stream_with_extras -from kobo.apps.trackers.models import NLPUsageCounter -from kpi.constants import ( - SUBMISSION_FORMAT_TYPE_JSON, - SUBMISSION_FORMAT_TYPE_XML, - PERM_FROM_KC_ONLY, - PERM_CHANGE_SUBMISSIONS, - PERM_DELETE_SUBMISSIONS, - PERM_PARTIAL_SUBMISSIONS, - PERM_VALIDATE_SUBMISSIONS, - PERM_VIEW_SUBMISSIONS, -) -from kpi.exceptions import ( - AttachmentNotFoundException, - InvalidXFormException, - KobocatCommunicationError, - SubmissionIntegrityError, - SubmissionNotFoundException, - XPathNotFoundException, -) -from kpi.interfaces.sync_backend_media import SyncBackendMediaInterface -from 
kpi.models.asset_file import AssetFile -from kpi.models.object_permission import ObjectPermission -from kpi.models.paired_data import PairedData -from kpi.utils.django_orm_helper import UpdateJSONFieldAttributes -from kpi.utils.log import logging -from kpi.utils.mongo_helper import MongoHelper -from kpi.utils.object_permission import get_database_user -from kpi.utils.permissions import is_user_anonymous -from kpi.utils.xml import fromstring_preserve_root_xmlns, xml_tostring -from .base_backend import BaseDeploymentBackend -from .kc_access.shadow_models import ( - KobocatAttachment, - KobocatDailyXFormSubmissionCounter, - KobocatMonthlyXFormSubmissionCounter, - KobocatUserProfile, - KobocatXForm, - ReadOnlyKobocatInstance, -) -from .kc_access.utils import ( - assign_applicable_kc_permissions, - kc_transaction_atomic, - last_submission_time -) -from ..exceptions import ( - BadFormatException, - KobocatDeploymentException, - KobocatDuplicateSubmissionException, -) - - -class KobocatDeploymentBackend(BaseDeploymentBackend): - """ - Used to deploy a project into KoBoCAT. Stores the project identifiers in the - `self.asset._deployment_data` models.JSONField (referred as "deployment data") - """ - - SYNCED_DATA_FILE_TYPES = { - AssetFile.FORM_MEDIA: 'media', - AssetFile.PAIRED_DATA: 'paired_data', - } - - @property - def attachment_storage_bytes(self): - try: - return self.xform.attachment_storage_bytes - except InvalidXFormException: - return 0 - - def bulk_assign_mapped_perms(self): - """ - Bulk assign all KoBoCAT permissions related to KPI permissions. - Useful to assign permissions retroactively upon deployment. - Beware: it only adds permissions, it does not remove or sync permissions. - """ - users_with_perms = self.asset.get_users_with_perms(attach_perms=True) - - # if only the owner has permissions, no need to go further - if len(users_with_perms) == 1 and \ - list(users_with_perms)[0].id == self.asset.owner_id: - return - - with kc_transaction_atomic(): - for user, perms in users_with_perms.items(): - if user.id == self.asset.owner_id: - continue - assign_applicable_kc_permissions(self.asset, user, perms) - - def calculated_submission_count( - self, user: settings.AUTH_USER_MODEL, **kwargs - ) -> int: - params = self.validate_submission_list_params( - user, validate_count=True, **kwargs - ) - return MongoHelper.get_count(self.mongo_userform_id, **params) - - def connect(self, active=False): - """ - `POST` initial survey content to KoBoCAT and create a new project. - Store results in deployment data. - CAUTION: Does not save deployment data to the database! - """ - # Use the external URL here; the internal URL will be substituted - # in when appropriate - if not settings.KOBOCAT_URL or not settings.KOBOCAT_INTERNAL_URL: - raise ImproperlyConfigured( - 'Both KOBOCAT_URL and KOBOCAT_INTERNAL_URL must be ' - 'configured before using KobocatDeploymentBackend' - ) - kc_server = settings.KOBOCAT_URL - id_string = self.asset.uid - - url = self.normalize_internal_url('{}/api/v1/forms'.format(kc_server)) - xlsx_io = self.asset.to_xlsx_io( - versioned=True, append={ - 'settings': { - 'id_string': id_string, - 'form_title': self.asset.name, - } - } - ) - - # Payload contains `kpi_asset_uid` and `has_kpi_hook` for two reasons: - # - KC `XForm`'s `id_string` can be different than `Asset`'s `uid`, then - # we can't rely on it to find its related `Asset`. - # - Removing, renaming `has_kpi_hook` will force PostgreSQL to rewrite - # every record of `logger_xform`. 
It can be also used to filter - # queries as it is faster to query a boolean than string. - payload = { - 'downloadable': active, - 'has_kpi_hook': self.asset.has_active_hooks, - 'kpi_asset_uid': self.asset.uid - } - files = {'xls_file': ('{}.xlsx'.format(id_string), xlsx_io)} - json_response = self._kobocat_request( - 'POST', url, data=payload, files=files - ) - # Store only path - json_response['url'] = urlparse(json_response['url']).path - self.store_data( - { - 'backend': 'kobocat', - 'active': json_response['downloadable'], - 'backend_response': json_response, - 'version': self.asset.version_id, - } - ) - - @property - def form_uuid(self): - try: - return self.backend_response['uuid'] - except KeyError: - logging.warning( - 'KoboCAT backend response has no `uuid`', exc_info=True - ) - return None - - @staticmethod - def nlp_tracking_data(asset_ids, start_date=None): - """ - Get the NLP tracking data since a specified date - If no date is provided, get all-time data - """ - filter_args = {} - if start_date: - filter_args = {'date__gte': start_date} - try: - nlp_tracking = ( - NLPUsageCounter.objects.only('total_asr_seconds', 'total_mt_characters') - .filter( - asset_id__in=asset_ids, - **filter_args - ).aggregate( - total_nlp_asr_seconds=Coalesce(Sum('total_asr_seconds'), 0), - total_nlp_mt_characters=Coalesce(Sum('total_mt_characters'), 0), - ) - ) - except NLPUsageCounter.DoesNotExist: - return { - 'total_nlp_asr_seconds': 0, - 'total_nlp_mt_characters': 0, - } - else: - return nlp_tracking - - def submission_count_since_date(self, start_date=None): - try: - xform_id = self.xform_id - except InvalidXFormException: - return 0 - - today = timezone.now().date() - filter_args = { - 'xform_id': xform_id, - } - if start_date: - filter_args['date__range'] = [start_date, today] - try: - # Note: this is replicating the functionality that was formerly in - # `current_month_submission_count`. `current_month_submission_count` - # didn't account for partial permissions, and this doesn't either - total_submissions = KobocatDailyXFormSubmissionCounter.objects.only( - 'date', 'counter' - ).filter(**filter_args).aggregate(count_sum=Coalesce(Sum('counter'), 0)) - except KobocatDailyXFormSubmissionCounter.DoesNotExist: - return 0 - else: - return total_submissions['count_sum'] - - @staticmethod - def format_openrosa_datetime(dt: Optional[datetime] = None) -> str: - """ - Format a given datetime object or generate a new timestamp matching the - OpenRosa datetime formatting - """ - if dt is None: - dt = datetime.now(tz=ZoneInfo('UTC')) - - # Awkward check, but it's prescribed by - # https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive - if dt.tzinfo is None or dt.tzinfo.utcoffset(None) is None: - raise ValueError('An offset-aware datetime is required') - return dt.isoformat('T', 'milliseconds') - - def delete(self): - """ - WARNING! Deletes all submitted data! - """ - url = self.normalize_internal_url(self.backend_response['url']) - try: - self._kobocat_request('DELETE', url) - except KobocatDeploymentException as e: - if not hasattr(e, 'response'): - raise - - if e.response.status_code == status.HTTP_404_NOT_FOUND: - # The KC project is already gone! 
- pass - elif e.response.status_code in [ - status.HTTP_502_BAD_GATEWAY, - status.HTTP_504_GATEWAY_TIMEOUT, - ]: - raise KobocatCommunicationError - elif e.response.status_code == status.HTTP_401_UNAUTHORIZED: - raise KobocatCommunicationError( - 'Could not authenticate to KoBoCAT' - ) - else: - raise - - super().delete() - - def delete_submission( - self, submission_id: int, user: settings.AUTH_USER_MODEL - ) -> dict: - """ - Delete a submission through KoBoCAT proxy - - It returns a dictionary which can used as Response object arguments - """ - - self.validate_access_with_partial_perms( - user=user, - perm=PERM_DELETE_SUBMISSIONS, - submission_ids=[submission_id] - ) - - kc_url = self.get_submission_detail_url(submission_id) - kc_request = requests.Request(method='DELETE', url=kc_url) - kc_response = self.__kobocat_proxy_request(kc_request, user) - - return self.__prepare_as_drf_response_signature(kc_response) - - def delete_submissions(self, data: dict, user: settings.AUTH_USER_MODEL) -> dict: - """ - Bulk delete provided submissions through KoBoCAT proxy, - authenticated by `user`'s API token. - - `data` should contain the submission ids or the query to get the subset - of submissions to delete - Example: - {"submission_ids": [1, 2, 3]} - or - {"query": {"Question": "response"} - """ - - submission_ids = self.validate_access_with_partial_perms( - user=user, - perm=PERM_DELETE_SUBMISSIONS, - submission_ids=data['submission_ids'], - query=data['query'], - ) - - # If `submission_ids` is not empty, user has partial permissions. - # Otherwise, they have full access. - if submission_ids: - # Remove query from `data` because all the submission ids have been - # already retrieved - data.pop('query', None) - data['submission_ids'] = submission_ids - - kc_url = self.submission_list_url - kc_request = requests.Request(method='DELETE', url=kc_url, json=data) - kc_response = self.__kobocat_proxy_request(kc_request, user) - - drf_response = self.__prepare_as_drf_response_signature(kc_response) - return drf_response - - def duplicate_submission( - self, submission_id: int, request: 'rest_framework.request.Request', - ) -> dict: - """ - Duplicates a single submission proxied through KoBoCAT. The submission - with the given `submission_id` is duplicated and the `start`, `end` and - `instanceID` parameters of the submission are reset before being posted - to KoBoCAT. - - Returns a dict with message response from KoBoCAT and uuid of created - submission if successful - - """ - user = request.user - self.validate_access_with_partial_perms( - user=user, - perm=PERM_CHANGE_SUBMISSIONS, - submission_ids=[submission_id], - ) - - submission = self.get_submission( - submission_id, - user=user, - format_type=SUBMISSION_FORMAT_TYPE_XML, - ) - - # Get attachments for the duplicated submission if there are any - attachment_objects = KobocatAttachment.objects.filter( - instance_id=submission_id - ) - attachments = ( - {a.media_file_basename: a.media_file for a in attachment_objects} - if attachment_objects - else None - ) - - # parse XML string to ET object - xml_parsed = fromstring_preserve_root_xmlns(submission) - - # attempt to update XML fields for duplicate submission. Note that - # `start` and `end` are not guaranteed to be included in the XML object - _uuid, uuid_formatted = self.generate_new_instance_id() - date_formatted = self.format_openrosa_datetime() - for date_field in ('start', 'end'): - element = xml_parsed.find(date_field) - # Even if the element is found, `bool(element)` is `False`. 
How - # very un-Pythonic! - if element is not None: - element.text = date_formatted - # Rely on `meta/instanceID` being present. If it's absent, something is - # fishy enough to warrant raising an exception instead of continuing - # silently - xml_parsed.find(self.SUBMISSION_CURRENT_UUID_XPATH).text = ( - uuid_formatted - ) - - kc_response = self.store_submission( - user, xml_tostring(xml_parsed), _uuid, attachments - ) - if kc_response.status_code == status.HTTP_201_CREATED: - return next(self.get_submissions(user, query={'_uuid': _uuid})) - else: - raise KobocatDuplicateSubmissionException - - def edit_submission( - self, - xml_submission_file: File, - request: 'rest_framework.request.Request', - attachments: dict = None, - ): - """ - Edit a submission through KoBoCAT proxy on behalf of `user`. - Attachments can be uploaded by passing a dictionary (name, File object) - - The returned Response should be in XML (expected format by Enketo Express) - """ - user = request.user - submission_xml = xml_submission_file.read() - try: - xml_root = fromstring_preserve_root_xmlns(submission_xml) - except DET.ParseError: - raise SubmissionIntegrityError( - t('Your submission XML is malformed.') - ) - try: - deprecated_uuid = xml_root.find( - self.SUBMISSION_DEPRECATED_UUID_XPATH - ).text - xform_uuid = xml_root.find(self.FORM_UUID_XPATH).text - except AttributeError: - raise SubmissionIntegrityError( - t('Your submission XML is missing critical elements.') - ) - # Remove UUID prefix - deprecated_uuid = deprecated_uuid[len('uuid:'):] - try: - instance = ReadOnlyKobocatInstance.objects.get( - uuid=deprecated_uuid, - xform__uuid=xform_uuid, - xform__kpi_asset_uid=self.asset.uid, - ) - except ReadOnlyKobocatInstance.DoesNotExist: - raise SubmissionIntegrityError( - t( - 'The submission you attempted to edit could not be found, ' - 'or you do not have access to it.' - ) - ) - - # Validate write access for users with partial permissions - self.validate_access_with_partial_perms( - user=user, - perm=PERM_CHANGE_SUBMISSIONS, - submission_ids=[instance.pk] - ) - - # Set the In-Memory file’s current position to 0 before passing it to - # Request. - xml_submission_file.seek(0) - files = {'xml_submission_file': xml_submission_file} - - # Combine all files altogether - if attachments: - files.update(attachments) - - kc_request = requests.Request( - method='POST', url=self.submission_url, files=files - ) - kc_response = self.__kobocat_proxy_request(kc_request, user) - prepared_response = self.__prepare_as_drf_response_signature( - kc_response, expected_response_format='xml' - ) - return prepared_response - - @property - def enketo_id(self): - if not (enketo_id := self.get_data('enketo_id')): - self.get_enketo_survey_links() - enketo_id = self.get_data('enketo_id') - return enketo_id - - @staticmethod - def normalize_internal_url(url: str) -> str: - """ - Normalize url to ensure KOBOCAT_INTERNAL_URL is used - """ - parsed_url = urlparse(url) - return f'{settings.KOBOCAT_INTERNAL_URL}{parsed_url.path}' - - def get_attachment( - self, - submission_id_or_uuid: Union[int, str], - user: settings.AUTH_USER_MODEL, - attachment_id: Optional[int] = None, - xpath: Optional[str] = None, - ) -> KobocatAttachment: - """ - Return an object which can be retrieved by its primary key or by XPath. - An exception is raised when the submission or the attachment is not found. 
- """ - submission_id = None - submission_uuid = None - try: - submission_id = int(submission_id_or_uuid) - except ValueError: - submission_uuid = submission_id_or_uuid - if submission_uuid: - # `_uuid` is the legacy identifier that changes (per OpenRosa spec) - # after every edit; `meta/rootUuid` remains consistent across - # edits. prefer the latter when fetching by UUID. - candidates = list( - self.get_submissions( - user, - query={ - '$or': [ - {'meta/rootUuid': submission_uuid}, - {'_uuid': submission_uuid}, - ] - }, - fields=['_id', 'meta/rootUuid', '_uuid'], - ) - ) - if not candidates: - raise SubmissionNotFoundException - for submission in candidates: - if submission.get('meta/rootUuid') == submission_uuid: - submission_id = submission['_id'] - break - else: - # no submissions with matching `meta/rootUuid` were found; - # get the "first" result, despite there being no order - # specified, just for consistency with previous code - submission_id = candidates[0]['_id'] - - submission_xml = self.get_submission( - submission_id, user, format_type=SUBMISSION_FORMAT_TYPE_XML - ) - if not submission_xml: - raise SubmissionNotFoundException - - if xpath: - submission_root = fromstring_preserve_root_xmlns(submission_xml) - element = submission_root.find(xpath) - if element is None: - raise XPathNotFoundException - attachment_filename = element.text - filters = { - 'media_file_basename': attachment_filename, - } - else: - filters = { - 'pk': attachment_id, - } - - filters['instance__id'] = submission_id - # Ensure the attachment actually belongs to this project! - filters['instance__xform_id'] = self.xform_id - - try: - attachment = KobocatAttachment.objects.get(**filters) - except KobocatAttachment.DoesNotExist: - raise AttachmentNotFoundException - - return attachment - - def get_attachment_objects_from_dict(self, submission: dict) -> QuerySet: - - # First test that there are attachments to avoid a call to the DB for - # nothing - if not submission.get('_attachments'): - return [] - - # Get filenames from DB because Mongo does not contain the - # original basename. - # EE excepts the original basename before Django renames it and - # stores it in Mongo - # E.g.: - # - XML filename: Screenshot 2022-01-19 222028-13_45_57.jpg - # - Mongo: Screenshot_2022-01-19_222028-13_45_57.jpg - - # ToDo What about adding the original basename and the question - # name in Mongo to avoid another DB query? - return KobocatAttachment.objects.filter( - instance_id=submission['_id'] - ) - - def get_daily_counts( - self, user: settings.AUTH_USER_MODEL, timeframe: tuple[date, date] - ) -> dict: - - user = get_database_user(user) - - if user != self.asset.owner and self.asset.has_perm( - user, PERM_PARTIAL_SUBMISSIONS - ): - # We cannot use cached values from daily counter when user has - # partial permissions. We need to use MongoDB aggregation engine - # to retrieve the correct value according to user's permissions. 
- permission_filters = self.asset.get_filters_for_partial_perm( - user.pk, perm=PERM_VIEW_SUBMISSIONS - ) - - if not permission_filters: - return {} - - query = { - '_userform_id': self.mongo_userform_id, - '_submission_time': { - '$gte': f'{timeframe[0]}', - '$lte': f'{timeframe[1]}T23:59:59' - } - } - - query = MongoHelper.get_permission_filters_query( - query, permission_filters - ) - - documents = settings.MONGO_DB.instances.aggregate([ - { - '$match': query, - }, - { - '$group': { - '_id': { - '$dateToString': { - 'format': '%Y-%m-%d', - 'date': { - '$dateFromString': { - 'format': "%Y-%m-%dT%H:%M:%S", - 'dateString': "$_submission_time" - } - } - } - }, - 'count': {'$sum': 1} - } - } - ]) - return {doc['_id']: doc['count'] for doc in documents} - - # Trivial case, user has 'view_permissions' - daily_counts = ( - KobocatDailyXFormSubmissionCounter.objects.values( - 'date', 'counter' - ).filter( - xform_id=self.xform_id, - date__range=timeframe, - ) - ) - return { - str(count['date']): count['counter'] for count in daily_counts - } - - def get_data_download_links(self): - exports_base_url = '/'.join(( - settings.KOBOCAT_URL.rstrip('/'), - self.asset.owner.username, - 'exports', - self.backend_response['id_string'] - )) - reports_base_url = '/'.join(( - settings.KOBOCAT_URL.rstrip('/'), - self.asset.owner.username, - 'reports', - self.backend_response['id_string'] - )) - links = { - # To be displayed in iframes - 'xls_legacy': '/'.join((exports_base_url, 'xls/')), - 'csv_legacy': '/'.join((exports_base_url, 'csv/')), - 'zip_legacy': '/'.join((exports_base_url, 'zip/')), - 'kml_legacy': '/'.join((exports_base_url, 'kml/')), - # For GET requests that return files directly - 'xls': '/'.join((reports_base_url, 'export.xlsx')), - 'csv': '/'.join((reports_base_url, 'export.csv')), - } - return links - - def get_enketo_survey_links(self): - if not self.get_data('backend_response'): - return {} - - data = { - 'server_url': '{}/{}'.format( - settings.KOBOCAT_URL.rstrip('/'), - self.asset.owner.username - ), - 'form_id': self.backend_response['id_string'] - } - - try: - response = requests.post( - f'{settings.ENKETO_URL}/{settings.ENKETO_SURVEY_ENDPOINT}', - # bare tuple implies basic auth - auth=(settings.ENKETO_API_KEY, ''), - data=data - ) - response.raise_for_status() - except requests.exceptions.RequestException: - # Don't 500 the entire asset view if Enketo is unreachable - logging.error( - 'Failed to retrieve links from Enketo', exc_info=True) - return {} - try: - links = response.json() - except ValueError: - logging.error('Received invalid JSON from Enketo', exc_info=True) - return {} - - try: - enketo_id = links.pop('enketo_id') - except KeyError: - logging.error( - 'Invalid response from Enketo: `enketo_id` is not found', - exc_info=True, - ) - return {} - - stored_enketo_id = self.get_data('enketo_id') - if stored_enketo_id != enketo_id: - if stored_enketo_id: - logging.warning( - f'Enketo ID has changed from {stored_enketo_id} to {enketo_id}' - ) - self.save_to_db({'enketo_id': enketo_id}) - - if self.xform.require_auth: - # Unfortunately, EE creates unique ID based on OpenRosa server URL. - # Thus, we need to always generated the ID with the same URL - # (i.e.: with username) to be retro-compatible and then, - # overwrite the OpenRosa server URL again. 
- self.set_enketo_open_rosa_server( - require_auth=True, enketo_id=enketo_id - ) - - for discard in ('enketo_id', 'code', 'preview_iframe_url'): - try: - del links[discard] - except KeyError: - pass - return links - - def get_orphan_postgres_submissions(self) -> Optional[QuerySet, bool]: - """ - Return a queryset of all submissions still present in PostgreSQL - database related to `self.xform`. - Return False if one submission still exists in MongoDB at - least. - Otherwise, if `self.xform` does not exist (anymore), return None - """ - all_submissions = self.get_submissions( - user=self.asset.owner, - fields=['_id'], - skip_count=True, - ) - try: - next(all_submissions) - except StopIteration: - pass - else: - return False - - try: - return ReadOnlyKobocatInstance.objects.filter(xform_id=self.xform_id) - except InvalidXFormException: - return None - - def get_submission_detail_url(self, submission_id: int) -> str: - url = f'{self.submission_list_url}/{submission_id}' - return url - - def get_submission_validation_status_url(self, submission_id: int) -> str: - url = '{detail_url}/validation_status'.format( - detail_url=self.get_submission_detail_url(submission_id) - ) - return url - - def get_submissions( - self, - user: settings.AUTH_USER_MODEL, - format_type: str = SUBMISSION_FORMAT_TYPE_JSON, - submission_ids: list = [], - request: Optional['rest_framework.request.Request'] = None, - **mongo_query_params - ) -> Union[Generator[dict, None, None], list]: - """ - Retrieve submissions that `user` is allowed to access. - - The format `format_type` can be either: - - 'json' (See `kpi.constants.SUBMISSION_FORMAT_TYPE_JSON`) - - 'xml' (See `kpi.constants.SUBMISSION_FORMAT_TYPE_XML`) - - Results can be filtered by submission ids. Moreover MongoDB filters can - be passed through `query` to narrow down the results. - - If `user` has no access to these submissions or no matches are found, - an empty generator is returned. - - If `format_type` is 'json', a generator of dictionaries is returned. - Otherwise, if `format_type` is 'xml', a generator of strings is returned. - - If `request` is provided, submission attachments url are rewritten to - point to KPI (instead of KoBoCAT). 
- See `BaseDeploymentBackend._rewrite_json_attachment_urls()` - """ - - mongo_query_params['submission_ids'] = submission_ids - params = self.validate_submission_list_params( - user, format_type=format_type, **mongo_query_params - ) - - if format_type == SUBMISSION_FORMAT_TYPE_JSON: - submissions = self.__get_submissions_in_json(request, **params) - elif format_type == SUBMISSION_FORMAT_TYPE_XML: - submissions = self.__get_submissions_in_xml(**params) - else: - raise BadFormatException( - "The format {} is not supported".format(format_type) - ) - return submissions - - def get_validation_status( - self, submission_id: int, user: settings.AUTH_USER_MODEL - ) -> dict: - url = self.get_submission_validation_status_url(submission_id) - kc_request = requests.Request(method='GET', url=url) - kc_response = self.__kobocat_proxy_request(kc_request, user) - - return self.__prepare_as_drf_response_signature(kc_response) - - @staticmethod - def internal_to_external_url(url): - """ - Replace the value of `settings.KOBOCAT_INTERNAL_URL` with that of - `settings.KOBOCAT_URL` when it appears at the beginning of - `url` - """ - return re.sub( - pattern='^{}'.format(re.escape(settings.KOBOCAT_INTERNAL_URL)), - repl=settings.KOBOCAT_URL, - string=url - ) - - @property - def mongo_userform_id(self): - return '{}_{}'.format(self.asset.owner.username, self.xform_id_string) - - def redeploy(self, active=None): - """ - Replace (overwrite) the deployment, keeping the same identifier, and - optionally changing whether the deployment is active. - CAUTION: Does not save deployment data to the database! - """ - if active is None: - active = self.active - url = self.normalize_internal_url(self.backend_response['url']) - id_string = self.backend_response['id_string'] - xlsx_io = self.asset.to_xlsx_io( - versioned=True, append={ - 'settings': { - 'id_string': id_string, - 'form_title': self.asset.name, - } - } - ) - payload = { - 'downloadable': active, - 'title': self.asset.name, - 'has_kpi_hook': self.asset.has_active_hooks - } - files = {'xls_file': ('{}.xlsx'.format(id_string), xlsx_io)} - json_response = self._kobocat_request( - 'PATCH', url, data=payload, files=files - ) - self.store_data({ - 'active': json_response['downloadable'], - 'backend_response': json_response, - 'version': self.asset.version_id, - }) - - self.set_asset_uid() - - def remove_from_kc_only_flag( - self, specific_user: Union[int, settings.AUTH_USER_MODEL] = None - ): - """ - Removes `from_kc_only` flag for ALL USERS unless `specific_user` is - provided - - Args: - specific_user (int, User): User object or pk - """ - # This flag lets us know that permission assignments in KPI exist - # only because they were copied from KoBoCAT (by `sync_from_kobocat`). - # As soon as permissions are assigned through KPI, this flag must be - # removed - # - # This method is here instead of `ObjectPermissionMixin` because - # it's specific to KoBoCat as backend. 
- - # TODO: Remove this method after kobotoolbox/kobocat#642 - - filters = { - 'permission__codename': PERM_FROM_KC_ONLY, - 'asset_id': self.asset.id, - } - if specific_user is not None: - try: - user_id = specific_user.pk - except AttributeError: - user_id = specific_user - filters['user_id'] = user_id - - ObjectPermission.objects.filter(**filters).delete() - - def rename_enketo_id_key(self, previous_owner_username: str): - parsed_url = urlparse(settings.KOBOCAT_URL) - domain_name = parsed_url.netloc - asset_uid = self.asset.uid - enketo_redis_client = get_redis_connection('enketo_redis_main') - - try: - enketo_redis_client.rename( - src=f'or:{domain_name}/{previous_owner_username},{asset_uid}', - dst=f'or:{domain_name}/{self.asset.owner.username},{asset_uid}' - ) - except redis.exceptions.ResponseError: - # original does not exist, weird but don't raise a 500 for that - pass - - def set_active(self, active): - """ - `PATCH` active boolean of the survey. - Store results in deployment data - """ - # self.store_data is an alias for - # self.asset._deployment_data.update(...) - url = self.normalize_internal_url( - self.backend_response['url']) - payload = { - 'downloadable': bool(active) - } - json_response = self._kobocat_request('PATCH', url, data=payload) - assert json_response['downloadable'] == bool(active) - - self.save_to_db({ - 'active': json_response['downloadable'], - 'backend_response': json_response, - }) - - def set_asset_uid(self, force: bool = False) -> bool: - """ - Link KoBoCAT `XForm` back to its corresponding KPI `Asset` by - populating the `kpi_asset_uid` field (use KoBoCAT proxy to PATCH XForm). - Useful when a form is created from the legacy upload form. - Store results in deployment data. - - It returns `True` only if `XForm.kpi_asset_uid` field is updated - during this call, otherwise `False`. - """ - is_synchronized = not ( - force or - self.backend_response.get('kpi_asset_uid', None) is None - ) - if is_synchronized: - return False - - url = self.normalize_internal_url(self.backend_response['url']) - payload = { - 'kpi_asset_uid': self.asset.uid - } - json_response = self._kobocat_request('PATCH', url, data=payload) - is_set = json_response['kpi_asset_uid'] == self.asset.uid - assert is_set - self.store_data({ - 'backend_response': json_response, - }) - return True - - def set_enketo_open_rosa_server( - self, require_auth: bool, enketo_id: str = None - ): - # Kobocat handles Open Rosa requests with different accesses. - # - Authenticated access, https://[kc] - # - Anonymous access, https://[kc]/username - # Enketo generates its unique ID based on the server URL. - # Thus, if the project requires authentication, we need to update Redis - # directly to keep the same ID and let Enketo submit data to correct - # endpoint - if not enketo_id: - enketo_id = self.enketo_id - - server_url = settings.KOBOCAT_URL.rstrip('/') - if not require_auth: - server_url = f'{server_url}/{self.asset.owner.username}' - - enketo_redis_client = get_redis_connection('enketo_redis_main') - enketo_redis_client.hset( - f'id:{enketo_id}', - 'openRosaServer', - server_url, - ) - - def set_has_kpi_hooks(self): - """ - `PATCH` `has_kpi_hooks` boolean of related KoBoCAT XForm. - It lets KoBoCAT know whether it needs to notify KPI - each time a submission comes in. 
- - Store results in deployment data - """ - has_active_hooks = self.asset.has_active_hooks - url = self.normalize_internal_url( - self.backend_response['url']) - payload = { - 'has_kpi_hooks': has_active_hooks, - 'kpi_asset_uid': self.asset.uid - } - - try: - json_response = self._kobocat_request('PATCH', url, data=payload) - except KobocatDeploymentException as e: - if ( - has_active_hooks is False - and hasattr(e, 'response') - and e.response.status_code == status.HTTP_404_NOT_FOUND - ): - # It's okay if we're trying to unset the active hooks flag and - # the KoBoCAT project is already gone. See #2497 - pass - else: - raise - else: - assert json_response['has_kpi_hooks'] == has_active_hooks - self.store_data({ - 'backend_response': json_response, - }) - - def set_validation_status( - self, - submission_id: int, - user: settings.AUTH_USER_MODEL, - data: dict, - method: str, - ) -> dict: - """ - Update validation status through KoBoCAT proxy, - authenticated by `user`'s API token. - If `method` is `DELETE`, the status is reset to `None` - - It returns a dictionary which can used as Response object arguments - """ - - self.validate_access_with_partial_perms( - user=user, - perm=PERM_VALIDATE_SUBMISSIONS, - submission_ids=[submission_id], - ) - - kc_request_params = { - 'method': method, - 'url': self.get_submission_validation_status_url(submission_id), - } - - if method == 'PATCH': - kc_request_params.update({'json': data}) - - kc_request = requests.Request(**kc_request_params) - kc_response = self.__kobocat_proxy_request(kc_request, user) - return self.__prepare_as_drf_response_signature(kc_response) - - def set_validation_statuses( - self, user: settings.AUTH_USER_MODEL, data: dict - ) -> dict: - """ - Bulk update validation status for provided submissions through - KoBoCAT proxy, authenticated by `user`'s API token. - - `data` should contains either the submission ids or the query to - retrieve the subset of submissions chosen by then user. - If none of them are provided, all the submissions are selected - Examples: - {"submission_ids": [1, 2, 3]} - {"query":{"_validation_status.uid":"validation_status_not_approved"} - """ - submission_ids = self.validate_access_with_partial_perms( - user=user, - perm=PERM_VALIDATE_SUBMISSIONS, - submission_ids=data['submission_ids'], - query=data['query'], - ) - - # If `submission_ids` is not empty, user has partial permissions. - # Otherwise, they have have full access. 
- if submission_ids: - # Remove query from `data` because all the submission ids have been - # already retrieved - data.pop('query', None) - data['submission_ids'] = submission_ids - - # `PATCH` KC even if KPI receives `DELETE` - url = self.submission_list_url - kc_request = requests.Request(method='PATCH', url=url, json=data) - kc_response = self.__kobocat_proxy_request(kc_request, user) - return self.__prepare_as_drf_response_signature(kc_response) - - def store_submission( - self, user, xml_submission, submission_uuid, attachments=None, **kwargs - ): - file_tuple = (submission_uuid, io.StringIO(xml_submission)) - files = {'xml_submission_file': file_tuple} - if attachments: - files.update(attachments) - kc_request = requests.Request( - method='POST', url=self.submission_url, files=files - ) - kc_response = self.__kobocat_proxy_request(kc_request, user=user) - return kc_response - - @property - def submission_count(self): - try: - return self.xform.num_of_submissions - except InvalidXFormException: - return 0 - - @property - def submission_list_url(self): - url = '{kc_base}/api/v1/data/{formid}'.format( - kc_base=settings.KOBOCAT_INTERNAL_URL, - formid=self.backend_response['formid'] - ) - return url - - @property - def submission_model(self): - return ReadOnlyKobocatInstance - - @property - def submission_url(self) -> str: - # Use internal host to secure calls to KoBoCAT API, - # kobo-service-account can restrict requests per hosts. - url = '{kc_base}/submission'.format( - kc_base=settings.KOBOCAT_INTERNAL_URL, - ) - return url - - def sync_media_files(self, file_type: str = AssetFile.FORM_MEDIA): - - url = self.normalize_internal_url(self.backend_response['url']) - response = self._kobocat_request('GET', url) - kc_files = defaultdict(dict) - - # Build a list of KoBoCAT metadata to compare with KPI - for metadata in response.get('metadata', []): - if metadata['data_type'] == self.SYNCED_DATA_FILE_TYPES[file_type]: - kc_files[metadata['data_value']] = { - 'pk': metadata['id'], - 'url': metadata['url'], - 'md5': metadata['file_hash'], - 'from_kpi': metadata['from_kpi'], - } - - kc_filenames = kc_files.keys() - - queryset = self._get_metadata_queryset(file_type=file_type) - - for media_file in queryset: - - backend_media_id = media_file.backend_media_id - - # File does not exist in KC - if backend_media_id not in kc_filenames: - if media_file.deleted_at is None: - # New file - self.__save_kc_metadata(media_file) - else: - # Orphan, delete it - media_file.delete(force=True) - continue - - # Existing file - if backend_media_id in kc_filenames: - kc_file = kc_files[backend_media_id] - if media_file.deleted_at is None: - # If md5 differs, we need to re-upload it. - if media_file.md5_hash != kc_file['md5']: - if media_file.file_type == AssetFile.PAIRED_DATA: - self.__update_kc_metadata_hash( - media_file, kc_file['pk'] - ) - else: - self.__delete_kc_metadata(kc_file) - self.__save_kc_metadata(media_file) - elif kc_file['from_kpi']: - self.__delete_kc_metadata(kc_file, media_file) - else: - # Remote file has been uploaded directly to KC. We - # cannot delete it, but we need to vacuum KPI. 
- media_file.delete(force=True) - # Skip deletion of key corresponding to `backend_media_id` - # in `kc_files` to avoid unique constraint failure in case - # user deleted - # and re-uploaded the same file in a row between - # two deployments - # Example: - # - User uploads file1.jpg (pk == 1) - # - User deletes file1.jpg (pk == 1) - # - User re-uploads file1.jpg (pk == 2) - # Next time, 'file1.jpg' is encountered in this loop, - # it would try to re-upload to KC if its hash differs - # from KC version and would fail because 'file1.jpg' - # already exists in KC db. - continue - - # Remove current filename from `kc_files`. - # All files which will remain in this dict (after this loop) - # will be considered obsolete and will be deleted - del kc_files[backend_media_id] - - # Remove KC orphan files previously uploaded through KPI - for kc_file in kc_files.values(): - if kc_file['from_kpi']: - self.__delete_kc_metadata(kc_file) - - @property - def xform(self): - if not hasattr(self, '_xform'): - pk = self.backend_response['formid'] - xform = ( - KobocatXForm.objects.filter(pk=pk) - .only( - 'user__username', - 'id_string', - 'num_of_submissions', - 'attachment_storage_bytes', - 'require_auth', - ) - .select_related( - 'user' - ) # Avoid extra query to validate username below - .first() - ) - - if not ( - xform - and xform.user.username == self.asset.owner.username - and xform.id_string == self.xform_id_string - ): - raise InvalidXFormException( - 'Deployment links to an unexpected KoBoCAT XForm') - setattr(self, '_xform', xform) - - return self._xform - - @property - def xform_id(self): - return self.xform.pk - - @property - def xform_id_string(self): - return self.get_data('backend_response.id_string') - - @staticmethod - @contextmanager - def suspend_submissions(user_ids: list[int]): - KobocatUserProfile.objects.filter( - user_id__in=user_ids - ).update( - metadata=UpdateJSONFieldAttributes( - 'metadata', - updates={'submissions_suspended': True}, - ), - ) - try: - yield - finally: - KobocatUserProfile.objects.filter( - user_id__in=user_ids - ).update( - metadata=UpdateJSONFieldAttributes( - 'metadata', - updates={'submissions_suspended': False}, - ), - ) - - def transfer_submissions_ownership( - self, previous_owner_username: str - ) -> bool: - - results = settings.MONGO_DB.instances.update_many( - {'_userform_id': f'{previous_owner_username}_{self.xform_id_string}'}, - { - '$set': { - '_userform_id': self.mongo_userform_id - } - }, - ) - - return ( - results.matched_count == 0 or - ( - results.matched_count > 0 - and results.matched_count == results.modified_count - ) - ) - - def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'): - - NLPUsageCounter.objects.filter( - asset=self.asset, user=self.asset.owner - ).update(user=new_owner) - KobocatDailyXFormSubmissionCounter.objects.filter( - xform=self.xform, user_id=self.asset.owner.pk - ).update(user=new_owner) - KobocatMonthlyXFormSubmissionCounter.objects.filter( - xform=self.xform, user_id=self.asset.owner.pk - ).update(user=new_owner) - - KobocatUserProfile.objects.filter(user_id=self.asset.owner.pk).update( - attachment_storage_bytes=F('attachment_storage_bytes') - - self.xform.attachment_storage_bytes - ) - KobocatUserProfile.objects.filter(user_id=self.asset.owner.pk).update( - attachment_storage_bytes=F('attachment_storage_bytes') - + self.xform.attachment_storage_bytes - ) - - def _kobocat_request(self, method, url, expect_formid=True, **kwargs): - """ - Make a POST or PATCH request and return parsed JSON. 
Keyword arguments, - e.g. `data` and `files`, are passed through to `requests.request()`. - - If `expect_formid` is False, it bypasses the presence of 'formid' - property in KoBoCAT response and returns the KoBoCAT response whatever - it is. - - `kwargs` contains arguments to be passed to KoBoCAT request. - """ - - expected_status_codes = { - 'GET': 200, - 'POST': 201, - 'PATCH': 200, - 'DELETE': 204, - } - - try: - expected_status_code = expected_status_codes[method] - except KeyError: - raise NotImplementedError( - 'This backend does not implement the {} method'.format(method) - ) - - # Make the request to KC - try: - kc_request = requests.Request(method=method, url=url, **kwargs) - response = self.__kobocat_proxy_request(kc_request, - user=self.asset.owner) - - except requests.exceptions.RequestException as e: - # Failed to access the KC API - # TODO: clarify that the user cannot correct this - raise KobocatDeploymentException(detail=str(e)) - - # If it's a no-content success, return immediately - if response.status_code == expected_status_code == 204: - return {} - - # Parse the response - try: - json_response = response.json() - except ValueError as e: - # Unparseable KC API output - # TODO: clarify that the user cannot correct this - raise KobocatDeploymentException( - detail=str(e), response=response) - - # Check for failure - if ( - response.status_code != expected_status_code - or json_response.get('type') == 'alert-error' - or (expect_formid and 'formid' not in json_response) - ): - if 'text' in json_response: - # KC API refused us for a specified reason, likely invalid - # input Raise a 400 error that includes the reason - e = KobocatDeploymentException(detail=json_response['text']) - e.status_code = status.HTTP_400_BAD_REQUEST - raise e - else: - # Unspecified failure; raise 500 - raise KobocatDeploymentException( - detail='Unexpected KoBoCAT error {}: {}'.format( - response.status_code, response.content), - response=response - ) - - return json_response - - def _last_submission_time(self): - id_string = self.backend_response['id_string'] - return last_submission_time( - xform_id_string=id_string, user_id=self.asset.owner.pk) - - def __delete_kc_metadata( - self, kc_file_: dict, file_: Union[AssetFile, PairedData] = None - ): - """ - A simple utility to delete metadata in KoBoCAT through proxy. - If related KPI file is provided (i.e. `file_`), it is deleted too. - """ - # Delete file in KC - - delete_url = self.normalize_internal_url(kc_file_['url']) - self._kobocat_request('DELETE', url=delete_url, expect_formid=False) - - if file_ is None: - return - - # Delete file in KPI if requested - file_.delete(force=True) - - def __get_submissions_in_json( - self, - request: Optional['rest_framework.request.Request'] = None, - **params - ) -> Generator[dict, None, None]: - """ - Retrieve submissions directly from Mongo. - Submissions can be filtered with `params`. 
- """ - # Apply a default sort of _id to prevent unpredictable natural sort - if not params.get('sort'): - params['sort'] = {'_id': 1} - mongo_cursor, total_count = MongoHelper.get_instances( - self.mongo_userform_id, **params) - - # Python-only attribute used by `kpi.views.v2.data.DataViewSet.list()` - self.current_submission_count = total_count - - add_supplemental_details_to_query = self.asset.has_advanced_features - - fields = params.get('fields', []) - if len(fields) > 0 and '_uuid' not in fields: - # skip the query if submission '_uuid' is not even q'd from mongo - add_supplemental_details_to_query = False - - if add_supplemental_details_to_query: - mongo_cursor = stream_with_extras(mongo_cursor, self.asset) - - return ( - self._rewrite_json_attachment_urls( - MongoHelper.to_readable_dict(submission), - request, - ) - for submission in mongo_cursor - ) - - def __get_submissions_in_xml( - self, - **params - ) -> Generator[str, None, None]: - """ - Retrieve submissions directly from PostgreSQL. - Submissions can be filtered with `params`. - """ - - mongo_filters = ['query', 'permission_filters'] - use_mongo = any(mongo_filter in mongo_filters for mongo_filter in params - if params.get(mongo_filter) is not None) - - if use_mongo: - # We use Mongo to retrieve matching instances. - params['fields'] = ['_id'] - # Force `sort` by `_id` for Mongo - # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` - params['sort'] = {'_id': 1} - submissions, count = MongoHelper.get_instances( - self.mongo_userform_id, **params - ) - submission_ids = [ - submission.get('_id') - for submission in submissions - ] - self.current_submission_count = count - - queryset = ReadOnlyKobocatInstance.objects.filter( - xform_id=self.xform_id, - ) - - if len(submission_ids) > 0 or use_mongo: - queryset = queryset.filter(id__in=submission_ids) - - # Python-only attribute used by `kpi.views.v2.data.DataViewSet.list()` - if not use_mongo: - self.current_submission_count = queryset.count() - - # Force Sort by id - # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` - queryset = queryset.order_by('id') - - # When using Mongo, data is already paginated, - # no need to do it with PostgreSQL too. - if not use_mongo: - offset = params.get('start') - limit = offset + params.get('limit') - queryset = queryset[offset:limit] - - return (lazy_instance.xml for lazy_instance in queryset) - - @staticmethod - def __kobocat_proxy_request(kc_request, user=None): - """ - Send `kc_request`, which must specify `method` and `url` at a minimum. - If the incoming request to be proxied is authenticated, - logged-in user's API token will be added to `kc_request.headers` - - :param kc_request: requests.models.Request - :param user: User - :return: requests.models.Response - """ - if not is_user_anonymous(user): - kc_request.headers.update(get_request_headers(user.username)) - - session = requests.Session() - return session.send(kc_request.prepare()) - - @staticmethod - def __prepare_as_drf_response_signature( - requests_response, expected_response_format='json' - ): - """ - Prepares a dict from `Requests` response. 
- Useful to get response from KoBoCAT and use it as a dict or pass it to - DRF Response - """ - - prepared_drf_response = {} - - # `requests_response` may not have `headers` attribute - content_type = requests_response.headers.get('Content-Type') - content_language = requests_response.headers.get('Content-Language') - if content_type: - prepared_drf_response['content_type'] = content_type - if content_language: - prepared_drf_response['headers'] = { - 'Content-Language': content_language - } - - prepared_drf_response['status'] = requests_response.status_code - - try: - prepared_drf_response['data'] = json.loads( - requests_response.content) - except ValueError as e: - if ( - not requests_response.status_code == status.HTTP_204_NO_CONTENT - and expected_response_format == 'json' - ): - prepared_drf_response['data'] = { - 'detail': t( - 'KoBoCAT returned an unexpected response: {}'.format( - str(e)) - ) - } - - return prepared_drf_response - - @staticmethod - def prepare_bulk_update_response(kc_responses: list) -> dict: - """ - Formatting the response to allow for partial successes to be seen - more explicitly. - - Args: - kc_responses (list): A list containing dictionaries with keys of - `_uuid` from the newly generated uuid and `response`, the response - object received from KoBoCAT - - Returns: - dict: formatted dict to be passed to a Response object and sent to - the client - """ - - OPEN_ROSA_XML_MESSAGE = '{http://openrosa.org/http/response}message' - - # Unfortunately, the response message from OpenRosa is in XML format, - # so it needs to be parsed before extracting the text - results = [] - for response in kc_responses: - message = t('Something went wrong') - try: - xml_parsed = fromstring_preserve_root_xmlns( - response['response'].content - ) - except DET.ParseError: - pass - else: - message_el = xml_parsed.find(OPEN_ROSA_XML_MESSAGE) - if message_el is not None and message_el.text.strip(): - message = message_el.text - - results.append( - { - 'uuid': response['uuid'], - 'status_code': response['response'].status_code, - 'message': message, - } - ) - - total_update_attempts = len(results) - total_successes = [result['status_code'] for result in results].count( - status.HTTP_201_CREATED - ) - - return { - 'status': status.HTTP_200_OK - if total_successes > 0 - # FIXME: If KoboCAT returns something unexpected, like a 404 or a - # 500, then 400 is not the right response to send to the client - else status.HTTP_400_BAD_REQUEST, - 'data': { - 'count': total_update_attempts, - 'successes': total_successes, - 'failures': total_update_attempts - total_successes, - 'results': results, - }, - } - - def __save_kc_metadata(self, file_: SyncBackendMediaInterface): - """ - Prepares request and data corresponding to the kind of media file - (i.e. FileStorage or remote URL) to `POST` to KC through proxy. 
- """ - server = settings.KOBOCAT_INTERNAL_URL - metadata_url = f'{server}/api/v1/metadata' - - kwargs = { - 'data': { - 'data_value': file_.backend_media_id, - 'xform': self.xform_id, - 'data_type': self.SYNCED_DATA_FILE_TYPES[file_.file_type], - 'from_kpi': True, - 'data_filename': file_.filename, - 'data_file_type': file_.mimetype, - 'file_hash': file_.md5_hash, - } - } - - if not file_.is_remote_url: - kwargs['files'] = { - 'data_file': ( - file_.filename, - file_.content.file, - file_.mimetype, - ) - } - - self._kobocat_request( - 'POST', url=metadata_url, expect_formid=False, **kwargs - ) - - file_.synced_with_backend = True - file_.save(update_fields=['synced_with_backend']) - - def __update_kc_metadata_hash( - self, file_: SyncBackendMediaInterface, kc_metadata_id: int - ): - """ - Update metadata hash in KC - """ - server = settings.KOBOCAT_INTERNAL_URL - metadata_detail_url = f'{server}/api/v1/metadata/{kc_metadata_id}' - data = {'file_hash': file_.md5_hash} - self._kobocat_request( - 'PATCH', url=metadata_detail_url, expect_formid=False, data=data - ) - - file_.synced_with_backend = True - file_.save(update_fields=['synced_with_backend']) diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 58e7d64efc..1efbd5e650 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -168,7 +168,7 @@ def delete_submission( } def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: """ Bulk delete provided submissions authenticated by `user`'s API token. diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 7a4298864e..5ab5b92191 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -72,7 +72,6 @@ from .kc_access.utils import ( assign_applicable_kc_permissions, kc_transaction_atomic, - last_submission_time ) from ..exceptions import ( BadFormatException, @@ -227,7 +226,7 @@ def delete_submission( } def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: """ Bulk delete provided submissions. 
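
Because every backend now accepts `**kwargs` on `delete_submissions()`, a shared caller can pass the same optional keyword arguments to the mock, KoBoCAT-proxy and OpenRosa implementations without a `TypeError`. Below is a minimal sketch of the documented call shapes; `asset` and `user` are assumed to be an already-deployed `Asset` and a user allowed to delete submissions (neither is defined by this patch).

    deployment = asset.deployment

    # Delete an explicit list of submissions.
    result = deployment.delete_submissions(
        {'submission_ids': [1, 2, 3], 'query': {}}, user
    )

    # Or delete the subset matching a Mongo-style query instead.
    result = deployment.delete_submissions(
        {'submission_ids': [], 'query': {'Question': 'response'}}, user
    )

    # Any extra keyword argument a shared code path passes along is simply
    # absorbed by `**kwargs` (illustrative only).
    result = deployment.delete_submissions(
        {'submission_ids': [1, 2, 3], 'query': {}}, user, request=None
    )
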
@@ -1422,10 +1421,7 @@ def _delete_openrosa_metadata( file_.delete(force=True) def _last_submission_time(self): - id_string = self.xform.id_string - return last_submission_time( - xform_id_string=id_string, user_id=self.asset.owner.pk - ) + return self.xform.last_submission_time def _save_openrosa_metadata(self, file_: SyncBackendMediaInterface): """ diff --git a/kpi/management/commands/copy_kc_profile.py b/kpi/management/commands/copy_kc_profile.py index 38e5750f5a..55c169478d 100644 --- a/kpi/management/commands/copy_kc_profile.py +++ b/kpi/management/commands/copy_kc_profile.py @@ -3,7 +3,7 @@ from hub.models import ExtraUserDetail from kobo.apps.kobo_auth.shortcuts import User -from kpi.deployment_backends.kc_access.utils import get_kc_profile_data +from kobo.apps.openrosa.apps.main import UserProfile class Command(BaseCommand): @@ -51,7 +51,8 @@ def handle(self, *args, **options): user=user) if not extra_details.data.get('copied_kc_profile', False) or \ options.get('again'): - kc_detail = get_kc_profile_data(user.pk) + + kc_detail = UserProfile.to_dict(user_id=user.pk) for k, v in kc_detail.items(): if extra_details.data.get(k, None) is None: extra_details.data[k] = v diff --git a/kpi/management/commands/sync_kobocat_xforms.py b/kpi/management/commands/sync_kobocat_xforms.py index 89e47c8ad5..fae1d98374 100644 --- a/kpi/management/commands/sync_kobocat_xforms.py +++ b/kpi/management/commands/sync_kobocat_xforms.py @@ -23,7 +23,7 @@ from kpi.constants import PERM_FROM_KC_ONLY from kpi.utils.log import logging from kobo.apps.openrosa.apps.logger.models.xform import XForm -from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend +from kpi.deployment_backends.openrosa_backend import OpenRosaDeploymentBackend from kpi.models import Asset, ObjectPermission from kpi.utils.object_permission import get_anonymous_user from kpi.utils.models import _set_auto_field_update @@ -177,6 +177,7 @@ def _xform_to_asset_content(xform): def _get_kc_backend_response(xform): + # FIXME wrong backend info # Get the form data from KC user = xform.user response = _kc_forms_api_request(user.auth_token, xform.pk) @@ -259,9 +260,9 @@ def _sync_form_metadata(asset, xform, changes): if not asset.has_deployment: # A brand-new asset asset.date_created = xform.date_created - kc_deployment = KobocatDeploymentBackend(asset) - kc_deployment.store_data({ - 'backend': 'kobocat', + backend_deployment = OpenRosaDeploymentBackend(asset) + backend_deployment.store_data({ + 'backend': 'openrosa', 'active': xform.downloadable, 'backend_response': _get_kc_backend_response(xform), 'version': asset.version_id diff --git a/kpi/migrations/0011_explode_asset_deployments.py b/kpi/migrations/0011_explode_asset_deployments.py index 2898198a70..0111e83a4f 100644 --- a/kpi/migrations/0011_explode_asset_deployments.py +++ b/kpi/migrations/0011_explode_asset_deployments.py @@ -2,7 +2,7 @@ import sys from django.db import migrations -from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend +from kpi.deployment_backends.openrosa_backend import OpenRosaDeploymentBackend from kpi.utils.models import _set_auto_field_update @@ -20,8 +20,8 @@ def explode_assets(apps, schema_editor): for asset in deployed_assets: deployment = asset.assetdeployment_set.last() # Copy the deployment-related data - kc_deployment = KobocatDeploymentBackend(asset) - kc_deployment.store_data({ + backend_deployment = OpenRosaDeploymentBackend(asset) + backend_deployment.store_data({ 'backend': 'kobocat', 'active': 
deployment.data['downloadable'], 'backend_response': deployment.data, diff --git a/kpi/serializers/v2/service_usage.py b/kpi/serializers/v2/service_usage.py index 09388d0d61..d524443140 100644 --- a/kpi/serializers/v2/service_usage.py +++ b/kpi/serializers/v2/service_usage.py @@ -14,7 +14,7 @@ from kobo.apps.organizations.utils import organization_month_start, organization_year_start from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES from kobo.apps.trackers.models import NLPUsageCounter -from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend +from kpi.deployment_backends.openrosa_backend import OpenRosaDeploymentBackend from kpi.models.asset import Asset @@ -91,7 +91,7 @@ def _get_nlp_tracking_data(self, asset, start_date=None): 'total_nlp_asr_seconds': 0, 'total_nlp_mt_characters': 0, } - return KobocatDeploymentBackend.nlp_tracking_data( + return OpenRosaDeploymentBackend.nlp_tracking_data( asset_ids=[asset.id], start_date=start_date ) diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index cf4d22db41..febab500ed 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -534,7 +534,7 @@ def retrieve(self, request, pk, *args, **kwargs): # The `get_submissions()` is a generator in KobocatDeploymentBackend # class but a list in MockDeploymentBackend. We cast the result as a list # no matter what is the deployment back-end class to make it work with - # both. Since the number of submissions is be very small, it should not + # both. Since the number of submissions is very small, it should not # have a big impact on memory (i.e. list vs generator) submissions = list(deployment.get_submissions(**params)) if not submissions: From 06db7915939859cc42e3c12dc0714acc3efe4b25 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 30 Jul 2024 17:34:51 -0400 Subject: [PATCH 014/119] Fix error when calling "delete_submissions" with same params as "bulk_update_submissions" --- kpi/deployment_backends/base_backend.py | 2 +- kpi/deployment_backends/kobocat_backend.py | 4 +++- kpi/deployment_backends/mock_backend.py | 2 +- kpi/deployment_backends/openrosa_backend.py | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index 0eaf773563..d2e6d88ac2 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -217,7 +217,7 @@ def delete_submission( @abc.abstractmethod def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: pass diff --git a/kpi/deployment_backends/kobocat_backend.py b/kpi/deployment_backends/kobocat_backend.py index 0a6c87a2f3..72093432e6 100644 --- a/kpi/deployment_backends/kobocat_backend.py +++ b/kpi/deployment_backends/kobocat_backend.py @@ -302,7 +302,9 @@ def delete_submission( return self.__prepare_as_drf_response_signature(kc_response) - def delete_submissions(self, data: dict, user: settings.AUTH_USER_MODEL) -> dict: + def delete_submissions( + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs + ) -> dict: """ Bulk delete provided submissions through KoBoCAT proxy, authenticated by `user`'s API token. 
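
The KoBoCAT-proxied variant above returns the dictionary built by `__prepare_as_drf_response_signature()` ('status', 'data', plus 'content_type'/'headers' when present), so the calling view can forward it unchanged. A rough sketch of that pattern, assuming `deployment` and `data` are supplied by the surrounding viewset:

    from rest_framework.response import Response

    def bulk_delete(request, deployment, data):
        # Sketch only: `deployment` is the asset's deployment backend and
        # `data` is the validated payload from the request.
        json_response = deployment.delete_submissions(data, request.user)
        # The returned dict maps directly onto DRF Response keyword arguments.
        return Response(**json_response)
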
diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 58e7d64efc..1efbd5e650 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -168,7 +168,7 @@ def delete_submission( } def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: """ Bulk delete provided submissions authenticated by `user`'s API token. diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 7a4298864e..4bf50c1909 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -227,7 +227,7 @@ def delete_submission( } def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL + self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs ) -> dict: """ Bulk delete provided submissions. From cae6480aa4e7da8ad5ba95b2abc217a9fd4263b0 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 31 Jul 2024 09:43:10 -0400 Subject: [PATCH 015/119] Remove installed app --- kobo/settings/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/kobo/settings/base.py b/kobo/settings/base.py index a9774c52cc..19b1644a35 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -126,7 +126,6 @@ 'kobo.apps.external_integrations.ExternalIntegrationsAppConfig', 'markdownx', 'kobo.apps.help', - # 'kobo.apps.shadow_model.ShadowModelAppConfig', 'trench', 'kobo.apps.accounts.mfa.apps.MfaAppConfig', 'kobo.apps.languages.LanguageAppConfig', From aa7195f880806b23ed1f7d8560012894105e9c0a Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 31 Jul 2024 11:40:02 -0400 Subject: [PATCH 016/119] Use HookUtils directly when submission comes in --- kobo/apps/hook/utils.py | 5 +++-- kobo/apps/hook/views/v2/hook_signal.py | 2 +- .../openrosa/apps/viewer/models/parsed_instance.py | 14 ++++++++++++-- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/kobo/apps/hook/utils.py b/kobo/apps/hook/utils.py index 55fe5760e0..5bb6e1391c 100644 --- a/kobo/apps/hook/utils.py +++ b/kobo/apps/hook/utils.py @@ -1,4 +1,5 @@ # coding: utf-8 +from .models.hook import Hook from .models.hook_log import HookLog from .tasks import service_definition_task @@ -6,13 +7,13 @@ class HookUtils: @staticmethod - def call_services(asset: 'kpi.models.asset.Asset', submission_id: int): + def call_services(asset_uid: str, submission_id: int) -> bool: """ Delegates to Celery data submission to remote servers """ # Retrieve `Hook` ids, to send data to their respective endpoint. 
hooks_ids = ( - asset.hooks.filter(active=True) + Hook.objects.filter(asset__uid=asset_uid, active=True) .values_list('id', flat=True) .distinct() ) diff --git a/kobo/apps/hook/views/v2/hook_signal.py b/kobo/apps/hook/views/v2/hook_signal.py index 5bab71ef46..d9150f9fa5 100644 --- a/kobo/apps/hook/views/v2/hook_signal.py +++ b/kobo/apps/hook/views/v2/hook_signal.py @@ -63,7 +63,7 @@ def create(self, request, *args, **kwargs): if not (submission and int(submission['_id']) == submission_id): raise Http404 - if HookUtils.call_services(self.asset, submission_id): + if HookUtils.call_services(self.asset.uid, submission_id): # Follow Open Rosa responses by default response_status_code = status.HTTP_202_ACCEPTED response = { diff --git a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py index 469fae597a..2427ab651e 100644 --- a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py +++ b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py @@ -12,7 +12,6 @@ from kobo.apps.openrosa.apps.api.mongo_helper import MongoHelper from kobo.apps.openrosa.apps.logger.models import Instance from kobo.apps.openrosa.apps.logger.models import Note -from kobo.apps.openrosa.apps.restservice.utils import call_service from kobo.apps.openrosa.libs.utils.common_tags import ( ID, UUID, @@ -27,6 +26,8 @@ ) from kobo.apps.openrosa.libs.utils.decorators import apply_form_field_names from kobo.apps.openrosa.libs.utils.model_tools import queryset_iterator +from kobo.apps.hook.utils import HookUtils +from kpi.utils.log import logging # this is Mongo Collection where we will store the parsed submissions xform_instances = settings.MONGO_DB.instances @@ -371,7 +372,16 @@ def save(self, asynchronous=False, *args, **kwargs): # Rest Services were called before data was saved in DB. 
success = self.update_mongo(asynchronous) if success and created: - call_service(self) + records = ParsedInstance.objects.filter( + instance_id=self.instance_id + ).values_list('instance__xform__kpi_asset_uid', flat=True) + if not (asset_uid := records[0]): + logging.warning( + f'ParsedInstance #: {self.pk} - XForm is not linked with Asset' + ) + else: + HookUtils.call_services(asset_uid, self.instance_id) + return success def add_note(self, note): From 11a0322906040e77f398d5f204cb581013fd4ede Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 31 Jul 2024 13:45:32 -0400 Subject: [PATCH 017/119] Remove back and forth API calls to activate Rest Services - Remove logger.XForm `has_kpi_hook` field - Remove logger.Instance `posted_to_kpi` field - Remove openrosa.rest_service django app - Remove hook signal viewset --- kobo/apps/hook/tasks.py | 32 +++---- kobo/apps/hook/tests/test_api_hook.py | 36 -------- kobo/apps/hook/tests/test_utils.py | 52 +++++++++++ kobo/apps/hook/views/v1/__init__.py | 1 - kobo/apps/hook/views/v1/hook_signal.py | 33 ------- kobo/apps/hook/views/v2/__init__.py | 1 - kobo/apps/hook/views/v2/hook_signal.py | 84 ------------------ .../api/tests/viewsets/test_xform_viewset.py | 1 - ...as_kpi_hooks_and_instance_posted_to_kpi.py | 27 ++++++ .../openrosa/apps/logger/models/instance.py | 4 - .../apps/openrosa/apps/logger/models/xform.py | 9 -- .../0015_drop_old_restservice_tables.py | 77 ++++++++++++++++ .../apps/restservice/RestServiceInterface.py | 4 - .../openrosa/apps/restservice/__init__.py | 9 -- kobo/apps/openrosa/apps/restservice/app.py | 12 --- .../apps/restservice/management/__init__.py | 1 - .../management/commands/__init__.py | 1 - .../commands/update_kpi_hooks_endpoint.py | 41 --------- .../restservice/migrations/0001_initial.py | 25 ------ ...add_related_name_with_delete_on_cascade.py | 22 ----- .../0003_remove_deprecated_services.py | 17 ---- .../apps/restservice/migrations/__init__.py | 1 - kobo/apps/openrosa/apps/restservice/models.py | 31 ------- .../apps/restservice/services/__init__.py | 2 - .../apps/restservice/services/kpi_hook.py | 43 --------- .../apps/openrosa/apps/restservice/signals.py | 36 -------- kobo/apps/openrosa/apps/restservice/tasks.py | 35 -------- .../restservice/templates/add-service.html | 11 --- .../apps/restservice/tests/__init__.py | 1 - .../restservice/tests/fixtures/dhisform.xls | Bin 6656 -> 0 bytes .../restservice/tests/test_restservice.py | 40 --------- kobo/apps/openrosa/apps/restservice/utils.py | 24 ----- kobo/apps/openrosa/libs/constants.py | 1 - .../libs/serializers/xform_serializer.py | 1 - kobo/settings/base.py | 1 - kpi/deployment_backends/base_backend.py | 4 - kpi/deployment_backends/mock_backend.py | 14 --- kpi/deployment_backends/openrosa_backend.py | 24 +---- kpi/signals.py | 10 --- .../v2/test_api_invalid_password_access.py | 17 ---- kpi/urls/router_api_v1.py | 6 -- kpi/urls/router_api_v2.py | 7 -- 42 files changed, 174 insertions(+), 624 deletions(-) create mode 100644 kobo/apps/hook/tests/test_utils.py delete mode 100644 kobo/apps/hook/views/v1/hook_signal.py delete mode 100644 kobo/apps/hook/views/v2/hook_signal.py create mode 100644 kobo/apps/openrosa/apps/logger/migrations/0035_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py create mode 100644 kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py delete mode 100644 kobo/apps/openrosa/apps/restservice/RestServiceInterface.py delete mode 100644 kobo/apps/openrosa/apps/restservice/__init__.py delete mode 100644 
kobo/apps/openrosa/apps/restservice/app.py delete mode 100644 kobo/apps/openrosa/apps/restservice/management/__init__.py delete mode 100644 kobo/apps/openrosa/apps/restservice/management/commands/__init__.py delete mode 100644 kobo/apps/openrosa/apps/restservice/management/commands/update_kpi_hooks_endpoint.py delete mode 100644 kobo/apps/openrosa/apps/restservice/migrations/0001_initial.py delete mode 100644 kobo/apps/openrosa/apps/restservice/migrations/0002_add_related_name_with_delete_on_cascade.py delete mode 100644 kobo/apps/openrosa/apps/restservice/migrations/0003_remove_deprecated_services.py delete mode 100644 kobo/apps/openrosa/apps/restservice/migrations/__init__.py delete mode 100644 kobo/apps/openrosa/apps/restservice/models.py delete mode 100644 kobo/apps/openrosa/apps/restservice/services/__init__.py delete mode 100644 kobo/apps/openrosa/apps/restservice/services/kpi_hook.py delete mode 100644 kobo/apps/openrosa/apps/restservice/signals.py delete mode 100644 kobo/apps/openrosa/apps/restservice/tasks.py delete mode 100644 kobo/apps/openrosa/apps/restservice/templates/add-service.html delete mode 100644 kobo/apps/openrosa/apps/restservice/tests/__init__.py delete mode 100755 kobo/apps/openrosa/apps/restservice/tests/fixtures/dhisform.xls delete mode 100644 kobo/apps/openrosa/apps/restservice/tests/test_restservice.py delete mode 100644 kobo/apps/openrosa/apps/restservice/utils.py diff --git a/kobo/apps/hook/tasks.py b/kobo/apps/hook/tasks.py index 6ff659961f..b1dfbf7a96 100644 --- a/kobo/apps/hook/tasks.py +++ b/kobo/apps/hook/tasks.py @@ -32,7 +32,7 @@ def service_definition_task(self, hook_id, submission_id): hook = Hook.objects.get(id=hook_id) # Use camelcase (even if it's not PEP-8 compliant) # because variable represents the class, not the instance. - ServiceDefinition = hook.get_service_definition() + ServiceDefinition = hook.get_service_definition() # noqa service_definition = ServiceDefinition(hook, submission_id) if not service_definition.send(): # Countdown is in seconds @@ -43,10 +43,7 @@ def service_definition_task(self, hook_id, submission_id): @shared_task -def retry_all_task(hooklogs_ids): - """ - :param list: . - """ +def retry_all_task(hooklogs_ids: int): hook_logs = HookLog.objects.filter(id__in=hooklogs_ids) for hook_log in hook_logs: hook_log.retry() @@ -71,22 +68,24 @@ def failures_reports(): if failures_reports_period_task: last_run_at = failures_reports_period_task.last_run_at - queryset = HookLog.objects.filter(hook__email_notification=True, - status=HOOK_LOG_FAILED) + queryset = HookLog.objects.filter( + hook__email_notification=True, status=HOOK_LOG_FAILED + ) if last_run_at: queryset = queryset.filter(date_modified__gte=last_run_at) - queryset = queryset.order_by('hook__asset__name', - 'hook__uid', - '-date_modified') + queryset = queryset.order_by( + 'hook__asset__name', 'hook__uid', '-date_modified' + ) # PeriodicTask are updated every 3 minutes (default). # It means, if this task interval is less than 3 minutes, some data can be duplicated in emails. # Setting `beat-sync-every` to 1, makes PeriodicTask to be updated before running the task. # So, we need to update it manually. # see: http://docs.celeryproject.org/en/latest/userguide/configuration.html#beat-sync-every - PeriodicTask.objects.filter(task=beat_schedule.get("task")). 
\ - update(last_run_at=timezone.now()) + PeriodicTask.objects.filter(task=beat_schedule.get('task')).update( + last_run_at=timezone.now() + ) records = {} max_length = 0 @@ -147,9 +146,12 @@ def failures_reports(): text_content = plain_text_template.render(variables) html_content = html_template.render(variables) - msg = EmailMultiAlternatives(translation.gettext('REST Services Failure Report'), text_content, - constance.config.SUPPORT_EMAIL, - [record.get('email')]) + msg = EmailMultiAlternatives( + translation.gettext('REST Services Failure Report'), + text_content, + constance.config.SUPPORT_EMAIL, + [record.get('email')], + ) msg.attach_alternative(html_content, 'text/html') email_messages.append(msg) diff --git a/kobo/apps/hook/tests/test_api_hook.py b/kobo/apps/hook/tests/test_api_hook.py index 511d6486f2..b8fbe5f7f8 100644 --- a/kobo/apps/hook/tests/test_api_hook.py +++ b/kobo/apps/hook/tests/test_api_hook.py @@ -56,42 +56,6 @@ def test_anonymous_access(self): def test_create_hook(self): self._create_hook() - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) - @responses.activate - def test_data_submission(self): - # Create first hook - first_hook = self._create_hook(name="dummy external service", - endpoint="http://dummy.service.local/", - settings={}) - responses.add(responses.POST, first_hook.endpoint, - status=status.HTTP_200_OK, - content_type="application/json") - hook_signal_url = reverse("hook-signal-list", kwargs={"parent_lookup_asset": self.asset.uid}) - - submissions = self.asset.deployment.get_submissions(self.asset.owner) - data = {'submission_id': submissions[0]['_id']} - response = self.client.post(hook_signal_url, data=data, format='json') - self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) - - # Create second hook - second_hook = self._create_hook(name="other dummy external service", - endpoint="http://otherdummy.service.local/", - settings={}) - responses.add(responses.POST, second_hook.endpoint, - status=status.HTTP_200_OK, - content_type="application/json") - - response = self.client.post(hook_signal_url, data=data, format='json') - self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) - - response = self.client.post(hook_signal_url, data=data, format='json') - self.assertEqual(response.status_code, status.HTTP_409_CONFLICT) - - data = {'submission_id': 4} # Instance doesn't belong to `self.asset` - response = self.client.post(hook_signal_url, data=data, format='json') - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - def test_editor_access(self): hook = self._create_hook() diff --git a/kobo/apps/hook/tests/test_utils.py b/kobo/apps/hook/tests/test_utils.py new file mode 100644 index 0000000000..ad198a7b7c --- /dev/null +++ b/kobo/apps/hook/tests/test_utils.py @@ -0,0 +1,52 @@ +import responses +from mock import patch +from rest_framework import status + +from .hook_test_case import HookTestCase, MockSSRFProtect +from ..utils import HookUtils + + +class HookUtilsTestCase(HookTestCase): + + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MockSSRFProtect._get_ip_address + ) + @responses.activate + def test_data_submission(self): + # Create first hook + first_hook = self._create_hook( + name='dummy external service', + endpoint='http://dummy.service.local/', + settings={}, + ) + responses.add( + responses.POST, + first_hook.endpoint, + status=status.HTTP_200_OK, + content_type='application/json', + ) + + submissions = 
self.asset.deployment.get_submissions(self.asset.owner) + submission_id = submissions[0]['_id'] + assert HookUtils.call_services(self.asset.uid, submission_id) is True + + # Create second hook + second_hook = self._create_hook( + name='other dummy external service', + endpoint='http://otherdummy.service.local/', + settings={}, + ) + responses.add( + responses.POST, + second_hook.endpoint, + status=status.HTTP_200_OK, + content_type='application/json', + ) + # Since second hook hasn't received the submission, `call_services` + # should still return True + assert HookUtils.call_services(self.asset.uid, submission_id) is True + + # But if we try again, it should return False (we cannot send the same + # submission twice to the same external endpoint). + assert HookUtils.call_services(self.asset.uid, submission_id) is False diff --git a/kobo/apps/hook/views/v1/__init__.py b/kobo/apps/hook/views/v1/__init__.py index 66a9504388..c3bb54f968 100644 --- a/kobo/apps/hook/views/v1/__init__.py +++ b/kobo/apps/hook/views/v1/__init__.py @@ -1,4 +1,3 @@ # coding: utf-8 from .hook import HookViewSet from .hook_log import HookLogViewSet -from .hook_signal import HookSignalViewSet diff --git a/kobo/apps/hook/views/v1/hook_signal.py b/kobo/apps/hook/views/v1/hook_signal.py deleted file mode 100644 index 37b0a5c5b3..0000000000 --- a/kobo/apps/hook/views/v1/hook_signal.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding: utf-8 -from kobo.apps.hook.views.v2.hook_signal import HookSignalViewSet as HookSignalViewSetV2 - - -class HookSignalViewSet(HookSignalViewSetV2): - """ - ## This document is for a deprecated version of kpi's API. - - **Please upgrade to latest release `/api/v2/assets/hook-signal/`** - - - This endpoint is only used to trigger asset's hooks if any. - - Tells the hooks to post an instance to external servers. -
-    POST /api/v2/assets/{uid}/hook-signal/
-    
- - - > Example - > - > curl -X POST https://[kpi-url]/assets/aSAvYreNzVEkrWg5Gdcvg/hook-signal/ - - - > **Expected payload** - > - > { - > "submission_id": {integer} - > } - - """ - - pass diff --git a/kobo/apps/hook/views/v2/__init__.py b/kobo/apps/hook/views/v2/__init__.py index 66a9504388..c3bb54f968 100644 --- a/kobo/apps/hook/views/v2/__init__.py +++ b/kobo/apps/hook/views/v2/__init__.py @@ -1,4 +1,3 @@ # coding: utf-8 from .hook import HookViewSet from .hook_log import HookLogViewSet -from .hook_signal import HookSignalViewSet diff --git a/kobo/apps/hook/views/v2/hook_signal.py b/kobo/apps/hook/views/v2/hook_signal.py deleted file mode 100644 index d9150f9fa5..0000000000 --- a/kobo/apps/hook/views/v2/hook_signal.py +++ /dev/null @@ -1,84 +0,0 @@ -# coding: utf-8 -from django.http import Http404 -from django.utils.translation import gettext_lazy as t -from rest_framework import status, viewsets, serializers -from rest_framework.response import Response -from rest_framework.pagination import _positive_int as positive_int -from rest_framework_extensions.mixins import NestedViewSetMixin - - -from kobo.apps.hook.utils import HookUtils -from kpi.models import Asset -from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin - - -class HookSignalViewSet(AssetNestedObjectViewsetMixin, NestedViewSetMixin, - viewsets.ViewSet): - """ - ## - This endpoint is only used to trigger asset's hooks if any. - - Tells the hooks to post an instance to external servers. -
-    POST /api/v2/assets/{uid}/hook-signal/
-    
- - - > Example - > - > curl -X POST https://[kpi-url]/api/v2/assets/aSAvYreNzVEkrWg5Gdcvg/hook-signal/ - - - > **Expected payload** - > - > { - > "submission_id": {integer} - > } - - """ - - parent_model = Asset - - def create(self, request, *args, **kwargs): - """ - It's only used to trigger hook services of the Asset (so far). - - :param request: - :return: - """ - try: - submission_id = positive_int( - request.data.get('submission_id'), strict=True) - except ValueError: - raise serializers.ValidationError( - {'submission_id': t('A positive integer is required.')}) - - # Check if instance really belongs to Asset. - try: - submission = self.asset.deployment.get_submission(submission_id, - request.user) - except ValueError: - raise Http404 - - if not (submission and int(submission['_id']) == submission_id): - raise Http404 - - if HookUtils.call_services(self.asset.uid, submission_id): - # Follow Open Rosa responses by default - response_status_code = status.HTTP_202_ACCEPTED - response = { - "detail": t( - "We got and saved your data, but may not have " - "fully processed it. You should not try to resubmit.") - } - else: - # call_services() refused to launch any task because this - # instance already has a `HookLog` - response_status_code = status.HTTP_409_CONFLICT - response = { - "detail": t( - "Your data for instance {} has been already " - "submitted.".format(submission_id)) - } - - return Response(response, status=response_status_code) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py index 2831e70fb3..cd79a93c39 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py @@ -477,7 +477,6 @@ def test_xform_serializer_none(self): 'instances_with_geopoints': False, 'num_of_submissions': 0, 'attachment_storage_bytes': 0, - 'has_kpi_hooks': False, 'kpi_asset_uid': '', } self.assertEqual(data, XFormSerializer(None).data) diff --git a/kobo/apps/openrosa/apps/logger/migrations/0035_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py b/kobo/apps/openrosa/apps/logger/migrations/0035_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py new file mode 100644 index 0000000000..b6e48241c8 --- /dev/null +++ b/kobo/apps/openrosa/apps/logger/migrations/0035_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.11 on 2024-07-31 15:59 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import kobo.apps.openrosa.apps.logger.models.attachment +import kobo.apps.openrosa.apps.logger.models.xform +import kpi.deployment_backends.kc_access.storage + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('logger', '0034_set_require_auth_at_project_level'), + ] + + operations = [ + migrations.RemoveField( + model_name='xform', + name='has_kpi_hooks', + ), + migrations.RemoveField( + model_name='instance', + name='posted_to_kpi', + ), + ] diff --git a/kobo/apps/openrosa/apps/logger/models/instance.py b/kobo/apps/openrosa/apps/logger/models/instance.py index 86a52500f0..7b685d261b 100644 --- a/kobo/apps/openrosa/apps/logger/models/instance.py +++ b/kobo/apps/openrosa/apps/logger/models/instance.py @@ -106,10 +106,6 @@ class Instance(models.Model): # TODO Don't forget to update all records with command `update_is_sync_with_mongo`. 
is_synced_with_mongo = LazyDefaultBooleanField(default=False) - # If XForm.has_kpi_hooks` is True, this field should be True either. - # It tells whether the instance has been successfully sent to KPI. - posted_to_kpi = LazyDefaultBooleanField(default=False) - class Meta: app_label = 'logger' diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index c7fd168a59..ceb8614082 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -96,7 +96,6 @@ class XForm(BaseModel): tags = TaggableManager() - has_kpi_hooks = LazyDefaultBooleanField(default=False) kpi_asset_uid = models.CharField(max_length=32, null=True) pending_delete = models.BooleanField(default=False) @@ -166,14 +165,6 @@ def data_dictionary(self, use_cache: bool = False): def has_instances_with_geopoints(self): return self.instances_with_geopoints - @property - def kpi_hook_service(self): - """ - Returns kpi hook service if it exists. XForm should have only one occurrence in any case. - :return: RestService - """ - return self.restservices.filter(name="kpi_hook").first() - def _set_id_string(self): matches = self.instance_id_regex.findall(self.xml) if len(matches) != 1: diff --git a/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py b/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py new file mode 100644 index 0000000000..1b58d492d7 --- /dev/null +++ b/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py @@ -0,0 +1,77 @@ +# Generated by Django 4.2.11 on 2024-07-31 15:59 + +from django.db import migrations, connections +from django.conf import settings + + +KC_REST_SERVICES_TABLES = [ + 'restservice_restservice', +] + + +def get_operations(): + if settings.TESTING or settings.SKIP_HEAVY_MIGRATIONS: + # Skip this migration if running in test environment or because we want + # to voluntarily skip it. + return [] + + tables = KC_REST_SERVICES_TABLES + operations = [] + + sql = """ + SELECT con.conname + FROM pg_catalog.pg_constraint con + INNER JOIN pg_catalog.pg_class rel + ON rel.oid = con.conrelid + INNER JOIN pg_catalog.pg_namespace nsp + ON nsp.oid = connamespace + WHERE nsp.nspname = 'public' + AND rel.relname = %s; + """ + with connections[settings.OPENROSA_DB_ALIAS].cursor() as cursor: + drop_table_queries = [] + for table in tables: + cursor.execute(sql, [table]) + drop_index_queries = [] + for row in cursor.fetchall(): + if not row[0].endswith('_pkey'): + drop_index_queries.append( + f'ALTER TABLE public.{table} DROP CONSTRAINT {row[0]};' + ) + drop_table_queries.append(f'DROP TABLE IF EXISTS {table};') + operations.append( + migrations.RunSQL( + sql=''.join(drop_index_queries), + reverse_sql=migrations.RunSQL.noop, + ) + ) + + operations.append( + migrations.RunSQL( + sql=''.join(drop_table_queries), + reverse_sql=migrations.RunSQL.noop, + ) + ) + + return operations + + +def print_migration_warning(apps, schema_editor): + if settings.TESTING or settings.SKIP_HEAVY_MIGRATIONS: + return + print( + """ + This migration might take a while. If it is too slow, you may want to + re-run migrations with SKIP_HEAVY_MIGRATIONS=True and apply this one + manually from the django shell. 
+ """ + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0014_drop_old_formdisclaimer_tables'), + ] + + operations = [migrations.RunPython(print_migration_warning), *get_operations()] diff --git a/kobo/apps/openrosa/apps/restservice/RestServiceInterface.py b/kobo/apps/openrosa/apps/restservice/RestServiceInterface.py deleted file mode 100644 index 28495d5a5c..0000000000 --- a/kobo/apps/openrosa/apps/restservice/RestServiceInterface.py +++ /dev/null @@ -1,4 +0,0 @@ -# coding: utf-8 -class RestServiceInterface: - def send(self, url, data=None): - raise NotImplementedError diff --git a/kobo/apps/openrosa/apps/restservice/__init__.py b/kobo/apps/openrosa/apps/restservice/__init__.py deleted file mode 100644 index 7fe25636ee..0000000000 --- a/kobo/apps/openrosa/apps/restservice/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# coding: utf-8 -SERVICE_KPI_HOOK = ("kpi_hook", "KPI Hook POST") - -SERVICE_CHOICES = ( - SERVICE_KPI_HOOK, -) - - -default_app_config = "kobo.apps.openrosa.apps.restservice.app.RestServiceConfig" diff --git a/kobo/apps/openrosa/apps/restservice/app.py b/kobo/apps/openrosa/apps/restservice/app.py deleted file mode 100644 index 32c379ee84..0000000000 --- a/kobo/apps/openrosa/apps/restservice/app.py +++ /dev/null @@ -1,12 +0,0 @@ -# coding: utf-8 -from django.apps import AppConfig - - -class RestServiceConfig(AppConfig): - name = 'kobo.apps.openrosa.apps.restservice' - verbose_name = 'restservice' - - def ready(self): - # Register RestService signals - from . import signals - super().ready() diff --git a/kobo/apps/openrosa/apps/restservice/management/__init__.py b/kobo/apps/openrosa/apps/restservice/management/__init__.py deleted file mode 100644 index 57d631c3f0..0000000000 --- a/kobo/apps/openrosa/apps/restservice/management/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# coding: utf-8 diff --git a/kobo/apps/openrosa/apps/restservice/management/commands/__init__.py b/kobo/apps/openrosa/apps/restservice/management/commands/__init__.py deleted file mode 100644 index 57d631c3f0..0000000000 --- a/kobo/apps/openrosa/apps/restservice/management/commands/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# coding: utf-8 diff --git a/kobo/apps/openrosa/apps/restservice/management/commands/update_kpi_hooks_endpoint.py b/kobo/apps/openrosa/apps/restservice/management/commands/update_kpi_hooks_endpoint.py deleted file mode 100644 index 64a3a79053..0000000000 --- a/kobo/apps/openrosa/apps/restservice/management/commands/update_kpi_hooks_endpoint.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding: utf-8 -from django.core.management.base import BaseCommand - -from kobo.apps.openrosa.apps.restservice.models import RestService - - -class Command(BaseCommand): - """ - A faster method is available with PostgreSQL: - UPDATE restservice_restservice - SET service_url = REGEXP_REPLACE( - service_url, - '/assets/([^/]*)/submissions/', - '/api/v2/assets/\1/hook-signal/' - ) - WHERE service_url LIKE '/assets/%'; - """ - - help = 'Updates KPI rest service endpoint' - - def handle(self, *args, **kwargs): - - rest_services = RestService.objects.filter(name='kpi_hook').all() - for rest_service in rest_services: - service_url = rest_service.service_url - do_save = False - if service_url.endswith('/submissions/'): - service_url = service_url.replace('/submissions/', '/hook-signal/') - rest_service.service_url = service_url - do_save = True - rest_service.save(update_fields=["service_url"]) - - if service_url.startswith('/assets/'): - service_url = service_url.replace('/assets/', '/api/v2/assets/') - 
rest_service.service_url = service_url - do_save = True - - if do_save: - rest_service.save(update_fields=["service_url"]) - - print('Done!') diff --git a/kobo/apps/openrosa/apps/restservice/migrations/0001_initial.py b/kobo/apps/openrosa/apps/restservice/migrations/0001_initial.py deleted file mode 100644 index 0d68804e6d..0000000000 --- a/kobo/apps/openrosa/apps/restservice/migrations/0001_initial.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('logger', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='RestService', - fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('service_url', models.URLField(verbose_name='Service URL')), - ('name', models.CharField(max_length=50, choices=[('f2dhis2', 'f2dhis2'), ('generic_json', 'JSON POST'), ('generic_xml', 'XML POST'), ('bamboo', 'bamboo')])), - ('xform', models.ForeignKey(to='logger.XForm', on_delete=models.CASCADE)), - ], - ), - migrations.AlterUniqueTogether( - name='restservice', - unique_together=set([('service_url', 'xform', 'name')]), - ), - ] diff --git a/kobo/apps/openrosa/apps/restservice/migrations/0002_add_related_name_with_delete_on_cascade.py b/kobo/apps/openrosa/apps/restservice/migrations/0002_add_related_name_with_delete_on_cascade.py deleted file mode 100644 index c2d6cf46c3..0000000000 --- a/kobo/apps/openrosa/apps/restservice/migrations/0002_add_related_name_with_delete_on_cascade.py +++ /dev/null @@ -1,22 +0,0 @@ -# coding: utf-8 -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('restservice', '0001_initial'), - ] - - operations = [ - migrations.AlterField( - model_name='restservice', - name='name', - field=models.CharField(max_length=50, choices=[('f2dhis2', 'f2dhis2'), ('generic_json', 'JSON POST'), ('generic_xml', 'XML POST'), ('bamboo', 'bamboo'), ('kpi_hook', 'KPI Hook POST')]), - ), - migrations.AlterField( - model_name='restservice', - name='xform', - field=models.ForeignKey(related_name='restservices', to='logger.XForm', on_delete=models.CASCADE), - ), - ] diff --git a/kobo/apps/openrosa/apps/restservice/migrations/0003_remove_deprecated_services.py b/kobo/apps/openrosa/apps/restservice/migrations/0003_remove_deprecated_services.py deleted file mode 100644 index 306e80da8f..0000000000 --- a/kobo/apps/openrosa/apps/restservice/migrations/0003_remove_deprecated_services.py +++ /dev/null @@ -1,17 +0,0 @@ -# coding: utf-8 -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('restservice', '0002_add_related_name_with_delete_on_cascade'), - ] - - operations = [ - migrations.AlterField( - model_name='restservice', - name='name', - field=models.CharField(max_length=50, choices=[('kpi_hook', 'KPI Hook POST')]), - ), - ] diff --git a/kobo/apps/openrosa/apps/restservice/migrations/__init__.py b/kobo/apps/openrosa/apps/restservice/migrations/__init__.py deleted file mode 100644 index 57d631c3f0..0000000000 --- a/kobo/apps/openrosa/apps/restservice/migrations/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# coding: utf-8 diff --git a/kobo/apps/openrosa/apps/restservice/models.py b/kobo/apps/openrosa/apps/restservice/models.py deleted file mode 100644 index f93945a267..0000000000 --- a/kobo/apps/openrosa/apps/restservice/models.py +++ /dev/null @@ -1,31 +0,0 @@ -# coding: utf-8 -from django.db import models -from 
django.utils.translation import gettext_lazy - -from kobo.apps.openrosa.apps.logger.models.xform import XForm -from kobo.apps.openrosa.apps.restservice import SERVICE_CHOICES - - -class RestService(models.Model): - - class Meta: - app_label = 'restservice' - unique_together = ('service_url', 'xform', 'name') - - service_url = models.URLField(gettext_lazy("Service URL")) - xform = models.ForeignKey(XForm, related_name="restservices", on_delete=models.CASCADE) - name = models.CharField(max_length=50, choices=SERVICE_CHOICES) - - def __str__(self): - return "%s:%s - %s" % (self.xform, self.long_name, self.service_url) - - def get_service_definition(self): - m = __import__(''.join(['kobo.apps.openrosa.apps.restservice.services.', - self.name]), - globals(), locals(), ['ServiceDefinition']) - return m.ServiceDefinition - - @property - def long_name(self): - sv = self.get_service_definition() - return sv.verbose_name diff --git a/kobo/apps/openrosa/apps/restservice/services/__init__.py b/kobo/apps/openrosa/apps/restservice/services/__init__.py deleted file mode 100644 index f6b69c77ea..0000000000 --- a/kobo/apps/openrosa/apps/restservice/services/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# coding: utf-8 -__all__ = ('kpi_hook') diff --git a/kobo/apps/openrosa/apps/restservice/services/kpi_hook.py b/kobo/apps/openrosa/apps/restservice/services/kpi_hook.py deleted file mode 100644 index 4d0f7127fe..0000000000 --- a/kobo/apps/openrosa/apps/restservice/services/kpi_hook.py +++ /dev/null @@ -1,43 +0,0 @@ -# coding: utf-8 -import logging -import re - -import requests -from django.conf import settings -from kobo.apps.openrosa.apps.restservice.RestServiceInterface import RestServiceInterface -from kobo.apps.openrosa.apps.logger.models import Instance - - -class ServiceDefinition(RestServiceInterface): - id = 'kpi_hook' - verbose_name = 'KPI Hook POST' - - def send(self, endpoint, data): - - # Will be used internally by KPI to fetch data with KoBoCatBackend - post_data = { - 'submission_id': data.get('instance_id') - } - headers = {'Content-Type': 'application/json'} - - # Verify if endpoint starts with `/assets/` before sending - # the request to KPI - pattern = r'{}'.format(settings.KPI_HOOK_ENDPOINT_PATTERN.replace( - '{asset_uid}', '[^/]*')) - - # Match v2 and v1 endpoints. 
- if re.match(pattern, endpoint) or re.match(pattern[7:], endpoint): - # Build the url in the service to avoid saving hardcoded - # domain name in the DB - url = f'{settings.KOBOFORM_INTERNAL_URL}{endpoint}' - response = requests.post(url, headers=headers, json=post_data) - response.raise_for_status() - - # Save successful - Instance.objects.filter(pk=data.get('instance_id')).update( - posted_to_kpi=True - ) - else: - logging.warning( - f'This endpoint: `{endpoint}` is not valid for `KPI Hook`' - ) diff --git a/kobo/apps/openrosa/apps/restservice/signals.py b/kobo/apps/openrosa/apps/restservice/signals.py deleted file mode 100644 index 80ae3b874c..0000000000 --- a/kobo/apps/openrosa/apps/restservice/signals.py +++ /dev/null @@ -1,36 +0,0 @@ -# coding: utf-8 -from django.conf import settings -from django.db.models.signals import post_save -from django.dispatch import receiver - -from kobo.apps.openrosa.apps.restservice import SERVICE_KPI_HOOK -from kobo.apps.openrosa.apps.logger.models import XForm -from kobo.apps.openrosa.apps.restservice.models import RestService - - -@receiver(post_save, sender=XForm) -def save_kpi_hook_service(sender, instance, **kwargs): - """ - Creates/Deletes Kpi hook Rest service related to XForm instance - :param sender: XForm class - :param instance: XForm instance - :param kwargs: dict - """ - kpi_hook_service = instance.kpi_hook_service - if instance.has_kpi_hooks: - # Only register the service if it hasn't been created yet. - if kpi_hook_service is None: - # For retro-compatibility, if `asset_uid` is null, fallback on - # `id_string` - asset_uid = instance.kpi_asset_uid if instance.kpi_asset_uid \ - else instance.id_string - kpi_hook_service = RestService( - service_url=settings.KPI_HOOK_ENDPOINT_PATTERN.format( - asset_uid=asset_uid), - xform=instance, - name=SERVICE_KPI_HOOK[0] - ) - kpi_hook_service.save() - elif kpi_hook_service is not None: - # Only delete the service if it already exists. - kpi_hook_service.delete() diff --git a/kobo/apps/openrosa/apps/restservice/tasks.py b/kobo/apps/openrosa/apps/restservice/tasks.py deleted file mode 100644 index ce67dfd5e2..0000000000 --- a/kobo/apps/openrosa/apps/restservice/tasks.py +++ /dev/null @@ -1,35 +0,0 @@ -# coding: utf-8 -import logging - -from celery import shared_task -from django.conf import settings - -from kobo.apps.openrosa.apps.restservice.models import RestService - - -@shared_task(bind=True) -def service_definition_task(self, rest_service_id, data): - """ - Tries to send data to the endpoint of the hook - It retries 3 times maximum. - - after 2 minutes, - - after 20 minutes, - - after 200 minutes - - :param self: Celery.Task. - :param rest_service_id: RestService primary key. - :param data: dict. - """ - try: - rest_service = RestService.objects.get(pk=rest_service_id) - service = rest_service.get_service_definition()() - service.send(rest_service.service_url, data) - except Exception as e: - logger = logging.getLogger("console_logger") - logger.error("service_definition_task - {}".format(str(e)), exc_info=True) - # Countdown is in seconds - countdown = 120 * (10 ** self.request.retries) - # Max retries is 3 by default. 
- raise self.retry(countdown=countdown, max_retries=settings.REST_SERVICE_MAX_RETRIES) - - return True diff --git a/kobo/apps/openrosa/apps/restservice/templates/add-service.html b/kobo/apps/openrosa/apps/restservice/templates/add-service.html deleted file mode 100644 index 717082f50e..0000000000 --- a/kobo/apps/openrosa/apps/restservice/templates/add-service.html +++ /dev/null @@ -1,11 +0,0 @@ -{% load i18n %} -{% block content %} -
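With the `restservice` app, its `kpi_hook` service and the Celery task above all removed, KoboCAT no longer POSTs each submission back to KPI's `/hook-signal/` endpoint. A minimal sketch of the replacement flow, assembled from the `ParsedInstance.save()` change shown earlier in this series (a sketch only, not part of the patch; `submission_id` stands in for a concrete instance id):

    # Sketch: mirrors the new ParsedInstance.save() logic shown earlier.
    # Look up the asset linked to the submission's XForm...
    asset_uid = (
        ParsedInstance.objects.filter(instance_id=submission_id)
        .values_list('instance__xform__kpi_asset_uid', flat=True)
        .first()
    )
    if not asset_uid:
        logging.warning('XForm is not linked with an Asset')
    else:
        # ...and fan out one Celery task per active hook, in process,
        # instead of going through a RestService POST back to KPI.
        HookUtils.call_services(asset_uid, submission_id)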
-

- Please manage REST Services within the new user interface. Go to the - Project Dashboard and navigate to - - Your Project > Settings > REST Services. - -

-{% endblock %}
diff --git a/kobo/apps/openrosa/apps/restservice/tests/__init__.py b/kobo/apps/openrosa/apps/restservice/tests/__init__.py
deleted file mode 100644
index 57d631c3f0..0000000000
--- a/kobo/apps/openrosa/apps/restservice/tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# coding: utf-8
diff --git a/kobo/apps/openrosa/apps/restservice/tests/fixtures/dhisform.xls b/kobo/apps/openrosa/apps/restservice/tests/fixtures/dhisform.xls
deleted file mode 100755
index d0b29d74b7c51a8b21223b776b499207ea71d6ed..0000000000000000000000000000000000000000
GIT binary patch
[binary delta for dhisform.xls omitted; the diffs that followed it, up to the start of kpi/tests/api/v2/test_api_invalid_password_access.py, are garbled in this copy and only the fragment below, referencing `/hook-signal/`, survives]
-        response = self.client.post(
-            reverse(
-                self._get_endpoint('hook-signal-list'),
-                kwargs={
-                    'format': 'json',
-                    'parent_lookup_asset': self.asset.uid,
-                },
-            ),
-            data=data,
-            **headers,
-        )
-        # return a 202 first time but 409 other attempts.
- assert response.status_code != status.HTTP_403_FORBIDDEN diff --git a/kpi/urls/router_api_v1.py b/kpi/urls/router_api_v1.py index 4e53c7ae8c..368101a9e3 100644 --- a/kpi/urls/router_api_v1.py +++ b/kpi/urls/router_api_v1.py @@ -3,7 +3,6 @@ from kobo.apps.hook.views.v1.hook import HookViewSet from kobo.apps.hook.views.v1.hook_log import HookLogViewSet -from kobo.apps.hook.views.v1.hook_signal import HookSignalViewSet from kobo.apps.reports.views import ReportsViewSet from kpi.views.v1 import ( @@ -29,11 +28,6 @@ basename='asset-version', parents_query_lookups=['asset'], ) -asset_routes.register(r'hook-signal', - HookSignalViewSet, - basename='hook-signal', - parents_query_lookups=['asset'], - ) asset_routes.register(r'submissions', SubmissionViewSet, basename='submission', diff --git a/kpi/urls/router_api_v2.py b/kpi/urls/router_api_v2.py index 9883884fc4..7c7dbd2575 100644 --- a/kpi/urls/router_api_v2.py +++ b/kpi/urls/router_api_v2.py @@ -5,7 +5,6 @@ from kobo.apps.audit_log.urls import router as audit_log_router from kobo.apps.hook.views.v2.hook import HookViewSet from kobo.apps.hook.views.v2.hook_log import HookLogViewSet -from kobo.apps.hook.views.v2.hook_signal import HookSignalViewSet from kobo.apps.languages.urls import router as language_router from kobo.apps.organizations.views import OrganizationViewSet from kobo.apps.project_ownership.urls import router as project_ownership_router @@ -104,12 +103,6 @@ def get_urls(self, *args, **kwargs): parents_query_lookups=['asset'], ) -asset_routes.register(r'hook-signal', - HookSignalViewSet, - basename='hook-signal', - parents_query_lookups=['asset'], - ) - asset_routes.register(r'paired-data', PairedDataViewset, basename='paired-data', From 5943b2451f6f585eb4a856a7348d43a16a43eabd Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 1 Aug 2024 12:23:29 -0400 Subject: [PATCH 018/119] Improve celery retries --- kobo/apps/hook/constants.py | 9 + kobo/apps/hook/exceptions.py | 3 + kobo/apps/hook/models/hook_log.py | 65 ++---- .../models/service_definition_interface.py | 214 +++++++++++------- kobo/apps/hook/tasks.py | 34 ++- kobo/apps/hook/tests/test_utils.py | 8 +- kobo/apps/hook/utils.py | 33 --- kobo/apps/hook/utils/__init__.py | 0 kobo/apps/hook/utils/lazy.py | 44 ++++ kobo/apps/hook/utils/services.py | 27 +++ kobo/apps/hook/views/v2/hook.py | 25 +- kobo/apps/hook/views/v2/hook_log.py | 2 +- .../apps/viewer/models/parsed_instance.py | 4 +- kobo/settings/base.py | 2 +- 14 files changed, 272 insertions(+), 198 deletions(-) create mode 100644 kobo/apps/hook/exceptions.py delete mode 100644 kobo/apps/hook/utils.py create mode 100644 kobo/apps/hook/utils/__init__.py create mode 100644 kobo/apps/hook/utils/lazy.py create mode 100644 kobo/apps/hook/utils/services.py diff --git a/kobo/apps/hook/constants.py b/kobo/apps/hook/constants.py index 00998fc333..7b84fa03bb 100644 --- a/kobo/apps/hook/constants.py +++ b/kobo/apps/hook/constants.py @@ -1,5 +1,6 @@ # coding: utf-8 from enum import Enum +from rest_framework import status HOOK_LOG_FAILED = 0 @@ -16,3 +17,11 @@ class HookLogStatus(Enum): KOBO_INTERNAL_ERROR_STATUS_CODE = None SUBMISSION_PLACEHOLDER = '%SUBMISSION%' + +# Status codes that trigger a retry +RETRIABLE_STATUS_CODES = [ + # status.HTTP_429_TOO_MANY_REQUESTS, + status.HTTP_502_BAD_GATEWAY, + status.HTTP_503_SERVICE_UNAVAILABLE, + status.HTTP_504_GATEWAY_TIMEOUT, +] diff --git a/kobo/apps/hook/exceptions.py b/kobo/apps/hook/exceptions.py new file mode 100644 index 0000000000..1997f47c2e --- /dev/null +++ 
b/kobo/apps/hook/exceptions.py @@ -0,0 +1,3 @@ + +class HookRemoteServerDownError(Exception): + pass diff --git a/kobo/apps/hook/models/hook_log.py b/kobo/apps/hook/models/hook_log.py index 51c1a8a2f9..2cba30815f 100644 --- a/kobo/apps/hook/models/hook_log.py +++ b/kobo/apps/hook/models/hook_log.py @@ -1,4 +1,3 @@ -# coding: utf-8 from datetime import timedelta import constance @@ -17,39 +16,44 @@ class HookLog(models.Model): - hook = models.ForeignKey("Hook", related_name="logs", on_delete=models.CASCADE) + hook = models.ForeignKey( + "Hook", related_name="logs", on_delete=models.CASCADE + ) uid = KpiUidField(uid_prefix="hl") - submission_id = models.IntegerField(default=0, db_index=True) # `KoBoCAT.logger.Instance.id` + submission_id = models.IntegerField( # `KoboCAT.logger.Instance.id` + default=0, db_index=True + ) tries = models.PositiveSmallIntegerField(default=0) status = models.PositiveSmallIntegerField( choices=[[e.value, e.name.title()] for e in HookLogStatus], - default=HookLogStatus.PENDING.value + default=HookLogStatus.PENDING.value, ) # Could use status_code, but will speed-up queries - status_code = models.IntegerField(default=KOBO_INTERNAL_ERROR_STATUS_CODE, null=True, blank=True) + status_code = models.IntegerField( + default=KOBO_INTERNAL_ERROR_STATUS_CODE, null=True, blank=True + ) message = models.TextField(default="") date_created = models.DateTimeField(auto_now_add=True) date_modified = models.DateTimeField(auto_now_add=True) class Meta: - ordering = ["-date_created"] + ordering = ['-date_created'] + @property def can_retry(self) -> bool: """ Return whether instance can be resent to external endpoint. Notice: even if False is returned, `self.retry()` can be triggered. """ if self.hook.active: - seconds = HookLog.get_elapsed_seconds( - constance.config.HOOK_MAX_RETRIES - ) - threshold = timezone.now() - timedelta(seconds=seconds) - # We can retry only if system has already tried 3 times. - # If log is still pending after 3 times, there was an issue, - # we allow the retry - return ( - self.status == HOOK_LOG_FAILED - or (self.date_modified < threshold and self.status == HOOK_LOG_PENDING) - ) + if self.tries >= constance.config.HOOK_MAX_RETRIES: + # If log is still pending after `constance.config.HOOK_MAX_RETRIES` + # times, there was an issue, we allow the retry. + threshold = timezone.now() - timedelta(seconds=120) + + return self.status == HOOK_LOG_FAILED or ( + self.date_modified < threshold + and self.status == HOOK_LOG_PENDING + ) return False @@ -66,29 +70,6 @@ def change_status( self.save(reset_status=True) - @staticmethod - def get_elapsed_seconds(retries_count: int) -> int: - """ - Calculate number of elapsed seconds since first try. - Return the number of seconds. - """ - # We need to sum all seconds between each retry - seconds = 0 - for retries_count in range(retries_count): - # Range is zero-indexed - seconds += HookLog.get_remaining_seconds(retries_count) - - return seconds - - @staticmethod - def get_remaining_seconds(retries_count): - """ - Calculate number of remaining seconds before next retry - :param retries_count: int. - :return: int. 
Number of seconds - """ - return 60 * (10 ** retries_count) - def retry(self): """ Retries to send data to external service @@ -100,7 +81,7 @@ def retry(self): service_definition.send() self.refresh_from_db() except Exception as e: - logging.error("HookLog.retry - {}".format(str(e)), exc_info=True) + logging.error('HookLog.retry - {}'.format(str(e)), exc_info=True) self.change_status(HOOK_LOG_FAILED) return False @@ -110,7 +91,7 @@ def save(self, *args, **kwargs): # Update date_modified each time object is saved self.date_modified = timezone.now() # We don't want to alter tries when we only change the status - if kwargs.pop("reset_status", False) is False: + if kwargs.pop('reset_status', False) is False: self.tries += 1 self.hook.reset_totals() super().save(*args, **kwargs) diff --git a/kobo/apps/hook/models/service_definition_interface.py b/kobo/apps/hook/models/service_definition_interface.py index e721bf45b7..9b2bd1a095 100644 --- a/kobo/apps/hook/models/service_definition_interface.py +++ b/kobo/apps/hook/models/service_definition_interface.py @@ -15,7 +15,9 @@ HOOK_LOG_SUCCESS, HOOK_LOG_FAILED, KOBO_INTERNAL_ERROR_STATUS_CODE, + RETRIABLE_STATUS_CODES, ) +from ..exceptions import HookRemoteServerDownError class ServiceDefinitionInterface(metaclass=ABCMeta): @@ -41,7 +43,8 @@ def _get_data(self): 'service_json.ServiceDefinition._get_data: ' f'Hook #{self._hook.uid} - Data #{self._submission_id} - ' f'{str(e)}', - exc_info=True) + exc_info=True, + ) return None @abstractmethod @@ -71,106 +74,141 @@ def _prepare_request_kwargs(self): """ pass - def send(self): + def send(self) -> bool: """ - Sends data to external endpoint - :return: bool + Sends data to external endpoint. + + Raise an exception if something is wrong. Retries are only allowed + when `HookRemoteServerDownError` is raised. 
""" - success = False + if not self._data: + self.save_log( + KOBO_INTERNAL_ERROR_STATUS_CODE, 'Submission has been deleted', allow_retries=False + ) + return False + # Need to declare response before requests.post assignment in case of # RequestException response = None - if self._data: - try: - request_kwargs = self._prepare_request_kwargs() - - # Add custom headers - request_kwargs.get("headers").update( - self._hook.settings.get("custom_headers", {})) - - # Add user agent - public_domain = "- {} ".format(os.getenv("PUBLIC_DOMAIN_NAME")) \ - if os.getenv("PUBLIC_DOMAIN_NAME") else "" - request_kwargs.get("headers").update({ - "User-Agent": "KoboToolbox external service {}#{}".format( - public_domain, - self._hook.uid) - }) - - # If the request needs basic authentication with username and - # password, let's provide them - if self._hook.auth_level == Hook.BASIC_AUTH: - request_kwargs.update({ - "auth": (self._hook.settings.get("username"), - self._hook.settings.get("password")) - }) - - ssrf_protect_options = {} - if constance.config.SSRF_ALLOWED_IP_ADDRESS.strip(): - ssrf_protect_options['allowed_ip_addresses'] = constance.\ - config.SSRF_ALLOWED_IP_ADDRESS.strip().split('\r\n') - - if constance.config.SSRF_DENIED_IP_ADDRESS.strip(): - ssrf_protect_options['denied_ip_addresses'] = constance.\ - config.SSRF_DENIED_IP_ADDRESS.strip().split('\r\n') - - SSRFProtect.validate(self._hook.endpoint, - options=ssrf_protect_options) - - response = requests.post(self._hook.endpoint, timeout=30, - **request_kwargs) - response.raise_for_status() - self.save_log(response.status_code, response.text, True) - success = True - except requests.exceptions.RequestException as e: - # If request fails to communicate with remote server. - # Exception is raised before request.post can return something. 
- # Thus, response equals None - status_code = KOBO_INTERNAL_ERROR_STATUS_CODE - text = str(e) - if response is not None: - text = response.text - status_code = response.status_code - self.save_log(status_code, text) - except SSRFProtectException as e: - logging.error( - 'service_json.ServiceDefinition.send: ' - f'Hook #{self._hook.uid} - ' - f'Data #{self._submission_id} - ' - f'{str(e)}', - exc_info=True) - self.save_log( - KOBO_INTERNAL_ERROR_STATUS_CODE, - f'{self._hook.endpoint} is not allowed') - except Exception as e: - logging.error( - 'service_json.ServiceDefinition.send: ' - f'Hook #{self._hook.uid} - ' - f'Data #{self._submission_id} - ' - f'{str(e)}', - exc_info=True) - self.save_log( - KOBO_INTERNAL_ERROR_STATUS_CODE, - "An error occurred when sending data to external endpoint") - else: - self.save_log( - KOBO_INTERNAL_ERROR_STATUS_CODE, - 'Submission has been deleted' + try: + request_kwargs = self._prepare_request_kwargs() + + # Add custom headers + request_kwargs.get('headers').update( + self._hook.settings.get('custom_headers', {}) ) - return success + # Add user agent + public_domain = ( + '- {} '.format(os.getenv('PUBLIC_DOMAIN_NAME')) + if os.getenv('PUBLIC_DOMAIN_NAME') + else '' + ) + request_kwargs.get('headers').update( + { + 'User-Agent': 'KoboToolbox external service {}#{}'.format( + public_domain, self._hook.uid + ) + } + ) - def save_log(self, status_code: int, message: str, success: bool = False): + # If the request needs basic authentication with username and + # password, let's provide them + if self._hook.auth_level == Hook.BASIC_AUTH: + request_kwargs.update( + { + 'auth': ( + self._hook.settings.get('username'), + self._hook.settings.get('password'), + ) + } + ) + + ssrf_protect_options = {} + if constance.config.SSRF_ALLOWED_IP_ADDRESS.strip(): + ssrf_protect_options[ + 'allowed_ip_addresses' + ] = constance.config.SSRF_ALLOWED_IP_ADDRESS.strip().split( + '\r\n' + ) + + if constance.config.SSRF_DENIED_IP_ADDRESS.strip(): + ssrf_protect_options[ + 'denied_ip_addresses' + ] = constance.config.SSRF_DENIED_IP_ADDRESS.strip().split( + '\r\n' + ) + + SSRFProtect.validate( + self._hook.endpoint, options=ssrf_protect_options + ) + + response = requests.post( + self._hook.endpoint, timeout=30, **request_kwargs + ) + response.raise_for_status() + self.save_log(response.status_code, response.text, success=True) + + return True + + except requests.exceptions.RequestException as e: + # If request fails to communicate with remote server. + # Exception is raised before request.post can return something. 
+ # Thus, response equals None + status_code = KOBO_INTERNAL_ERROR_STATUS_CODE + text = str(e) + if response is not None: + text = response.text + status_code = response.status_code + + if status_code in RETRIABLE_STATUS_CODES: + self.save_log(status_code, text, allow_retries=True) + raise HookRemoteServerDownError + + self.save_log(status_code, text) + raise + except SSRFProtectException as e: + logging.error( + 'service_json.ServiceDefinition.send: ' + f'Hook #{self._hook.uid} - ' + f'Data #{self._submission_id} - ' + f'{str(e)}', + exc_info=True, + ) + self.save_log( + KOBO_INTERNAL_ERROR_STATUS_CODE, + f'{self._hook.endpoint} is not allowed' + ) + raise + except Exception as e: + logging.error( + 'service_json.ServiceDefinition.send: ' + f'Hook #{self._hook.uid} - ' + f'Data #{self._submission_id} - ' + f'{str(e)}', + exc_info=True, + ) + self.save_log( + KOBO_INTERNAL_ERROR_STATUS_CODE, + 'An error occurred when sending ' + f'data to external endpoint: {str(e)}', + ) + raise + + def save_log( + self, + status_code: int, + message: str, + success: bool = False, + allow_retries: bool = False, + ): """ Updates/creates log entry with: - `status_code` as the HTTP status code of the remote server response - `message` as the content of the remote server response """ - fields = { - 'hook': self._hook, - 'submission_id': self._submission_id - } + fields = {'hook': self._hook, 'submission_id': self._submission_id} try: # Try to load the log with a multiple field FK because # we don't know the log `uid` in this context, but we do know @@ -181,7 +219,7 @@ def save_log(self, status_code: int, message: str, success: bool = False): if success: log.status = HOOK_LOG_SUCCESS - elif log.tries >= constance.config.HOOK_MAX_RETRIES: + elif not allow_retries or log.tries >= constance.config.HOOK_MAX_RETRIES: log.status = HOOK_LOG_FAILED log.status_code = status_code diff --git a/kobo/apps/hook/tasks.py b/kobo/apps/hook/tasks.py index b1dfbf7a96..c87cd21076 100644 --- a/kobo/apps/hook/tasks.py +++ b/kobo/apps/hook/tasks.py @@ -9,37 +9,33 @@ from django.utils import translation, timezone from django_celery_beat.models import PeriodicTask +from kobo.celery import celery_app from kpi.utils.log import logging from .constants import HOOK_LOG_FAILED +from .exceptions import HookRemoteServerDownError from .models import Hook, HookLog - - -@shared_task(bind=True) -def service_definition_task(self, hook_id, submission_id): +from .utils.lazy import LazyMaxRetriesInt + + +@celery_app.task( + autoretry_for=(HookRemoteServerDownError,), + retry_backoff=60, + retry_backoff_max=1200, + max_retries=LazyMaxRetriesInt(), + retry_jitter=True, + queue='kpi_low_priority_queue', +) +def service_definition_task(hook_id: int, submission_id: int) -> bool: """ Tries to send data to the endpoint of the hook It retries n times (n = `constance.config.HOOK_MAX_RETRIES`) - - - after 1 minutes, - - after 10 minutes, - - after 100 minutes - etc ... - - :param self: Celery.Task. - :param hook_id: int. Hook PK - :param submission_id: int. Instance PK """ hook = Hook.objects.get(id=hook_id) # Use camelcase (even if it's not PEP-8 compliant) # because variable represents the class, not the instance. 
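The new task decorator above moves retry handling into Celery itself: when `send()` hits one of the gateway-type codes in `RETRIABLE_STATUS_CODES` and raises `HookRemoteServerDownError`, the task is retried automatically with exponential backoff. A rough sketch of the schedule this configuration produces, ignoring `retry_jitter` (an illustration, not part of the patch):

    # Approximate delay before each automatic retry, in seconds, assuming
    # retry_backoff=60 and retry_backoff_max=1200 as configured above.
    delays = [min(60 * (2 ** attempt), 1200) for attempt in range(6)]
    print(delays)  # [60, 120, 240, 480, 960, 1200]
    # The retry limit is not frozen at import time: LazyMaxRetriesInt()
    # compares equal to the current constance.config.HOOK_MAX_RETRIES, so the
    # admin-configured value is honoured when Celery checks max_retries.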
ServiceDefinition = hook.get_service_definition() # noqa service_definition = ServiceDefinition(hook, submission_id) - if not service_definition.send(): - # Countdown is in seconds - countdown = HookLog.get_remaining_seconds(self.request.retries) - raise self.retry(countdown=countdown, max_retries=constance.config.HOOK_MAX_RETRIES) - - return True + return service_definition.send() @shared_task diff --git a/kobo/apps/hook/tests/test_utils.py b/kobo/apps/hook/tests/test_utils.py index ad198a7b7c..94167f1886 100644 --- a/kobo/apps/hook/tests/test_utils.py +++ b/kobo/apps/hook/tests/test_utils.py @@ -3,7 +3,7 @@ from rest_framework import status from .hook_test_case import HookTestCase, MockSSRFProtect -from ..utils import HookUtils +from ..utils.services import call_services class HookUtilsTestCase(HookTestCase): @@ -29,7 +29,7 @@ def test_data_submission(self): submissions = self.asset.deployment.get_submissions(self.asset.owner) submission_id = submissions[0]['_id'] - assert HookUtils.call_services(self.asset.uid, submission_id) is True + assert call_services(self.asset.uid, submission_id) is True # Create second hook second_hook = self._create_hook( @@ -45,8 +45,8 @@ def test_data_submission(self): ) # Since second hook hasn't received the submission, `call_services` # should still return True - assert HookUtils.call_services(self.asset.uid, submission_id) is True + assert call_services(self.asset.uid, submission_id) is True # But if we try again, it should return False (we cannot send the same # submission twice to the same external endpoint). - assert HookUtils.call_services(self.asset.uid, submission_id) is False + assert call_services(self.asset.uid, submission_id) is False diff --git a/kobo/apps/hook/utils.py b/kobo/apps/hook/utils.py deleted file mode 100644 index 5bb6e1391c..0000000000 --- a/kobo/apps/hook/utils.py +++ /dev/null @@ -1,33 +0,0 @@ -# coding: utf-8 -from .models.hook import Hook -from .models.hook_log import HookLog -from .tasks import service_definition_task - - -class HookUtils: - - @staticmethod - def call_services(asset_uid: str, submission_id: int) -> bool: - """ - Delegates to Celery data submission to remote servers - """ - # Retrieve `Hook` ids, to send data to their respective endpoint. - hooks_ids = ( - Hook.objects.filter(asset__uid=asset_uid, active=True) - .values_list('id', flat=True) - .distinct() - ) - # At least, one of the hooks must not have a log that corresponds to - # `submission_id` - # to make success equal True - success = False - for hook_id in hooks_ids: - if not HookLog.objects.filter( - submission_id=submission_id, hook_id=hook_id - ).exists(): - success = True - service_definition_task.apply_async( - queue='kpi_low_priority_queue', args=(hook_id, submission_id) - ) - - return success diff --git a/kobo/apps/hook/utils/__init__.py b/kobo/apps/hook/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/kobo/apps/hook/utils/lazy.py b/kobo/apps/hook/utils/lazy.py new file mode 100644 index 0000000000..58a64c9d1a --- /dev/null +++ b/kobo/apps/hook/utils/lazy.py @@ -0,0 +1,44 @@ +import constance + + +class LazyMaxRetriesInt: + """ + constance settings cannot be used as default parameters of a function. + This wrapper helps to return the value of `constance.config.HOOK_MAX_RETRIES` + on demand. 
+ """ + def __call__(self, *args, **kwargs): + return constance.config.HOOK_MAX_RETRIES + + def __repr__(self): + return str(constance.config.HOOK_MAX_RETRIES) + + def __eq__(self, other): + if isinstance(other, int): + return self() == other + return NotImplemented + + def __ne__(self, other): + if isinstance(other, int): + return self() != other + return NotImplemented + + def __lt__(self, other): + if isinstance(other, int): + return self() < other + return NotImplemented + + def __le__(self, other): + if isinstance(other, int): + return self() <= other + return NotImplemented + + def __gt__(self, other): + if isinstance(other, int): + return self() > other + return NotImplemented + + def __ge__(self, other): + if isinstance(other, int): + return self() >= other + return NotImplemented diff --git a/kobo/apps/hook/utils/services.py b/kobo/apps/hook/utils/services.py new file mode 100644 index 0000000000..f0d05c7ad1 --- /dev/null +++ b/kobo/apps/hook/utils/services.py @@ -0,0 +1,27 @@ +from ..models.hook import Hook +from ..models.hook_log import HookLog +from ..tasks import service_definition_task + + +def call_services(asset_uid: str, submission_id: int) -> bool: + """ + Delegates to Celery data submission to remote servers + """ + # Retrieve `Hook` ids, to send data to their respective endpoint. + hooks_ids = ( + Hook.objects.filter(asset__uid=asset_uid, active=True) + .values_list('id', flat=True) + .distinct() + ) + # At least, one of the hooks must not have a log that corresponds to + # `submission_id` + # to make success equal True + success = False + + for hook_id in hooks_ids: + if not HookLog.objects.filter( + submission_id=submission_id, hook_id=hook_id + ).exists(): + success = True + service_definition_task.delay(hook_id, submission_id) + return success diff --git a/kobo/apps/hook/views/v2/hook.py b/kobo/apps/hook/views/v2/hook.py index 9c4f795b0d..1e2a975868 100644 --- a/kobo/apps/hook/views/v2/hook.py +++ b/kobo/apps/hook/views/v2/hook.py @@ -174,13 +174,20 @@ def retry(self, request, uid=None, *args, **kwargs): response = {"detail": t("Task successfully scheduled")} status_code = status.HTTP_200_OK if hook.active: - seconds = HookLog.get_elapsed_seconds(constance.config.HOOK_MAX_RETRIES) - threshold = timezone.now() - timedelta(seconds=seconds) - - records = hook.logs.filter(Q(date_modified__lte=threshold, - status=HOOK_LOG_PENDING) | - Q(status=HOOK_LOG_FAILED)). 
\ - values_list("id", "uid").distinct() + threshold = timezone.now() - timedelta(seconds=120) + + records = ( + hook.logs.filter( + Q( + date_modified__lte=threshold, + status=HOOK_LOG_PENDING, + tries__gte=constance.config.HOOK_MAX_RETRIES, + ) + | Q(status=HOOK_LOG_FAILED) + ) + .values_list('id', 'uid') + .distinct() + ) # Prepare lists of ids hooklogs_ids = [] hooklogs_uids = [] @@ -190,7 +197,9 @@ def retry(self, request, uid=None, *args, **kwargs): if len(records) > 0: # Mark all logs as PENDING - HookLog.objects.filter(id__in=hooklogs_ids).update(status=HOOK_LOG_PENDING) + HookLog.objects.filter(id__in=hooklogs_ids).update( + status=HOOK_LOG_PENDING + ) # Delegate to Celery retry_all_task.apply_async( queue='kpi_low_priority_queue', args=(hooklogs_ids,) diff --git a/kobo/apps/hook/views/v2/hook_log.py b/kobo/apps/hook/views/v2/hook_log.py index ab6e1e29c9..049047655c 100644 --- a/kobo/apps/hook/views/v2/hook_log.py +++ b/kobo/apps/hook/views/v2/hook_log.py @@ -108,7 +108,7 @@ def retry(self, request, uid=None, *args, **kwargs): status_code = status.HTTP_200_OK hook_log = self.get_object() - if hook_log.can_retry(): + if hook_log.can_retry: hook_log.change_status() success = hook_log.retry() if success: diff --git a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py index 2427ab651e..4afd88a3f3 100644 --- a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py +++ b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py @@ -26,7 +26,7 @@ ) from kobo.apps.openrosa.libs.utils.decorators import apply_form_field_names from kobo.apps.openrosa.libs.utils.model_tools import queryset_iterator -from kobo.apps.hook.utils import HookUtils +from kobo.apps.hook.utils.services import call_services from kpi.utils.log import logging # this is Mongo Collection where we will store the parsed submissions @@ -380,7 +380,7 @@ def save(self, asynchronous=False, *args, **kwargs): f'ParsedInstance #: {self.pk} - XForm is not linked with Asset' ) else: - HookUtils.call_services(asset_uid, self.instance_id) + call_services(asset_uid, self.instance_id) return success diff --git a/kobo/settings/base.py b/kobo/settings/base.py index b0ba92ff27..715d2badc3 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -1198,7 +1198,7 @@ def dj_stripe_request_callback_method(): # http://docs.celeryproject.org/en/latest/getting-started/brokers/redis.html#redis-visibility-timeout # TODO figure out how to pass `Constance.HOOK_MAX_RETRIES` or `HookLog.get_remaining_seconds() # Otherwise hardcode `HOOK_MAX_RETRIES` in Settings - "visibility_timeout": 60 * (10 ** 3) # Longest ETA for RestService (seconds) + "visibility_timeout": 60 * (10 ** 2) # Longest ETA for RestService (seconds) } CELERY_TASK_DEFAULT_QUEUE = "kpi_queue" From c8996f4beb31cd6e68dffb61766bad7d006c66a7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 1 Aug 2024 17:55:51 -0400 Subject: [PATCH 019/119] Fix tests --- kobo/apps/hook/models/hook_log.py | 18 ++--- kobo/apps/hook/tests/hook_test_case.py | 48 ++++++++----- kobo/apps/hook/tests/test_api_hook.py | 99 +++++++++++++++++++------- kobo/apps/hook/tests/test_ssrf.py | 12 ++-- kobo/apps/hook/views/v2/hook_log.py | 15 ++-- 5 files changed, 129 insertions(+), 63 deletions(-) diff --git a/kobo/apps/hook/models/hook_log.py b/kobo/apps/hook/models/hook_log.py index 2cba30815f..00ecd429e0 100644 --- a/kobo/apps/hook/models/hook_log.py +++ b/kobo/apps/hook/models/hook_log.py @@ -45,15 +45,15 @@ def can_retry(self) -> bool: Notice: 
even if False is returned, `self.retry()` can be triggered. """ if self.hook.active: - if self.tries >= constance.config.HOOK_MAX_RETRIES: - # If log is still pending after `constance.config.HOOK_MAX_RETRIES` - # times, there was an issue, we allow the retry. - threshold = timezone.now() - timedelta(seconds=120) - - return self.status == HOOK_LOG_FAILED or ( - self.date_modified < threshold - and self.status == HOOK_LOG_PENDING - ) + # If log is still pending after `constance.config.HOOK_MAX_RETRIES` + # times, there was an issue, we allow the retry. + threshold = timezone.now() - timedelta(seconds=120) + + return self.status == HOOK_LOG_FAILED or ( + self.date_modified < threshold + and self.status == HOOK_LOG_PENDING + and self.tries >= constance.config.HOOK_MAX_RETRIES + ) return False diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py index b4ea20658e..c5c31d420a 100644 --- a/kobo/apps/hook/tests/hook_test_case.py +++ b/kobo/apps/hook/tests/hook_test_case.py @@ -1,6 +1,7 @@ # coding: utf-8 import json +import pytest import responses from django.conf import settings from django.urls import reverse @@ -11,6 +12,7 @@ from kpi.exceptions import BadFormatException from kpi.tests.kpi_test_case import KpiTestCase from ..constants import HOOK_LOG_FAILED +from ..exceptions import HookRemoteServerDownError from ..models import HookLog, Hook @@ -94,26 +96,45 @@ def _send_and_fail(self): :return: dict """ + first_hooklog_response = self._send_and_wait_for_retry() + + # Fakes Celery n retries by forcing status to `failed` + # (where n is `settings.HOOKLOG_MAX_RETRIES`) + first_hooklog = HookLog.objects.get( + uid=first_hooklog_response.get('uid') + ) + first_hooklog.change_status(HOOK_LOG_FAILED) + + return first_hooklog_response + + def _send_and_wait_for_retry(self): self.hook = self._create_hook() ServiceDefinition = self.hook.get_service_definition() submissions = self.asset.deployment.get_submissions(self.asset.owner) submission_id = submissions[0]['_id'] service_definition = ServiceDefinition(self.hook, submission_id) - first_mock_response = {'error': 'not found'} + first_mock_response = {'error': 'gateway timeout'} # Mock first request's try - responses.add(responses.POST, self.hook.endpoint, - json=first_mock_response, status=status.HTTP_404_NOT_FOUND) + responses.add( + responses.POST, + self.hook.endpoint, + json=first_mock_response, + status=status.HTTP_504_GATEWAY_TIMEOUT, + ) # Mock next requests' tries - responses.add(responses.POST, self.hook.endpoint, - status=status.HTTP_200_OK, - content_type='application/json') + responses.add( + responses.POST, + self.hook.endpoint, + status=status.HTTP_200_OK, + content_type='application/json', + ) # Try to send data to external endpoint - success = service_definition.send() - self.assertFalse(success) + with pytest.raises(HookRemoteServerDownError): + service_definition.send() # Retrieve the corresponding log url = reverse('hook-log-list', kwargs={ @@ -126,20 +147,13 @@ def _send_and_fail(self): # Result should match first try self.assertEqual( - first_hooklog_response.get('status_code'), status.HTTP_404_NOT_FOUND + first_hooklog_response.get('status_code'), + status.HTTP_504_GATEWAY_TIMEOUT, ) self.assertEqual( json.loads(first_hooklog_response.get('message')), first_mock_response, ) - - # Fakes Celery n retries by forcing status to `failed` - # (where n is `settings.HOOKLOG_MAX_RETRIES`) - first_hooklog = HookLog.objects.get( - uid=first_hooklog_response.get('uid') - ) - 
first_hooklog.change_status(HOOK_LOG_FAILED) - return first_hooklog_response def __prepare_submission(self): diff --git a/kobo/apps/hook/tests/test_api_hook.py b/kobo/apps/hook/tests/test_api_hook.py index b8fbe5f7f8..b5a62c1623 100644 --- a/kobo/apps/hook/tests/test_api_hook.py +++ b/kobo/apps/hook/tests/test_api_hook.py @@ -1,6 +1,7 @@ # coding: utf-8 import json +import pytest import responses from constance.test import override_config from django.urls import reverse @@ -22,6 +23,7 @@ ) from kpi.utils.datetime import several_minutes_from_now from .hook_test_case import HookTestCase, MockSSRFProtect +from ..exceptions import HookRemoteServerDownError class ApiHookTestCase(HookTestCase): @@ -169,18 +171,20 @@ def test_partial_update_hook(self): self.assertFalse(hook.active) self.assertEqual(hook.name, "some disabled external service") - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MockSSRFProtect._get_ip_address + ) @responses.activate def test_send_and_retry(self): first_log_response = self._send_and_fail() # Let's retry through API call - retry_url = reverse("hook-log-retry", kwargs={ - "parent_lookup_asset": self.asset.uid, - "parent_lookup_hook": self.hook.uid, - "uid": first_log_response.get("uid") + retry_url = reverse('hook-log-retry', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid') }) # It should be a success @@ -188,17 +192,49 @@ def test_send_and_retry(self): self.assertEqual(response.status_code, status.HTTP_200_OK) # Let's check if logs has 2 tries - detail_url = reverse("hook-log-detail", kwargs={ - "parent_lookup_asset": self.asset.uid, - "parent_lookup_hook": self.hook.uid, - "uid": first_log_response.get("uid") + detail_url = reverse('hook-log-detail', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid') }) response = self.client.get(detail_url, format=SUBMISSION_FORMAT_TYPE_JSON) - self.assertEqual(response.data.get("tries"), 2) + self.assertEqual(response.data.get('tries'), 2) - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MockSSRFProtect._get_ip_address + ) + @responses.activate + def test_send_and_cannot_retry(self): + + first_log_response = self._send_and_wait_for_retry() + + # Let's retry through API call + retry_url = reverse('hook-log-retry', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid') + }) + + # It should be a failure. 
The hook log is going to be retried + response = self.client.patch(retry_url, format=SUBMISSION_FORMAT_TYPE_JSON) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + + # Let's check if logs has 2 tries + detail_url = reverse('hook-log-detail', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid') + }) + + response = self.client.get(detail_url, format=SUBMISSION_FORMAT_TYPE_JSON) + self.assertEqual(response.data.get('tries'), 1) + + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MockSSRFProtect._get_ip_address + ) @responses.activate def test_payload_template(self): @@ -321,12 +357,17 @@ def test_hook_log_filter_success(self): @responses.activate def test_hook_log_filter_failure(self): # Create failing hook - hook = self._create_hook(name="failing hook", - endpoint="http://failing.service.local/", - settings={}) - responses.add(responses.POST, hook.endpoint, - status=status.HTTP_500_INTERNAL_SERVER_ERROR, - content_type="application/json") + hook = self._create_hook( + name='failing hook', + endpoint='http://failing.service.local/', + settings={}, + ) + responses.add( + responses.POST, + hook.endpoint, + status=status.HTTP_504_GATEWAY_TIMEOUT, + content_type="application/json", + ) # simulate a submission ServiceDefinition = hook.get_service_definition() @@ -334,8 +375,8 @@ def test_hook_log_filter_failure(self): submission_id = submissions[0]['_id'] service_definition = ServiceDefinition(hook, submission_id) - success = service_definition.send() - self.assertFalse(success) + with pytest.raises(HookRemoteServerDownError): + service_definition.send() # Get log for the failing hook hook_log_url = reverse('hook-log-list', kwargs={ @@ -344,18 +385,24 @@ def test_hook_log_filter_failure(self): }) # There should be no success log for the failing hook - response = self.client.get(f'{hook_log_url}?status={HOOK_LOG_SUCCESS}', format='json') + response = self.client.get( + f'{hook_log_url}?status={HOOK_LOG_SUCCESS}', format='json' + ) self.assertEqual(response.data.get('count'), 0) # There should be a pending log for the failing hook - response = self.client.get(f'{hook_log_url}?status={HOOK_LOG_PENDING}', format='json') + response = self.client.get( + f'{hook_log_url}?status={HOOK_LOG_PENDING}', format='json' + ) self.assertEqual(response.data.get('count'), 1) def test_hook_log_filter_validation(self): # Create hook - hook = self._create_hook(name="success hook", - endpoint="http://hook.service.local/", - settings={}) + hook = self._create_hook( + name='success hook', + endpoint='http://hook.service.local/', + settings={}, + ) # Get log for the success hook hook_log_url = reverse('hook-log-list', kwargs={ diff --git a/kobo/apps/hook/tests/test_ssrf.py b/kobo/apps/hook/tests/test_ssrf.py index 89a71f4d4c..3d28e4a766 100644 --- a/kobo/apps/hook/tests/test_ssrf.py +++ b/kobo/apps/hook/tests/test_ssrf.py @@ -1,12 +1,13 @@ -# coding: utf-8 +import pytest import responses from constance.test import override_config from mock import patch from rest_framework import status +from ssrf_protect.exceptions import SSRFProtectException from kobo.apps.hook.constants import ( - HOOK_LOG_PENDING, + HOOK_LOG_FAILED, KOBO_INTERNAL_ERROR_STATUS_CODE ) from .hook_test_case import HookTestCase, MockSSRFProtect @@ -34,9 +35,10 @@ def test_send_with_ssrf_options(self): content_type='application/json') # Try to send data to external endpoint - success = service_definition.send() - self.assertFalse(success) 
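
Taken together, the per-log property (`HookLog.can_retry`) and the bulk-retry queryset in the hunks above encode a single eligibility rule: a log may be retried when it has already failed, or when it is still pending, has not been modified for two minutes, and has exhausted `constance.config.HOOK_MAX_RETRIES` automatic attempts. A minimal standalone sketch of that check, for reference only (names follow the hunks above; this code is not part of the patch):

# Illustrative sketch of the retry-eligibility rule shown in the hunks above.
from datetime import timedelta

import constance
from django.utils import timezone

from kobo.apps.hook.constants import HOOK_LOG_FAILED, HOOK_LOG_PENDING


def is_retryable(log) -> bool:
    """
    A failed log can always be retried; a pending log only once it has gone
    stale (no update for two minutes) after using up its automatic retries.
    """
    if not log.hook.active:
        return False
    threshold = timezone.now() - timedelta(seconds=120)
    return log.status == HOOK_LOG_FAILED or (
        log.status == HOOK_LOG_PENDING
        and log.date_modified < threshold
        and log.tries >= constance.config.HOOK_MAX_RETRIES
    )
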
+ with pytest.raises(SSRFProtectException): + service_definition.send() + hook_log = hook.logs.all()[0] self.assertEqual(hook_log.status_code, KOBO_INTERNAL_ERROR_STATUS_CODE) - self.assertEqual(hook_log.status, HOOK_LOG_PENDING) + self.assertEqual(hook_log.status, HOOK_LOG_FAILED) self.assertTrue('is not allowed' in hook_log.message) diff --git a/kobo/apps/hook/views/v2/hook_log.py b/kobo/apps/hook/views/v2/hook_log.py index 049047655c..6b5a8874c1 100644 --- a/kobo/apps/hook/views/v2/hook_log.py +++ b/kobo/apps/hook/views/v2/hook_log.py @@ -114,15 +114,18 @@ def retry(self, request, uid=None, *args, **kwargs): if success: # Return status_code of remote server too. # `response["status_code"]` is not the same as `status_code` - response["detail"] = hook_log.message - response["status_code"] = hook_log.status_code + response['detail'] = hook_log.message + response['status_code'] = hook_log.status_code else: - response["detail"] = t( - "An error has occurred when sending the data. Please try again later.") + response['detail'] = t( + 'An error has occurred when sending the data. ' + 'Please try again later.' + ) status_code = status.HTTP_500_INTERNAL_SERVER_ERROR else: - response["detail"] = t( - "Data is being or has already been processed") + response['detail'] = t( + 'Data is being or has already been processed' + ) status_code = status.HTTP_400_BAD_REQUEST return Response(response, status=status_code) From 72ea39046eb225be909f18c0da1ef4234601f413 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 31 Jul 2024 19:10:15 -0400 Subject: [PATCH 020/119] Remove kobo_service_account dependency --- dependencies/pip/dev_requirements.txt | 5 - dependencies/pip/requirements.in | 3 - dependencies/pip/requirements.txt | 5 - kobo/apps/openrosa/apps/api/permissions.py | 28 +-- .../tests/viewsets/test_abstract_viewset.py | 87 ++++--- .../tests/viewsets/test_attachment_viewset.py | 24 +- .../api/tests/viewsets/test_data_viewset.py | 223 +---------------- .../tests/viewsets/test_metadata_viewset.py | 30 --- .../apps/api/tests/viewsets/test_user.py | 21 -- .../viewsets/test_xform_submission_api.py | 78 +----- .../api/tests/viewsets/test_xform_viewset.py | 228 ++---------------- kobo/apps/openrosa/apps/api/tools.py | 7 +- .../apps/api/viewsets/connect_viewset.py | 9 +- .../apps/api/viewsets/data_viewset.py | 12 +- .../apps/api/viewsets/xform_submission_api.py | 5 +- .../apps/api/viewsets/xform_viewset.py | 12 +- kobo/apps/openrosa/apps/logger/app.py | 4 - .../apps/openrosa/apps/logger/models/xform.py | 4 +- .../apps/logger/tests/test_parsing.py | 8 +- .../apps/logger/tests/test_publish_xls.py | 40 ++- .../openrosa/apps/main/tests/test_base.py | 41 +--- .../apps/main/tests/test_past_bugs.py | 47 ++-- .../openrosa/apps/main/tests/test_process.py | 49 ++-- .../test_user_id_string_unique_together.py | 26 +- .../apps/viewer/models/parsed_instance.py | 7 +- kobo/apps/openrosa/libs/filters.py | 7 +- kobo/apps/openrosa/libs/models/base_model.py | 17 -- .../tests/mixins/make_submission_mixin.py | 99 +++++--- kobo/apps/openrosa/libs/utils/logger_tools.py | 5 +- kobo/apps/openrosa/libs/utils/middleware.py | 4 - kobo/settings/base.py | 2 - kpi/deployment_backends/openrosa_backend.py | 5 +- kpi/permissions.py | 3 +- 33 files changed, 295 insertions(+), 850 deletions(-) delete mode 100644 kobo/apps/openrosa/libs/models/base_model.py diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index 2b8d3377ad..4030660476 100644 --- a/dependencies/pip/dev_requirements.txt +++ 
b/dependencies/pip/dev_requirements.txt @@ -10,8 +10,6 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/kobo-service-account.git@cb52c6221b68af9b13237d0a1157e3f1965a82b1#egg=kobo-service-account - # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in -e git+https://github.com/kobotoolbox/ssrf-protect@9b97d3f0fd8f737a38dd7a6b64efeffc03ab3cdd#egg=ssrf_protect @@ -160,7 +158,6 @@ django==4.2.11 # django-timezone-field # djangorestframework # jsonfield - # kobo-service-account # model-bakery django-allauth==0.61.1 # via -r dependencies/pip/requirements.in @@ -227,7 +224,6 @@ djangorestframework==3.15.1 # -r dependencies/pip/requirements.in # djangorestframework-csv # drf-extensions - # kobo-service-account djangorestframework-csv==3.0.2 # via -r dependencies/pip/requirements.in djangorestframework-jsonp==1.0.2 @@ -505,7 +501,6 @@ redis==5.0.3 # celery # django-redis # django-redis-sessions - # kobo-service-account referencing==0.34.0 # via # jsonschema diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index c6183bcded..978aa509e7 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -4,9 +4,6 @@ # formpack -e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack -# service-account --e git+https://github.com/kobotoolbox/kobo-service-account.git@cb52c6221b68af9b13237d0a1157e3f1965a82b1#egg=kobo-service-account - # More up-to-date version of django-digest than PyPI seems to have. # Also, python-digest is an unlisted dependency thereof. 
-e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index 22a6b99dcd..4c2eafb4c8 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -10,8 +10,6 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/kobo-service-account.git@cb52c6221b68af9b13237d0a1157e3f1965a82b1#egg=kobo-service-account - # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in -e git+https://github.com/kobotoolbox/ssrf-protect@9b97d3f0fd8f737a38dd7a6b64efeffc03ab3cdd#egg=ssrf_protect @@ -138,7 +136,6 @@ django==4.2.11 # django-timezone-field # djangorestframework # jsonfield - # kobo-service-account django-allauth==0.61.1 # via -r dependencies/pip/requirements.in django-amazon-ses==4.0.1 @@ -204,7 +201,6 @@ djangorestframework==3.15.1 # -r dependencies/pip/requirements.in # djangorestframework-csv # drf-extensions - # kobo-service-account djangorestframework-csv==3.0.2 # via -r dependencies/pip/requirements.in djangorestframework-jsonp==1.0.2 @@ -420,7 +416,6 @@ redis==5.0.3 # celery # django-redis # django-redis-sessions - # kobo-service-account referencing==0.34.0 # via # jsonschema diff --git a/kobo/apps/openrosa/apps/api/permissions.py b/kobo/apps/openrosa/apps/api/permissions.py index 1523a6ada6..e7ea0771aa 100644 --- a/kobo/apps/openrosa/apps/api/permissions.py +++ b/kobo/apps/openrosa/apps/api/permissions.py @@ -1,6 +1,5 @@ # coding: utf-8 from django.http import Http404 -from kobo_service_account.models import ServiceAccountUser from rest_framework.permissions import ( BasePermission, DjangoObjectPermissions, @@ -90,7 +89,6 @@ def has_permission(self, request, view): request.method not in SAFE_METHODS and view.action and view.action in ['create', 'update', 'partial_update', 'destroy'] - and not isinstance(request.user, ServiceAccountUser) ): raise LegacyAPIException @@ -174,23 +172,14 @@ def has_object_permission(self, request, view, obj): except KeyError: pass else: - # Only service account is allowed to bulk delete submissions. - # Even KoBoCAT superusers are not allowed - if ( - view.action == 'bulk_delete' - and not isinstance(user, ServiceAccountUser) - ): - # return False + # Deleting submissions is not allowed anymore with KoboCAT API + if view.action == 'bulk_delete': raise LegacyAPIException return user.has_perms(required_perms, obj) - # Only service account is allowed to delete submissions. 
- # Even KoBoCAT superusers are not allowed - if ( - view.action == 'destroy' - and not isinstance(user, ServiceAccountUser) - ): + # Deleting submissions in not allowed anymore with KoboCAT API + if view.action == 'destroy': raise LegacyAPIException return super().has_object_permission(request, view, obj) @@ -205,7 +194,7 @@ def has_object_permission(self, request, view, obj): user = request.user required_perms = [f'logger.{CAN_CHANGE_XFORM}'] - # Grant access if user is owner or super user + # Grant access if user is owner or superuser if user.is_superuser or user == obj.user: return True @@ -370,11 +359,8 @@ class UserDeletePermission(BasePermission): perms_map = {} def has_permission(self, request, view): - if not isinstance(request.user, ServiceAccountUser): - # Do not reveal user's existence - raise Http404 - - return True + # Do not reveal user's existence + raise Http404 def has_object_permission(self, request, view, obj): # Always return True because it must pass `has_permission()` first diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py index cfd27d2989..6632ae1a9d 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py @@ -1,15 +1,16 @@ # coding: utf-8 import os +from typing import Union from django.conf import settings from django.contrib.auth.models import ( AnonymousUser, Permission, ) +from django.core.files.base import ContentFile from django.test import TestCase -from django.test.client import Client from django_digest.test import DigestAuth -from kobo_service_account.utils import get_request_headers +from rest_framework import status from rest_framework.reverse import reverse from rest_framework.test import APIRequestFactory @@ -20,6 +21,7 @@ from kobo.apps.openrosa.apps.main.models import UserProfile, MetaData from kobo.apps.openrosa.libs.tests.mixins.make_submission_mixin import MakeSubmissionMixin from kobo.apps.openrosa.libs.tests.mixins.request_mixin import RequestMixin +from kobo.apps.openrosa.libs.utils import logger_tools class TestAbstractViewSet(RequestMixin, MakeSubmissionMixin, TestCase): @@ -52,8 +54,14 @@ def setUp(self): self.maxDiff = None def publish_xls_form( - self, path=None, data=None, assert_=True, use_service_account=True + self, path=None, data=None, assert_creation=True, use_api=False ): + # KoboCAT (v1) API does not allow project creation anymore. + # Only KPI API allows that. The project can be only added to KoboCAT + # during deployment. Thus, this method will create the XForm object directly + # without an API call except if `use_api` is True. 
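
The comment block above explains the two paths this helper now takes: by default it builds the `XForm` directly through `logger_tools.publish_xls_form()`, and only when `use_api=True` does it post to the v1 endpoint, which is now expected to refuse project creation. A short, hypothetical usage sketch for reference (not part of the patch; it assumes a `TestAbstractViewSet` subclass where `from rest_framework import status` is in scope, as added by this patch):

# Hypothetical test method illustrating both paths of publish_xls_form().
def test_publish_paths(self):
    # Default path: create the XForm directly via logger_tools, no API call.
    self.publish_xls_form()
    assert self.xform is not None

    # Explicit API path: the v1 endpoint must now reject project creation.
    response = self.publish_xls_form(use_api=True, assert_creation=False)
    assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
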
+ + # In unit tests, if we need to test the result of the (KoboCAT API), if not data: data = { 'owner': self.user.username, @@ -77,38 +85,36 @@ def publish_xls_form( 'transportation.xls', ) - xform_list_url = reverse('xform-list') + if use_api is True: + xform_list_url = reverse('xform-list') + with open(path, 'rb') as xls_file: + post_data = {'xls_file': xls_file} + response = self.client.post( + xform_list_url, data=post_data, **self.extra + ) - if use_service_account: - # Only service account user is allowed to `POST` to XForm API - client = Client() - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - else: - # For test purposes we want to try to `POST` with current logged-in - # user - client = self.client - service_account_meta = self.extra - - with open(path, 'rb') as xls_file: - post_data = {'xls_file': xls_file} - response = client.post( - xform_list_url, data=post_data, **service_account_meta - ) + if assert_creation is False: + return response - if not assert_: - return response + self.assertEqual(response.status_code, 201) + self.xform = XForm.objects.all().order_by('pk').reverse()[0] + data.update({ + 'url': f'http://testserver/api/v1/forms/{self.xform.pk}' + }) + self.assertEqual(dict(response.data, **data), response.data) + self.form_data = response.data + else: + with open(path, 'rb') as f: + xls_file = ContentFile(f.read(), name=f'transportation.xls') - self.assertEqual(response.status_code, 201) - self.xform = XForm.objects.all().order_by('pk').reverse()[0] - data.update({ - 'url': f'http://testserver/api/v1/forms/{self.xform.pk}' - }) + self.xform = logger_tools.publish_xls_form(xls_file, self.user) + response = self.client.get( + reverse('xform-detail', kwargs={'pk': self.xform.pk}) + ) - self.assertEqual(dict(response.data, **data), response.data) - self.form_data = response.data + if assert_creation is True: + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.form_data = response.data def user_profile_data(self): return { @@ -184,15 +190,16 @@ def _login_user_and_profile(self, extra_post_data={}): def _make_submission( self, - path, - username=None, - add_uuid=False, - forced_submission_time=None, - auth=None, - media_file=None, - use_service_account=False, + path: str, + username: str = None, + add_uuid: bool = False, + forced_submission_time: bool = None, + auth: Union[DigestAuth, bool] = None, + media_file: 'io.BufferedReader' = None, + assert_success: bool = True, + use_api: bool = True, ): - if auth is None and not use_service_account: + if auth is None: auth = DigestAuth( self.profile_data['username'], self.profile_data['password1'] ) @@ -204,7 +211,7 @@ def _make_submission( forced_submission_time, auth, media_file, - use_service_account, + use_api, ) def _make_submissions(self, username=None): diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py index a705237179..7e874b9858 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py @@ -1,10 +1,7 @@ # coding: utf-8 import os -import pytest from django.conf import settings -from kobo_service_account.utils import get_request_headers -from rest_framework import status from rest_framework.reverse import reverse from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( @@ 
-81,23 +78,6 @@ def _retrieve_view(self, auth_headers): def test_retrieve_view(self): self._retrieve_view(self.extra) - def test_retrieve_view_with_service_account(self): - extra = {'HTTP_AUTHORIZATION': f'Token {self.alice.auth_token}'} - # Alice cannot view bob's attachment and should receive a 404. - # The first assertion is `response.status_code == 200`, thus it should - # raise an error - assertion_pattern = ( - f'{status.HTTP_404_NOT_FOUND} != {status.HTTP_200_OK}' - ) - with pytest.raises(AssertionError, match=assertion_pattern) as e: - self._retrieve_view(extra) - - # Try the same request with service account user on behalf of alice - extra = self.get_meta_from_headers(get_request_headers(self.alice.username)) - # Test server does not provide `host` header - extra['HTTP_HOST'] = settings.TEST_HTTP_HOST - self._retrieve_view(extra) - def test_list_view(self): self._submit_transport_instance_w_attachment() @@ -323,13 +303,13 @@ def test_update_attachment_on_edit(self): 'transport_with_attachment', 'IMG_2235.JPG' ) + # Edit are only allowed with service account with open(media_file_path, 'rb') as media_file: self._make_submission( xml_path, media_file=media_file, - auth=False, - use_service_account=True, + use_api=False, ) # Validate counters are up-to-date and instances count is still one. diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py index f952087543..1be75afba9 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py @@ -1,10 +1,8 @@ # coding: utf-8 import requests -from django.conf import settings from django.test import RequestFactory from kobo.apps.openrosa.libs.utils.guardian import assign_perm, remove_perm -from kobo_service_account.utils import get_request_headers from rest_framework import status from kobo.apps.openrosa.apps.api.viewsets.data_viewset import DataViewSet @@ -93,57 +91,6 @@ def test_data(self): self.assertEqual(dict(response.data, **data), response.data) - def test_data_with_service_account(self): - self._make_submissions() - view = DataViewSet.as_view({'get': 'list'}) - - # Access the list endpoint as Bob. 
- request = self.factory.get('/', **self.extra) - response = view(request) - self.assertEqual(response.status_code, status.HTTP_200_OK) - formid = self.xform.pk - data = _data_list(formid) - self.assertEqual(response.data, data) - - # Access the data endpoint as Bob; reinitialize `request` since it has - # already been consumed within the previous block - request = self.factory.get('/', **self.extra) - response = view(request, pk=formid) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIsInstance(response.data, list) - self.assertTrue(self.xform.instances.count()) - - # Alice cannot see Bob's data - self._create_user_and_login(username='alice', password='alice') - self.extra = { - 'HTTP_AUTHORIZATION': 'Token %s' % self.user.auth_token} - request = self.factory.get('/', **self.extra) - response = view(request, pk=formid) - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - - # Try the same request with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - request = self.factory.get('/', **service_account_meta) - response = view(request, pk=formid) - self.assertEqual(response.status_code, status.HTTP_200_OK) - - dataid = self.xform.instances.all().order_by('id')[0].pk - data = _data_instance(dataid) - response_first_element = sorted(response.data, key=lambda x: x['_id'])[0] - self.assertEqual(dict(response_first_element, **data), - response_first_element) - - view = DataViewSet.as_view({'get': 'retrieve'}) - response = view(request, pk=formid, dataid=dataid) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertIsInstance(response.data, dict) - self.assertEqual(dict(response.data, **data), - response.data) - def test_data_anon(self): self._make_submissions() view = DataViewSet.as_view({'get': 'list'}) @@ -509,7 +456,7 @@ def test_delete_submission(self): count = self.xform.instances.all().count() self.assertEqual(before_count - 1, count) - def test_cannot_delete_submission_as_granted_user_but_not_service_account(self): + def test_cannot_delete_submission_as_granted_user(self): self._make_submissions() before_count = self.xform.instances.all().count() view = DataViewSet.as_view({'delete': 'destroy'}) @@ -542,39 +489,7 @@ def test_cannot_delete_submission_as_granted_user_but_not_service_account(self): status_code=status.HTTP_405_METHOD_NOT_ALLOWED, ) - def test_delete_submission_with_service_account(self): - self._make_submissions() - before_count = self.xform.instances.all().count() - view = DataViewSet.as_view({'delete': 'destroy'}) - formid = self.xform.pk - - self._create_user_and_login(username='alice', password='alice') - # `self.user` is now alice - # Give Alice some permissions but not to delete submissions. 
- assign_perm(CAN_VIEW_XFORM, self.user, self.xform) - assign_perm(CAN_CHANGE_XFORM, self.user, self.xform) - self.extra = {'HTTP_AUTHORIZATION': f'Token {self.user.auth_token}'} - request = self.factory.delete('/', **self.extra) - dataid = self.xform.instances.all().order_by('id')[0].pk - response = view(request, pk=formid, dataid=dataid) - - # Alice cannot delete submissions without `CAN_DELETE_DATA_XFORM` - self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) - - # Try the same request with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - - request = self.factory.delete('/', **service_account_meta) - response = view(request, pk=formid, dataid=dataid) - self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - count = self.xform.instances.all().count() - self.assertEqual(before_count - 1, count) - - def test_bulk_delete_submissions_as_granted_user_but_not_service_account(self): + def test_cannot_bulk_delete_submissions_as_granted_user(self): self._make_submissions() view = DataViewSet.as_view({'delete': 'bulk_delete'}) formid = self.xform.pk @@ -615,46 +530,6 @@ def test_cannot_bulk_delete_submissions(self): count = self.xform.instances.all().count() self.assertEqual(before_count - 2, count) - def test_bulk_delete_submissions_with_service_account(self): - self._make_submissions() - before_count = self.xform.instances.all().count() - view = DataViewSet.as_view({'delete': 'bulk_delete'}) - formid = self.xform.pk - submission_ids = self.xform.instances.values_list( - 'pk', flat=True - ).all()[:2] - data = {'submission_ids': list(submission_ids)} - - self._create_user_and_login(username='alice', password='alice') - # `self.user` is now alice - # Give Alice some permissions but not to delete submissions. 
- assign_perm(CAN_VIEW_XFORM, self.user, self.xform) - assign_perm(CAN_CHANGE_XFORM, self.user, self.xform) - self.extra = {'HTTP_AUTHORIZATION': f'Token {self.user.auth_token}'} - request = self.factory.delete( - '/', data=data, format='json', **self.extra, - ) - response = view(request, pk=formid) - - # Alice cannot delete submissions without `CAN_DELETE_DATA_XFORM` - self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) - - # Try the same request with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - - request = self.factory.delete( - '/', data=data, format='json', **service_account_meta - ) - response = view(request, pk=formid) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data['detail'], '2 submissions have been deleted') - count = self.xform.instances.all().count() - self.assertEqual(before_count - 2, count) - def test_update_validation_status(self): self._make_submissions() view = DataViewSet.as_view({'patch': 'validation_status'}) @@ -681,50 +556,6 @@ def test_update_validation_status(self): submission['_validation_status']['by_whom'], self.user.username # bob ) - def test_update_validation_status_with_service_account(self): - self._make_submissions() - view = DataViewSet.as_view({'patch': 'validation_status'}) - formid = self.xform.pk - dataid = self.xform.instances.all().order_by('id')[0].pk - data = { - 'validation_status.uid': 'validation_status_on_hold' - } - self._create_user_and_login(username='alice', password='alice') - # `self.user` is now alice - # Give Alice view permission but not validate. - assign_perm(CAN_VIEW_XFORM, self.user, self.xform) - self.extra = {'HTTP_AUTHORIZATION': f'Token {self.user.auth_token}'} - request = self.factory.patch( - '/', data=data, format='json', **self.extra - ) - response = view(request, pk=formid, dataid=dataid) - # 403, not 404. Alice is aware of form existence. 
- self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - # Try the same request with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - request = self.factory.patch( - '/', data=data, format='json', **service_account_meta - ) - response = view(request, pk=formid, dataid=dataid) - self.assertEqual(response.status_code, status.HTTP_200_OK) - cursor = ParsedInstance.query_mongo_minimal( - query={'_id': dataid}, - fields=None, - sort=None, - ) - submission = next(cursor) - self.assertEqual( - submission['_validation_status']['uid'], 'validation_status_on_hold' - ) - self.assertEqual( - submission['_validation_status']['by_whom'], self.user.username # alice - ) - def test_bulk_update_validation_status(self): self._make_submissions() view = DataViewSet.as_view({'patch': 'bulk_validation_status'}) @@ -756,56 +587,6 @@ def test_bulk_update_validation_status(self): submission['_validation_status']['by_whom'], self.user.username # bob ) - def test_bulk_update_validation_statuses_with_service_account(self): - self._make_submissions() - view = DataViewSet.as_view({'patch': 'bulk_validation_status'}) - formid = self.xform.pk - submission_ids = list(self.xform.instances.values_list( - 'pk', flat=True - ).all().order_by('pk')[:2]) - data = { - 'submission_ids': submission_ids, - 'validation_status.uid': 'validation_status_on_hold' - - } - - self._create_user_and_login(username='alice', password='alice') - # `self.user` is now alice - # Give Alice view permission but not validate. - assign_perm(CAN_VIEW_XFORM, self.user, self.xform) - self.extra = {'HTTP_AUTHORIZATION': f'Token {self.user.auth_token}'} - request = self.factory.patch( - '/', data=data, format='json', **self.extra, - ) - response = view(request, pk=formid) - # 403, not 404. Alice is aware of form existence. 
- self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - # Try the same request with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - request = self.factory.patch( - '/', data=data, format='json', **service_account_meta - ) - response = view(request, pk=formid) - self.assertEqual(response.status_code, status.HTTP_200_OK) - cursor = ParsedInstance.query_mongo_minimal( - query={'_id': {'$in': submission_ids}}, - fields=None, - sort=None, - ) - for submission in cursor: - self.assertEqual( - submission['_validation_status']['uid'], - 'validation_status_on_hold' - ) - self.assertEqual( - submission['_validation_status']['by_whom'], self.user.username # alice - ) - def test_cannot_access_data_of_pending_delete_xform(self): # Ensure bob is able to see their data self.test_data() diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py index adf845f94f..b625f2462f 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py @@ -4,7 +4,6 @@ from django.conf import settings from django.core.files.uploadedfile import InMemoryUploadedFile from kobo.apps.openrosa.libs.utils.guardian import assign_perm -from kobo_service_account.utils import get_request_headers from rest_framework import status from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( @@ -92,35 +91,6 @@ def test_delete_metadata(self): self.assertEqual(response.status_code, 204) self.assertEqual(count, MetaData.objects.count()) - def test_delete_metadata_with_service_account(self): - alice_meta = {'HTTP_AUTHORIZATION': f'Token {self.alice.auth_token}'} - - # Try the same request with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.alice.username) - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - - for data_type in ['supporting_doc', 'media', 'source']: - count = MetaData.objects.count() - # Add bob's metadata - self._add_form_metadata( - self.xform, data_type, self.data_value, self.path - ) - # Try to delete bob's objects with alice account, - # object should not found (we do not reveal presence of a - # non-granted object) - request = self.factory.delete('/', **alice_meta) - response = self.view(request, pk=self.metadata.pk) - self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - - # Try with service account on behalf of alice, it should work - request = self.factory.delete('/', **service_account_meta) - response = self.view(request, pk=self.metadata.pk) - self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) - self.assertEqual(count, MetaData.objects.count()) - def test_windows_csv_file_upload_to_metadata(self): data_value = 'transportation.csv' path = os.path.join(self.fixture_dir, data_value) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py index a2b1462cbc..b408b289bf 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py @@ -8,7 +8,6 @@ from rest_framework import status from rest_framework.reverse 
import reverse -from kobo_service_account.utils import get_request_headers from kobo.apps.openrosa.apps.logger.models.xform import XForm from .test_abstract_viewset import TestAbstractViewSet @@ -95,26 +94,6 @@ def test_cannot_delete_myself(self): response = self.client.delete(url) assert response.status_code == status.HTTP_404_NOT_FOUND - def test_service_account_cannot_access_user_detail(self): - self.client.logout() - url = reverse('user-detail', args=(self.alice.username,)) - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.alice.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - response = self.client.get(url, **service_account_meta) - assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED - - def test_service_account_can_delete_user(self): - self.client.logout() - url = reverse('user-detail', args=(self.alice.username,)) - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.alice.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - response = self.client.delete(url, **service_account_meta) - assert response.status_code == status.HTTP_204_NO_CONTENT - def test_only_open_rosa_endpoints_allowed_with_not_validated_password(self): # log in as bob self._login_user_and_profile() diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py index 8bc0a561b7..ea6ab542d8 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py @@ -8,7 +8,6 @@ from django.core.files.uploadedfile import InMemoryUploadedFile from django_digest.test import DigestAuth from kobo.apps.openrosa.libs.utils.guardian import assign_perm -from kobo_service_account.utils import get_request_headers from rest_framework import status from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import \ @@ -383,80 +382,9 @@ def test_post_submission_json_without_submission_key(self): auth = DigestAuth('bob', 'bobbob') request.META.update(auth(request.META, response)) response = self.view(request) - self.assertContains(response, 'No submission key provided.', - status_code=400) - - def test_edit_submission_with_service_account(self): - """ - Simulate KPI duplicating/editing feature, i.e. resubmit existing - submission with a different UUID (and a deprecatedID). - """ - - # Ensure only authenticated users can submit data - path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - '..', - 'fixtures', - 'transport_submission.json') - with open(path, 'rb') as f: - data = json.loads(f.read()) - - # Submit data as Bob - request = self.factory.post( - '/submission', data, format='json', **self.extra - ) - response = self.view(request) - self.assertEqual(response.status_code, status.HTTP_201_CREATED) - - # Create user without edit permissions ('change_xform' and 'report_xform') - alice_data = { - 'username': 'alice', - 'password1': 'alicealice', - 'password2': 'alicealice', - 'email': 'alice@localhost.com', - } - self._login_user_and_profile(alice_data) - - new_uuid = f'uuid:{uuid.uuid4()}' - data['submission']['meta'] = { - 'instanceID': new_uuid, - 'deprecatedID': data['submission']['meta']['instanceID'] - } - # New ODK form. Let's provide a uuid. 
- data['submission'].update({ - 'formhub': { - 'uuid': self.xform.uuid - } - }) - - request = self.factory.post( - '/submission', data, format='json', **self.extra - ) - response = self.view(request) - # Alice should get access forbidden. - self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - - # Try to submit with service account user on behalf of alice - service_account_meta = self.get_meta_from_headers( - get_request_headers('alice') - ) - # Test server does not provide `host` header - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - request = self.factory.post( - '/submission', data, format='json', **service_account_meta - ) - response = self.view(request) - self.assertContains(response, 'Successful submission', - status_code=status.HTTP_201_CREATED) - self.assertTrue(response.has_header('X-OpenRosa-Version')) - self.assertTrue( - response.has_header('X-OpenRosa-Accept-Content-Length') - ) - self.assertTrue(response.has_header('Date')) - self.assertEqual(response['Content-Type'], 'application/json') - self.assertEqual( - response['Location'], 'http://testserver/submission' - ) + self.assertContains( + response, 'No submission key provided.', status_code=400 + ) def test_submission_blocking_flag(self): # Set 'submissions_suspended' True in the profile metadata to test if diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py index cd79a93c39..3a77be64da 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py @@ -1,14 +1,13 @@ # coding: utf-8 import os -import re from xml.dom import Node +import pytest from django.conf import settings from django.urls import reverse -from django.test.client import Client from defusedxml import minidom from kobo.apps.openrosa.libs.utils.guardian import assign_perm -from kobo_service_account.utils import get_request_headers +from pyxform.errors import PyXFormError from rest_framework import status from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( @@ -240,69 +239,9 @@ def test_form_tags(self): self.assertEqual(response.data, []) def test_cannot_publish_xlsform_with_user_account(self): - response = self.publish_xls_form(use_service_account=False, assert_=False) + response = self.publish_xls_form(use_api=True, assert_creation=False) assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED - def test_publish_xlsform_with_service_account(self): - self.publish_xls_form(use_service_account=True, assert_=True) - - def test_publish_invalid_xls_form(self): - path = os.path.join( - settings.OPENROSA_APP_DIR, - 'apps', - 'main', - 'tests', - 'fixtures', - 'transportation', - 'transportation.bad_id.xls', - ) - - client = Client() - xform_list_url = reverse('xform-list') - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - - with open(path, 'rb') as xls_file: - post_data = {'xls_file': xls_file} - response = client.post( - xform_list_url, data=post_data, **service_account_meta - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - error_msg = '[row : 5] Question or group with no name.' 
- self.assertEqual(response.data.get('text'), error_msg) - - def test_publish_invalid_xls_form_no_choices(self): - path = os.path.join( - settings.OPENROSA_APP_DIR, - 'apps', - 'main', - 'tests', - 'fixtures', - 'transportation', - 'transportation.no_choices.xls', - ) - client = Client() - xform_list_url = reverse('xform-list') - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - - with open(path, 'rb') as xls_file: - post_data = {'xls_file': xls_file} - response = client.post( - xform_list_url, data=post_data, **service_account_meta - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - error_msg = ( - "There should be a choices sheet in this xlsform. " - "Please ensure that the choices sheet has the mandatory " - "columns 'list_name', 'name', and 'label'." - ) - self.assertEqual(response.data.get('text'), error_msg) - def test_cannot_partial_update_with_user_account(self): self.publish_xls_form() view = XFormViewSet.as_view({ @@ -310,19 +249,6 @@ def test_cannot_partial_update_with_user_account(self): }) title = 'مرحب' description = 'DESCRIPTION' - data = {'public': True, 'description': description, 'title': title, - 'downloadable': True} - - self.assertFalse(self.xform.shared) - - request = self.factory.patch('/', data=data, **self.extra) - response = view(request, pk=self.xform.id) - assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED - - def test_partial_update_with_service_account(self): - self.publish_xls_form() - title = 'مرحب' - description = 'DESCRIPTION' data = { 'public': True, 'description': description, @@ -331,117 +257,9 @@ def test_partial_update_with_service_account(self): } self.assertFalse(self.xform.shared) - alice_profile_data = { - 'username': 'alice', - 'email': 'alice@kobotoolbox.org', - 'password1': 'alice', - 'password2': 'alice', - 'name': 'Alice', - 'city': 'AliceTown', - 'country': 'CA', - 'organization': 'Alice Inc.', - 'home_page': 'alice.com', - 'twitter': 'alicetwitter' - } - alice_profile = self._create_user_profile(alice_profile_data) - self.alice = alice_profile.user - - client = Client() - xform_detail_url = reverse('xform-detail', kwargs={'pk': self.xform.id}) - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.alice.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - response = client.patch( - xform_detail_url, - data=data, - content_type='application/json', - **service_account_meta - ) - self.assertEqual(response.status_code, status.HTTP_200_OK) - self.xform.refresh_from_db() - self.assertTrue(self.xform.downloadable) - self.assertTrue(self.xform.shared) - self.assertEqual(self.xform.description, description) - self.assertEqual(response.data['public'], True) - self.assertEqual(response.data['description'], description) - self.assertEqual(response.data['title'], title) - matches = re.findall(r"([^<]+)", self.xform.xml) - self.assertTrue(len(matches) > 0) - self.assertEqual(matches[0], title) - - def test_set_form_private(self): - key = 'shared' - self.publish_xls_form() - self.xform.__setattr__(key, True) - self.xform.save() - data = {'public': False} - - self.assertTrue(self.xform.__getattribute__(key)) - client = Client() - xform_detail_url = reverse('xform-detail', kwargs={'pk': self.xform.id}) - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST 
- response = client.patch( - xform_detail_url, - data=data, - content_type='application/json', - **service_account_meta - ) - self.xform.refresh_from_db() - self.assertFalse(self.xform.__getattribute__(key)) - self.assertFalse(response.data['public']) - - def test_set_form_bad_value(self): - key = 'shared' - self.publish_xls_form() - data = {'public': 'String'} - - xform_detail_url = reverse('xform-detail', kwargs={'pk': self.xform.id}) - client = Client() - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - response = client.patch( - xform_detail_url, - data=data, - content_type='application/json', - **service_account_meta - ) - self.xform.reload() - self.assertFalse(self.xform.__getattribute__(key)) - self.assertEqual( - response.data, - {'shared': ["'String' value must be either True or False."]}, - ) - - def test_set_form_bad_key(self): - self.publish_xls_form() - self.xform.save() - view = XFormViewSet.as_view({ - 'patch': 'partial_update' - }) - data = {'nonExistentField': False} - - xform_detail_url = reverse('xform-detail', kwargs={'pk': self.xform.pk}) - client = Client() - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - response = client.patch( - xform_detail_url, - data=data, - content_type='application/json', - **service_account_meta - ) - assert response.status_code == status.HTTP_200_OK - self.xform.reload() - self.assertFalse(self.xform.shared) - self.assertFalse(response.data['public']) + request = self.factory.patch('/', data=data, **self.extra) + response = view(request, pk=self.xform.id) + assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED def test_cannot_form_delete_with_user_account(self): self.publish_xls_form() @@ -450,21 +268,6 @@ def test_cannot_form_delete_with_user_account(self): response = self.client.delete(xform_detail_url, **self.extra) assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED - def test_form_delete(self): - self.publish_xls_form() - self.xform.save() - xform_detail_url = reverse('xform-detail', kwargs={'pk': self.xform.pk}) - client = Client() - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - response = client.delete(xform_detail_url, **service_account_meta) - self.assertEqual(response.data, None) - self.assertEqual(response.status_code, 204) - with self.assertRaises(XForm.DoesNotExist): - self.xform.reload() - def test_xform_serializer_none(self): data = { 'title': '', @@ -493,11 +296,18 @@ def test_cannot_publish_id_string_starting_with_number(self): 'title': '2011_07_25_transportation', } - xls_path = os.path.join(settings.OPENROSA_APP_DIR, 'apps', 'main', 'tests', - 'fixtures', 'transportation', - 'transportation.id_starts_with_num.xls') + xls_path = os.path.join( + settings.OPENROSA_APP_DIR, + 'apps', + 'main', + 'tests', + 'fixtures', + 'transportation', + 'transportation.id_starts_with_num.xls', + ) count = XForm.objects.count() - response = self.publish_xls_form(xls_path, data, assert_=False) - self.assertTrue('Names must begin with a letter' in response.content.decode()) - self.assertEqual(response.status_code, 400) + with pytest.raises(PyXFormError) as e: + self.publish_xls_form(xls_path, data) + assert 'Names must begin with a letter' in str(e) + 
self.assertEqual(XForm.objects.count(), count) diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py index 4f93cc290b..34fc551cf7 100644 --- a/kobo/apps/openrosa/apps/api/tools.py +++ b/kobo/apps/openrosa/apps/api/tools.py @@ -15,7 +15,6 @@ HttpResponseRedirect, ) from django.utils.translation import gettext as t -from kobo_service_account.utils import get_request_headers from rest_framework import exceptions from rest_framework.request import Request from taggit.forms import TagField @@ -158,15 +157,11 @@ def get_media_file_response( # When `request.user` is authenticated, their authentication is lost with # an HTTP redirection. We use KoBoCAT to proxy the response from KPI - headers = {} - if not request.user.is_anonymous: - headers = get_request_headers(request.user.username) - # Send the request internally to avoid extra traffic on the public interface internal_url = metadata.data_value.replace( settings.KOBOFORM_URL, settings.KOBOFORM_INTERNAL_URL ) - response = requests.get(internal_url, headers=headers) + response = requests.get(internal_url) return HttpResponse( content=response.content, diff --git a/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py index dcf4e09dd6..7dd379e958 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py @@ -1,7 +1,5 @@ # coding: utf-8 from django.conf import settings -from kobo_service_account.models import ServiceAccountUser -from kobo_service_account.utils import get_real_user from rest_framework.response import Response from kobo.apps.openrosa.apps.api.permissions import ConnectViewsetPermissions @@ -10,6 +8,7 @@ from kobo.apps.openrosa.libs.serializers.user_profile_serializer import ( UserProfileWithTokenSerializer ) +from kpi.utils.object_permission import get_database_user from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet @@ -59,11 +58,7 @@ def list(self, request, *args, **kwargs): # login(request, request.user) session.set_expiry(settings.DEFAULT_SESSION_EXPIRY_TIME) - user = ( - get_real_user(request) - if isinstance(request.user, ServiceAccountUser) - else request.user - ) + user = get_database_user(request.user) serializer = UserProfileWithTokenSerializer( instance=UserProfile.objects.get_or_create(user=user)[0], diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index 46ef0320e1..be8402cc8f 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -4,8 +4,6 @@ from django.http import Http404 from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as t -from kobo_service_account.models import ServiceAccountUser -from kobo_service_account.utils import get_real_user from rest_framework import status from rest_framework.decorators import action from rest_framework.response import Response @@ -57,6 +55,7 @@ EnketoError, get_enketo_submission_url, ) +from kpi.utils.object_permission import get_database_user from ..utils.rest_framework.viewsets import OpenRosaModelViewSet SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS'] @@ -406,11 +405,6 @@ class DataViewSet(AnonymousUserPublicFormsMixin, OpenRosaModelViewSet): queryset = XForm.objects.all() def get_queryset(self): - if isinstance(self.request.user, ServiceAccountUser): - # We need to get all xforms (even soft-deleted ones) to - # system-account 
user to let it delete data - # when the xform is already soft-deleted. - self.queryset = XForm.all_objects.all() return super().get_queryset() def bulk_delete(self, request, *args, **kwargs): @@ -444,7 +438,7 @@ def bulk_delete(self, request, *args, **kwargs): def bulk_validation_status(self, request, *args, **kwargs): xform = self.get_object() - real_user = get_real_user(request) + real_user = get_database_user(request.user) try: updated_records_count = set_instance_validation_statuses( @@ -560,7 +554,7 @@ def validation_status(self, request, *args, **kwargs): data = {} if request.method != 'GET': - username = get_real_user(request).username + username = get_database_user(request.user).username validation_status_uid = request.data.get('validation_status.uid') if ( request.method == 'PATCH' diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py index 37fdcef1b5..dc8b816d21 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py @@ -4,7 +4,6 @@ from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as t -from kobo_service_account.utils import get_real_user from rest_framework import permissions from rest_framework import status from rest_framework import mixins @@ -28,6 +27,7 @@ UnauthenticatedEditAttempt, ) from kpi.authentication import DigestAuthentication +from kpi.utils.object_permission import get_database_user from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet xml_error_re = re.compile('>(.*)<') @@ -175,7 +175,8 @@ def create(self, request, *args, **kwargs): _ = get_object_or_404(User, username=username.lower()) elif not username: # get the username from the user if not set - username = request.user and get_real_user(request).username + user = get_database_user(request.user) + username = user.username if request.method.upper() == 'HEAD': return Response(status=status.HTTP_204_NO_CONTENT, diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py index cf9cfa49c1..60e06d9cd7 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py @@ -6,8 +6,6 @@ from django.http import Http404, HttpResponseBadRequest from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as t -from kobo_service_account.models import ServiceAccountUser -from kobo_service_account.utils import get_real_user from rest_framework import exceptions from rest_framework import status from rest_framework.decorators import action @@ -40,6 +38,7 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) +from kpi.utils.object_permission import get_database_user from ..utils.rest_framework.viewsets import OpenRosaModelViewSet EXPORT_EXT = { @@ -120,7 +119,7 @@ def _get_user(username): def _get_owner(request): - owner = request.data.get('owner') or get_real_user(request) + owner = request.data.get('owner') or get_database_user(request.user) if isinstance(owner, str): owner = _get_user(owner) @@ -577,11 +576,6 @@ def create(self, request, *args, **kwargs): return Response(survey, status=status.HTTP_400_BAD_REQUEST) def get_queryset(self): - if isinstance(self.request.user, ServiceAccountUser): - # We need to get all xforms (even soft-deleted ones) to - # system-account user to let it delete xform - # 
when it is already soft-deleted. - self.queryset = XForm.all_objects.all() return super().get_queryset() def update(self, request, pk, *args, **kwargs): @@ -592,7 +586,7 @@ def update(self, request, pk, *args, **kwargs): # Behave like `kobo.apps.openrosa.apps.main.views.update_xform`: only allow # the update to proceed if the user is the owner owner = existing_xform.user - if not get_real_user(request) == owner: + if not get_database_user(request.user) == owner: raise exceptions.PermissionDenied( detail=t("Only a form's owner can overwrite its contents") ) diff --git a/kobo/apps/openrosa/apps/logger/app.py b/kobo/apps/openrosa/apps/logger/app.py index 725a84a69f..e189993c2c 100644 --- a/kobo/apps/openrosa/apps/logger/app.py +++ b/kobo/apps/openrosa/apps/logger/app.py @@ -9,8 +9,4 @@ class LoggerAppConfig(AppConfig): def ready(self): # Makes sure all signal handlers are connected from . import signals - # Monkey patch reversion package to insert real user in DB instead of - # system account superuser. - from kobo_service_account.utils import reversion_monkey_patch - reversion_monkey_patch() super().ready() diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index ceb8614082..9a53b64b0c 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -16,10 +16,8 @@ from taggit.managers import TaggableManager from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.fields import LazyDefaultBooleanField from kobo.apps.openrosa.apps.logger.xform_instance_parser import XLSFormError from kobo.apps.openrosa.koboform.pyxform_utils import convert_csv_to_xls -from kobo.apps.openrosa.libs.models.base_model import BaseModel from kobo.apps.openrosa.libs.constants import ( CAN_ADD_SUBMISSIONS, CAN_VALIDATE_XFORM, @@ -51,7 +49,7 @@ class XFormAllManager(models.Manager): pass -class XForm(BaseModel): +class XForm(models.Model): CLONED_SUFFIX = '_cloned' MAX_ID_LENGTH = 100 diff --git a/kobo/apps/openrosa/apps/logger/tests/test_parsing.py b/kobo/apps/openrosa/apps/logger/tests/test_parsing.py index 7c9c948b75..a80c001373 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_parsing.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_parsing.py @@ -28,7 +28,6 @@ def _publish_and_submit_new_repeats(self): "../fixtures/new_repeats/new_repeats.xls" ) self._publish_xls_file_and_set_xform(xls_file_path) - self.assertEqual(self.response.status_code, 201) # submit an instance xml_submission_file_path = os.path.join( @@ -37,7 +36,6 @@ def _publish_and_submit_new_repeats(self): "new_repeats_2012-07-05-14-33-53.xml" ) self._make_submission(xml_submission_file_path) - self.assertEqual(self.response.status_code, 201) # load xml file to parse and compare xml_file = open(xml_submission_file_path) @@ -47,7 +45,7 @@ def _publish_and_submit_new_repeats(self): def test_parse_xform_nested_repeats(self): self._publish_and_submit_new_repeats() parser = XFormInstanceParser(self.xml, self.xform.data_dictionary()) - dict = parser.to_dict() + dict_ = parser.to_dict() expected_dict = { 'new_repeats': { 'info': @@ -70,7 +68,7 @@ def test_parse_xform_nested_repeats(self): 'gps': '-1.2627557 36.7926442 0.0 30.0' } } - self.assertEqual(dict, expected_dict) + self.assertEqual(dict_, expected_dict) flat_dict = parser.to_flat_dict() expected_flat_dict = { @@ -160,7 +158,6 @@ def test_parse_xform_nested_repeats_multiple_nodes(self): "../fixtures/new_repeats/new_repeats.xls" ) 
self._publish_xls_file_and_set_xform(xls_file_path) - self.assertEqual(self.response.status_code, 201) # submit an instance xml_submission_file_path = os.path.join( @@ -169,7 +166,6 @@ def test_parse_xform_nested_repeats_multiple_nodes(self): "multiple_nodes_error.xml" ) self._make_submission(xml_submission_file_path) - self.assertEqual(201, self.response.status_code) def test_xml_repeated_group_to_dict(self): xml_file = os.path.join( diff --git a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py index a0dedfcc64..d5f315fb79 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py @@ -3,14 +3,16 @@ import sys import unittest +import pytest +from django.conf import settings from django.core.management import call_command from django.core.management.base import CommandError +from pyxform.errors import PyXFormError from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.libs.utils.logger_tools import report_exception - class TestPublishXLS(TestBase): def test_publish_xls(self): @@ -91,3 +93,39 @@ def test_report_exception_without_exc_info(self): report_exception(subject="Test report exception", info=e) except Exception as e: raise AssertionError("%s" % e) + + def test_publish_invalid_xls_form(self): + path = os.path.join( + settings.OPENROSA_APP_DIR, + 'apps', + 'main', + 'tests', + 'fixtures', + 'transportation', + 'transportation.bad_id.xls', + ) + + with pytest.raises(PyXFormError) as e: + self._publish_xls_file(path) + assert '[row : 5] Question or group with no name.' in str(e) + + def test_publish_invalid_xls_form_no_choices(self): + path = os.path.join( + settings.OPENROSA_APP_DIR, + 'apps', + 'main', + 'tests', + 'fixtures', + 'transportation', + 'transportation.no_choices.xls', + ) + + with pytest.raises(PyXFormError) as e: + self._publish_xls_file(path) + + error_msg = ( + "There should be a choices sheet in this xlsform. " + "Please ensure that the choices sheet has the mandatory " + "columns 'list_name', 'name', and 'label'." 
+ ) + assert error_msg in str(e) diff --git a/kobo/apps/openrosa/apps/main/tests/test_base.py b/kobo/apps/openrosa/apps/main/tests/test_base.py index eff0e9fd47..be22d72e3f 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_base.py +++ b/kobo/apps/openrosa/apps/main/tests/test_base.py @@ -7,12 +7,11 @@ from django.conf import settings from django.contrib.auth.models import AnonymousUser, Permission +from django.core.files.base import ContentFile from django.test import TestCase from django.test.client import Client from django.utils import timezone from django_digest.test import Client as DigestClient -from kobo_service_account.utils import get_request_headers -from rest_framework.reverse import reverse from rest_framework.test import APIRequestFactory from kobo.apps.kobo_auth.shortcuts import User @@ -21,6 +20,9 @@ from kobo.apps.openrosa.libs.tests.mixins.make_submission_mixin import MakeSubmissionMixin from kobo.apps.openrosa.libs.tests.mixins.request_mixin import RequestMixin from kobo.apps.openrosa.libs.utils.string import base64_encodestring +from kobo.apps.openrosa.libs.utils.logger_tools import ( + publish_xls_form, +) class TestBase(RequestMixin, MakeSubmissionMixin, TestCase): @@ -85,48 +87,29 @@ def _create_user_and_login(self, username="bob", password="bob"): self.client = self._login(username, password) self.anon = Client() - def _publish_xls_file(self, path, use_service_account=True): - - xform_list_url = reverse('xform-list') - + def _publish_xls_file(self, path): + # API does not support project creation anymore if not path.startswith(f'/{self.user.username}/'): path = os.path.join(self.this_directory, path) - if use_service_account: - # Only service account user is allowed to `POST` to XForm API - client = Client() - service_account_meta = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - service_account_meta['HTTP_HOST'] = settings.TEST_HTTP_HOST - else: - # For test purposes we want to try to `POST` with current logged-in - # user - client = self.client - service_account_meta = {} + with open(path, 'rb') as f: + xls_file = ContentFile(f.read(), name=os.path.basename(path)) - with open(path, 'rb') as xls_file: - post_data = {'xls_file': xls_file} - response = client.post( - xform_list_url, - data=post_data, - **service_account_meta, - ) - return response + return publish_xls_form(xls_file, self.user) def _publish_xlsx_file(self): path = os.path.join(self.this_directory, 'fixtures', 'exp.xlsx') pre_count = XForm.objects.count() - response = TestBase._publish_xls_file(self, path) + TestBase._publish_xls_file(self, path) # make sure publishing the survey worked - self.assertEqual(response.status_code, 201) self.assertEqual(XForm.objects.count(), pre_count + 1) def _publish_xls_file_and_set_xform(self, path): count = XForm.objects.count() - self.response = self._publish_xls_file(path) + xform = self._publish_xls_file(path) self.assertEqual(XForm.objects.count(), count + 1) self.xform = XForm.objects.order_by('pk').reverse()[0] + assert self.xform.pk == xform.pk def _share_form_data(self, id_string='transportation_2011_07_25'): xform = XForm.objects.get(id_string=id_string) diff --git a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py index 156256a74d..5589e19a13 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py +++ b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py @@ -1,6 +1,9 @@ # coding: utf-8 import os +import pytest +from pyxform.errors import PyXFormError + from 
kobo.apps.openrosa.apps.logger.models import XForm, Instance from .test_base import TestBase @@ -14,20 +17,29 @@ class TestInputs(TestBase): def test_uniqueness_of_group_names_enforced(self): pre_count = XForm.objects.count() self._create_user_and_login() - response = self._publish_xls_file( - 'fixtures/group_names_must_be_unique.xls') - message = 'The name "group_names_must_be_unique" is the same as the form name' - self.assertTrue(message in response.json()['text']) - self.assertEqual(XForm.objects.count(), pre_count) - def test_mch(self): - self._publish_xls_file('fixtures/bug_fixes/MCH_v1.xls') + with pytest.raises(PyXFormError) as e: + self._publish_xls_file( + 'fixtures/group_names_must_be_unique.xls' + ) + assert 'The name "group_names_must_be_unique" is the same as the form name' in str(e) + + assert XForm.objects.count() == pre_count + + def test_buggy_files(self): + message = "Unknown question type 'Select one from source'" + with pytest.raises(PyXFormError) as e: + self._publish_xls_file( + os.path.join('fixtures/bug_fixes/MCH_v1.xls') + ) + assert message == str(e) - def test_erics_files(self): - for name in ['battery_life.xls', - 'enumerator_weekly.xls', - 'Enumerator_Training_Practice_Survey.xls']: - self._publish_xls_file(os.path.join('fixtures', 'bug_fixes', name)) + message = 'Duplicate column header: label' + with pytest.raises(PyXFormError) as e: + self._publish_xls_file( + os.path.join('fixtures', 'bug_fixes', 'enumerator_weekly.xls') + ) + assert message == str(e) class TestSubmissionBugs(TestBase): @@ -38,8 +50,15 @@ def test_submission_with_mixed_case_username(self): count = Instance.objects.count() self._make_submission( os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'instances', s, s + '.xml'), 'BoB') + self.this_directory, + 'fixtures', + 'transportation', + 'instances', + s, + s + '.xml', + ), + 'BoB', + ) self.assertEqual(Instance.objects.count(), count + 1) diff --git a/kobo/apps/openrosa/apps/main/tests/test_process.py b/kobo/apps/openrosa/apps/main/tests/test_process.py index 893278c8d5..c86efb9a9c 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_process.py +++ b/kobo/apps/openrosa/apps/main/tests/test_process.py @@ -59,11 +59,24 @@ def _update_dynamic_data(self): {UUID: uuid}, {'$set': {SUBMISSION_TIME: submission_time}}) def test_uuid_submit(self): - self._publish_xls_file() + xls_path = os.path.join( + self.this_directory, + 'fixtures', + 'transportation', + 'transportation.xls', + ) + self._publish_file(xls_path) + self.assertEqual(self.xform.id_string, 'transportation_2011_07_25') + survey = 'transport_2011-07-25_19-05-49' path = os.path.join( - self.this_directory, 'fixtures', 'transportation', - 'instances', survey, survey + '.xml') + self.this_directory, + 'fixtures', + 'transportation', + 'instances', + survey, + survey + '.xml', + ) with open(path, 'rb') as f: post_data = {'xml_submission_file': f, 'uuid': self.xform.uuid} url = '/submission' @@ -98,9 +111,7 @@ def _publish_file(self, xls_path, strict=True): Returns False if not strict and publish fails """ pre_count = XForm.objects.count() - self.response = TestBase._publish_xls_file(self, xls_path) - # make sure publishing the survey worked - self.assertEqual(self.response.status_code, 201) + TestBase._publish_xls_file(self, xls_path) if XForm.objects.count() != pre_count + 1: # print file location print('\nPublish Failure for file: %s' % xls_path) @@ -111,12 +122,6 @@ def _publish_file(self, xls_path, strict=True): self.xform = list(XForm.objects.all())[-1] return True - 
def _publish_xls_file(self): - xls_path = os.path.join(self.this_directory, "fixtures", - "transportation", "transportation.xls") - self._publish_file(xls_path) - self.assertEqual(self.xform.id_string, "transportation_2011_07_25") - def _check_formList(self): url = '/%s/formList' % self.user.username client = DigestClient() @@ -365,11 +370,11 @@ def test_publish_bad_xls_with_unicode_in_error(self): """ self._create_user_and_login() path = os.path.join( - self.this_directory, 'fixtures', - 'form_with_unicode_in_relevant_column.xlsx') - response = TestBase._publish_xls_file(self, path) - # make sure we get a 200 response - self.assertEqual(response.status_code, 201) + self.this_directory, + 'fixtures', + 'form_with_unicode_in_relevant_column.xlsx', + ) + self._publish_xls_file(path) def test_metadata_file_hash(self): self._publish_transportation_form() @@ -392,10 +397,12 @@ def test_uuid_injection_in_cascading_select(self): """ pre_count = XForm.objects.count() xls_path = os.path.join( - self.this_directory, "fixtures", "cascading_selects", - "new_cascading_select.xls") - file_name, file_ext = os.path.splitext(os.path.split(xls_path)[1]) - self.response = TestBase._publish_xls_file(self, xls_path) + self.this_directory, + 'fixtures', + 'cascading_selects', + 'new_cascading_select.xls', + ) + self._publish_xls_file(xls_path) post_count = XForm.objects.count() self.assertEqual(post_count, pre_count + 1) xform = XForm.objects.latest('date_created') diff --git a/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py b/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py index 701de0b121..4d0d88a9db 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py +++ b/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py @@ -1,6 +1,9 @@ # coding: utf-8 import os +import pytest +from django.db.utils import IntegrityError + from kobo.apps.openrosa.apps.logger.models import XForm from .test_base import TestBase @@ -14,25 +17,22 @@ def test_unique_together(self): """ self._create_user_and_login() self.this_directory = os.path.dirname(__file__) - xls_path = os.path.join(self.this_directory, - "fixtures", "gps", "gps.xls") + xls_path = os.path.join( + self.this_directory, 'fixtures', 'gps', 'gps.xls' + ) # first time - response = self._publish_xls_file(xls_path) + self._publish_xls_file(xls_path) self.assertEqual(XForm.objects.count(), 1) # second time - response = self._publish_xls_file(xls_path) - # SQLite returns `UNIQUE constraint failed` whereas PostgreSQL - # returns 'duplicate key ... 
violates unique constraint' - self.assertIn( - 'unique constraint', - response.json()['text'].lower(), - ) + with pytest.raises(IntegrityError) as e: + self._publish_xls_file(xls_path) + assert 'duplicate key value violates unique constraint' in str(e) + self.assertEqual(XForm.objects.count(), 1) - self.client.logout() # first time - self._create_user_and_login(username="carl", password="carl") - response = self._publish_xls_file(xls_path) + self._create_user_and_login(username='carl', password='carl') + self._publish_xls_file(xls_path) self.assertEqual(XForm.objects.count(), 2) diff --git a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py index 4afd88a3f3..d4f624dd36 100644 --- a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py +++ b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py @@ -376,9 +376,10 @@ def save(self, asynchronous=False, *args, **kwargs): instance_id=self.instance_id ).values_list('instance__xform__kpi_asset_uid', flat=True) if not (asset_uid := records[0]): - logging.warning( - f'ParsedInstance #: {self.pk} - XForm is not linked with Asset' - ) + if not settings.TESTING: + logging.warning( + f'ParsedInstance #: {self.pk} - XForm is not linked with Asset' + ) else: call_services(asset_uid, self.instance_id) diff --git a/kobo/apps/openrosa/libs/filters.py b/kobo/apps/openrosa/libs/filters.py index 0a7f756b12..e28dcec9bd 100644 --- a/kobo/apps/openrosa/libs/filters.py +++ b/kobo/apps/openrosa/libs/filters.py @@ -1,4 +1,6 @@ # coding: utf-8 +from django.core.exceptions import ObjectDoesNotExist +from django.http import Http404 from django.shortcuts import get_object_or_404 from rest_framework import filters from rest_framework.exceptions import ParseError @@ -51,13 +53,12 @@ def filter_queryset(self, request, queryset, view): # Queryset cannot be narrowed down for anonymous and superusers because # they do not have object level permissions (actually a superuser could - # have object level permissions but `ServiceAccountUser` does not). + # have object level permission). # Thus, we return queryset immediately even if it is a larger subset and # some of its objects are not allowed to accessed by `request.user`. # We need to avoid `guardian` filter to allow: # - anonymous user to see public data - # - ServiceAccountUser to take actions on all objects on behalf of the - # real user who is making the call to the API. + # - superuser to take actions on all objects. # The permissions validation is handled by the permission classes and # should deny access to forbidden data. if request.user.is_anonymous or request.user.is_superuser: diff --git a/kobo/apps/openrosa/libs/models/base_model.py b/kobo/apps/openrosa/libs/models/base_model.py deleted file mode 100644 index 63190832ef..0000000000 --- a/kobo/apps/openrosa/libs/models/base_model.py +++ /dev/null @@ -1,17 +0,0 @@ -# coding: utf-8 -import logging - -from django.db import models - - -class BaseModel(models.Model): - class Meta: - abstract = True - - def reload(self): - """ - Alias of `refresh_from_db()`. - Deprecated. - """ - logging.warning('Deprecated method. 
Use `refresh_from_db()` instead') - self.refresh_from_db() diff --git a/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py b/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py index e508ec7710..99c0089b9a 100644 --- a/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py +++ b/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py @@ -4,15 +4,16 @@ from tempfile import NamedTemporaryFile from typing import Union -from django.conf import settings from django.contrib.auth import authenticate from django_digest.test import DigestAuth -from kobo_service_account.utils import get_request_headers from rest_framework import status from rest_framework.test import APIRequestFactory from kobo.apps.openrosa.apps.api.viewsets.xform_submission_api import XFormSubmissionApi from kobo.apps.openrosa.apps.logger.models import Instance, XForm +from kobo.apps.openrosa.libs.utils.logger_tools import ( + safe_create_instance, +) class MakeSubmissionMixin: @@ -46,62 +47,80 @@ def _make_submission( forced_submission_time: bool = None, auth: Union[DigestAuth, bool] = None, media_file: 'io.BufferedReader' = None, - use_service_account: bool = False, assert_success: bool = True, + use_api: bool = True, ): """ Pass `auth=False` for an anonymous request, or omit `auth` to perform - the submission as 'bob' + the submission as 'bob'. + + if `use_api` is False, it adds submission directly without POSTing to + the API. It is useful for edits which are not allowed anymore through + the API. """ - # store temporary file with dynamic uuid - self.factory = APIRequestFactory() + if add_uuid: + path = self._add_uuid_to_submission_xml(path, self.xform) - if auth is None and not use_service_account: - auth = DigestAuth('bob', 'bob') + if not use_api: + class FakeRequest: + pass - extras = {} - if use_service_account: - extras = self.get_meta_from_headers( - get_request_headers(self.user.username) - ) - extras['HTTP_HOST'] = settings.TEST_HTTP_HOST + request = FakeRequest() + request.user = self.user - if add_uuid: - path = self._add_uuid_to_submission_xml(path, self.xform) + with open(path, 'rb') as xml_submission_file: - with open(path, 'rb') as f: - post_data = {'xml_submission_file': f} + error, instance = safe_create_instance( + username=username, + xml_file=xml_submission_file, + media_files=[media_file], + request=request, + ) - if media_file is not None: - post_data['media_file'] = media_file + if assert_success: + assert error is None and isinstance(instance, Instance) + + else: + + # store temporary file with dynamic uuid + self.factory = APIRequestFactory() - if username is None: - username = self.user.username + if auth is None: + auth = DigestAuth('bob', 'bob') - url_prefix = f'{username}/' if username else '' - url = f'/{url_prefix}submission' - request = self.factory.post(url, post_data, **extras) - if auth: - request.user = authenticate(username=auth.username, - password=auth.password) - self.response = None # Reset in case error in viewset below - self.response = self.submission_view(request, username=username) + with open(path, 'rb') as f: + post_data = {'xml_submission_file': f} - if auth and self.response.status_code == 401: - f.seek(0) if media_file is not None: - media_file.seek(0) + post_data['media_file'] = media_file + if username is None: + username = self.user.username + + url_prefix = f'{username}/' if username else '' + url = f'/{url_prefix}submission' request = self.factory.post(url, post_data) - request.META.update(auth(request.META, self.response)) + if auth: + 
request.user = authenticate(username=auth.username, + password=auth.password) + self.response = None # Reset in case error in viewset below self.response = self.submission_view(request, username=username) - if assert_success: - assert self.response.status_code in [ - status.HTTP_200_OK, - status.HTTP_201_CREATED, - status.HTTP_202_ACCEPTED, - ] + if auth and self.response.status_code == 401: + f.seek(0) + if media_file is not None: + media_file.seek(0) + + request = self.factory.post(url, post_data) + request.META.update(auth(request.META, self.response)) + self.response = self.submission_view(request, username=username) + + if assert_success: + assert self.response.status_code in [ + status.HTTP_200_OK, + status.HTTP_201_CREATED, + status.HTTP_202_ACCEPTED, + ] if forced_submission_time: instance = Instance.objects.order_by('-pk').all()[0] diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index d04002dcca..0950733991 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -32,7 +32,6 @@ from django.utils import timezone as dj_timezone from django.utils.encoding import DjangoUnicodeDecodeError, smart_str from django.utils.translation import gettext as t -from kobo_service_account.utils import get_real_user from modilabs.utils.subprocess_timeout import ProcessTimedOut from pyxform.errors import PyXFormError from pyxform.xform2json import create_survey_element_from_xml @@ -83,6 +82,8 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) +from kpi.utils.object_permission import get_database_user + OPEN_ROSA_VERSION_HEADER = 'X-OpenRosa-Version' HTTP_OPEN_ROSA_VERSION_HEADER = 'HTTP_X_OPENROSA_VERSION' @@ -791,7 +792,7 @@ def _get_instance( instance.save() else: submitted_by = ( - get_real_user(request) + get_database_user(request.user) if request and request.user.is_authenticated else None ) diff --git a/kobo/apps/openrosa/libs/utils/middleware.py b/kobo/apps/openrosa/libs/utils/middleware.py index d0c1e0df65..7839eeae48 100644 --- a/kobo/apps/openrosa/libs/utils/middleware.py +++ b/kobo/apps/openrosa/libs/utils/middleware.py @@ -16,7 +16,6 @@ from django.utils.deprecation import MiddlewareMixin from django.utils.translation import gettext as t from django.utils.translation.trans_real import parse_accept_lang_header -from kobo_service_account.models import ServiceAccountUser from kobo.apps.openrosa.libs.http import JsonResponseForbidden, XMLResponseForbidden @@ -85,9 +84,6 @@ def process_response(self, request, response): if not request.user.is_authenticated: return response - if isinstance(request.user, ServiceAccountUser): - return response - if self._skipped_view: return response diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 715d2badc3..30dc1cbb06 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -905,7 +905,6 @@ def __init__(self, *args, **kwargs): 'kpi.authentication.BasicAuthentication', 'kpi.authentication.TokenAuthentication', 'oauth2_provider.contrib.rest_framework.OAuth2Authentication', - 'kobo_service_account.authentication.ServiceAccountAuthentication', ], 'DEFAULT_RENDERER_CLASSES': [ 'rest_framework.renderers.JSONRenderer', @@ -942,7 +941,6 @@ def __init__(self, *args, **kwargs): # Session if it comes first (which bypass BasicAuthentication and MFA validation) 'kobo.apps.openrosa.libs.authentication.HttpsOnlyBasicAuthentication', 'kpi.authentication.SessionAuthentication', - 
'kobo_service_account.authentication.ServiceAccountAuthentication', ], 'DEFAULT_RENDERER_CLASSES': [ # Keep JSONRenderer at the top "in order to send JSON responses to diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 97e8d8d6e7..83c906194a 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -38,7 +38,10 @@ remove_validation_status_from_instance, set_instance_validation_statuses, ) -from kobo.apps.openrosa.libs.utils.logger_tools import safe_create_instance, publish_xls_form +from kobo.apps.openrosa.libs.utils.logger_tools import ( + safe_create_instance, + publish_xls_form, +) from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter from kpi.constants import ( diff --git a/kpi/permissions.py b/kpi/permissions.py index c668ef7f74..2652b52bbc 100644 --- a/kpi/permissions.py +++ b/kpi/permissions.py @@ -5,7 +5,6 @@ from django.conf import settings from django.http import Http404 -from kobo_service_account.utils import get_real_user from rest_framework import exceptions, permissions from rest_framework.permissions import IsAuthenticated as DRFIsAuthenticated @@ -475,7 +474,7 @@ def has_object_permission(self, request, view, obj): except (DeploymentNotFound, AttributeError): require_auth = True - real_user = get_real_user(request) + real_user = request.user # If authentication is required, `request.user` should have # 'add_submission' permission on `obj` From 3e1d9d13305493cf20e49aa3e5412a4c68365064 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 6 Aug 2024 09:01:04 -0400 Subject: [PATCH 021/119] Fix bug on redeploy --- kpi/deployment_backends/openrosa_backend.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 97e8d8d6e7..bc95174140 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -835,11 +835,9 @@ def redeploy(self, active=None): XForm.objects.filter(pk=self.xform.id).update( downloadable=active, title=self.asset.name, - has_kpi_hooks=self.asset.has_active_hooks, ) self.xform.downloadable = active self.xform.title = self.asset.name - self.xform.has_kpi_hooks = self.asset.has_active_hooks publish_xls_form(xlsx_file, self.asset.owner, self.xform.id_string) From 276d8e9a308803ae6847e02d73c38f6e3480a5fb Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 6 Aug 2024 09:01:04 -0400 Subject: [PATCH 022/119] Fix bug on redeploy --- kpi/deployment_backends/openrosa_backend.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 83c906194a..ca0c93e3e7 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -838,11 +838,9 @@ def redeploy(self, active=None): XForm.objects.filter(pk=self.xform.id).update( downloadable=active, title=self.asset.name, - has_kpi_hooks=self.asset.has_active_hooks, ) self.xform.downloadable = active self.xform.title = self.asset.name - self.xform.has_kpi_hooks = self.asset.has_active_hooks publish_xls_form(xlsx_file, self.asset.owner, self.xform.id_string) From 726fb9a81e3d57eeccd7cc121d97f2da20b533fa Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 6 Aug 2024 10:33:11 -0400 Subject: [PATCH 023/119] Refactor MockDeploymentBackend: fix unit tests --- 
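Context for this refactor: with `kobo_service_account` removed, the test helpers no longer POST to the XForm API as a service-account user; they call the OpenRosa logger utilities directly. A minimal sketch of that flow, assuming the helper signatures introduced in this patch (`publish_xls_form(xls_file, user)` and `safe_create_instance(username, xml_file, media_files, request=...)`); the `publish_fixture` wrapper below is illustrative only, not part of the change itself:

    import os

    from django.core.files.base import ContentFile

    from kobo.apps.openrosa.libs.utils.logger_tools import publish_xls_form

    def publish_fixture(path, user):
        # Read the XLSForm fixture from disk and publish it on behalf of
        # `user`, bypassing the removed service-account API endpoint.
        with open(path, 'rb') as f:
            xls_file = ContentFile(f.read(), name=os.path.basename(path))
        return publish_xls_form(xls_file, user)

This mirrors what `TestBase._publish_xls_file()` does below, and the same idea applies to submissions via `MakeSubmissionMixin._make_submission(use_api=False)`, which feeds the XML straight into `safe_create_instance()`.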
kobo/apps/hook/tests/hook_test_case.py | 29 +- kobo/apps/hook/tests/test_parser.py | 49 +- kobo/apps/kobo_auth/__init__.py | 1 + kobo/apps/kobo_auth/signals.py | 58 ++ .../apps/api/viewsets/xform_submission_api.py | 19 +- .../openrosa/apps/logger/models/instance.py | 23 +- .../apps/openrosa/apps/logger/models/xform.py | 3 +- kobo/apps/openrosa/apps/logger/signals.py | 2 +- .../logger/tests/test_simple_submission.py | 3 +- .../openrosa/apps/logger/utils/instance.py | 35 +- .../apps/logger/xform_instance_parser.py | 14 +- .../openrosa/apps/main/models/meta_data.py | 3 +- kobo/apps/openrosa/libs/utils/logger_tools.py | 58 +- kobo/apps/openrosa/libs/utils/string.py | 17 + .../tests/api/v2/test_api.py | 12 +- .../stripe/tests/test_organization_usage.py | 26 +- .../tests/test_submission_extras_api_post.py | 13 +- .../tests/test_submission_stream.py | 7 +- kobo/apps/subsequences/utils/__init__.py | 39 +- kobo/apps/trackers/submission_utils.py | 82 +- kobo/apps/trackers/tests/test_trackers.py | 2 +- kobo/settings/base.py | 6 - kobo/settings/testing.py | 4 +- kpi/deployment_backends/base_backend.py | 5 + kpi/deployment_backends/mock_backend.py | 858 +++--------------- kpi/deployment_backends/openrosa_backend.py | 72 +- kpi/fixtures/test_data.json | 27 + kpi/signals.py | 51 -- kpi/tests/api/v1/test_api_assets.py | 12 +- kpi/tests/api/v1/test_api_submissions.py | 42 +- kpi/tests/api/v2/test_api_asset_counts.py | 9 + kpi/tests/api/v2/test_api_asset_usage.py | 13 +- kpi/tests/api/v2/test_api_attachments.py | 154 +--- kpi/tests/api/v2/test_api_service_usage.py | 86 +- kpi/tests/api/v2/test_api_submissions.py | 575 +++++++----- kpi/tests/kpi_test_case.py | 5 +- kpi/tests/test_asset_versions.py | 23 +- kpi/tests/test_deployment_backends.py | 45 +- kpi/tests/test_mock_data.py | 606 +++++++++---- ...t_mock_data_conflicting_version_exports.py | 3 +- kpi/tests/test_mock_data_exports.py | 134 +-- kpi/tests/test_mongo_helper.py | 17 +- kpi/tests/utils/dicts.py | 35 + kpi/tests/utils/mock.py | 28 +- kpi/tests/utils/xml.py | 2 +- kpi/utils/files.py | 10 +- kpi/views/v2/attachment.py | 2 +- kpi/views/v2/data.py | 10 +- 48 files changed, 1541 insertions(+), 1788 deletions(-) create mode 100644 kobo/apps/kobo_auth/signals.py create mode 100644 kpi/tests/utils/dicts.py diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py index c5c31d420a..66450390b5 100644 --- a/kobo/apps/hook/tests/hook_test_case.py +++ b/kobo/apps/hook/tests/hook_test_case.py @@ -1,5 +1,6 @@ # coding: utf-8 import json +import uuid import pytest import responses @@ -30,16 +31,16 @@ def setUp(self): self.asset = self.create_asset( "some_asset", content=json.dumps({'survey': [ - {'type': 'text', 'name': 'q1'}, - {'type': 'begin_group', 'name': 'group1'}, - {'type': 'text', 'name': 'q2'}, - {'type': 'text', 'name': 'q3'}, + {'type': 'text', 'label': 'q1', 'name': 'q1'}, + {'type': 'begin_group', 'label': 'group1', 'name': 'group1'}, + {'type': 'text', 'label': 'q2', 'name': 'q2'}, + {'type': 'text', 'label': 'q3', 'name': 'q3'}, {'type': 'end_group'}, - {'type': 'begin_group', 'name': 'group2'}, - {'type': 'begin_group', 'name': 'subgroup1'}, - {'type': 'text', 'name': 'q4'}, - {'type': 'text', 'name': 'q5'}, - {'type': 'text', 'name': 'q6'}, + {'type': 'begin_group', 'label': 'group2', 'name': 'group2'}, + {'type': 'begin_group', 'label': 'subgroup1', 'name': 'subgroup1'}, + {'type': 'text', 'label': 'q4', 'name': 'q4'}, + {'type': 'text', 'label': 'q5', 'name': 'q5'}, + {'type': 'text', 'label': 'q6', 'name': 
'q6'}, {'type': 'end_group'}, {'type': 'end_group'}, ]}), @@ -83,8 +84,9 @@ def _create_hook(self, return_response_only=False, **kwargs): if return_response_only: return response else: - self.assertEqual(response.status_code, status.HTTP_201_CREATED, - msg=response.data) + self.assertEqual( + response.status_code, status.HTTP_201_CREATED, msg=response.data + ) hook = self.asset.hooks.last() self.assertTrue(hook.active) return hook @@ -158,9 +160,10 @@ def _send_and_wait_for_retry(self): def __prepare_submission(self): v_uid = self.asset.latest_deployed_version.uid - submission = { + self.submission = { '__version__': v_uid, 'q1': '¿Qué tal?', + '_uuid': str(uuid.uuid4()), 'group1/q2': '¿Cómo está en el grupo uno la primera vez?', 'group1/q3': '¿Cómo está en el grupo uno la segunda vez?', 'group2/subgroup1/q4': '¿Cómo está en el subgrupo uno la primera vez?', @@ -168,4 +171,4 @@ def __prepare_submission(self): 'group2/subgroup1/q6': '¿Cómo está en el subgrupo uno la tercera vez?', 'group2/subgroup11/q1': '¿Cómo está en el subgrupo once?', } - self.asset.deployment.mock_submissions([submission]) + self.asset.deployment.mock_submissions([self.submission]) diff --git a/kobo/apps/hook/tests/test_parser.py b/kobo/apps/hook/tests/test_parser.py index 33f406a8c1..8f890ec778 100644 --- a/kobo/apps/hook/tests/test_parser.py +++ b/kobo/apps/hook/tests/test_parser.py @@ -16,10 +16,10 @@ def test_json_parser(self): ServiceDefinition = hook.get_service_definition() submissions = hook.asset.deployment.get_submissions(hook.asset.owner) - uuid = submissions[0]['_id'] - service_definition = ServiceDefinition(hook, uuid) + submission_id = submissions[0]['_id'] + service_definition = ServiceDefinition(hook, submission_id) expected_data = { - '_id': 1, + '_id': submission_id, 'group1/q3': u'¿Cómo está en el grupo uno la segunda vez?', 'group2/subgroup1/q4': u'¿Cómo está en el subgrupo uno la primera vez?', 'group2/subgroup1/q5': u'¿Cómo está en el subgrupo uno la segunda vez?', @@ -29,26 +29,25 @@ def test_json_parser(self): def test_xml_parser(self): self.asset = self.create_asset( - "some_asset_with_xml_submissions", + 'some_asset_with_xml_submissions', content=json.dumps(self.asset.content), - format="json") + format='json', + ) self.asset.deploy(backend='mock', active=True) self.asset.save() - hook = self._create_hook(subset_fields=['_id', 'subgroup1', 'q3'], - format_type=SUBMISSION_FORMAT_TYPE_XML) + hook = self._create_hook( + subset_fields=['meta', 'subgroup1', 'q3'], + format_type=SUBMISSION_FORMAT_TYPE_XML, + ) ServiceDefinition = hook.get_service_definition() - submissions = hook.asset.deployment.get_submissions( - self.asset.owner, format_type=SUBMISSION_FORMAT_TYPE_XML) - xml_doc = etree.fromstring(submissions[0].encode()) - tree = etree.ElementTree(xml_doc) - uuid = tree.find('_id').text - - service_definition = ServiceDefinition(hook, uuid) + submissions = hook.asset.deployment.get_submissions(self.asset.owner) + submission_id = submissions[0]['_id'] + submission_uuid = submissions[0]['_uuid'] + service_definition = ServiceDefinition(hook, submission_id) expected_etree = etree.fromstring( - f'<{self.asset.uid}>' - f' <_id>{uuid}' + f'<{self.asset.uid} id="{self.asset.uid}">' f' ' f' ¿Cómo está en el grupo uno la segunda vez?' f' ' @@ -59,13 +58,23 @@ def test_xml_parser(self): f' ¿Cómo está en el subgrupo uno la tercera vez?' 
f' ' f' ' + f' ' + f' uuid:{submission_uuid}' + f' ' f'' ) - expected_xml = etree.tostring(expected_etree, pretty_print=True, - xml_declaration=True, encoding='utf-8') + + expected_xml = etree.tostring( + expected_etree, + pretty_print=True, + xml_declaration=True, + encoding='utf-8', + ) def remove_whitespace(str_): return re.sub(r'>\s+<', '><', to_str(str_)) - self.assertEqual(remove_whitespace(service_definition._get_data()), - remove_whitespace(expected_xml.decode())) + self.assertEqual( + remove_whitespace(service_definition._get_data()), + remove_whitespace(expected_xml.decode()), + ) diff --git a/kobo/apps/kobo_auth/__init__.py b/kobo/apps/kobo_auth/__init__.py index cbbf997004..4b7550c50b 100644 --- a/kobo/apps/kobo_auth/__init__.py +++ b/kobo/apps/kobo_auth/__init__.py @@ -6,4 +6,5 @@ class KoboAuthAppConfig(AppConfig): verbose_name = 'Authentication and authorization' def ready(self): + from . import signals super().ready() diff --git a/kobo/apps/kobo_auth/signals.py b/kobo/apps/kobo_auth/signals.py new file mode 100644 index 0000000000..bc4a80d21c --- /dev/null +++ b/kobo/apps/kobo_auth/signals.py @@ -0,0 +1,58 @@ +from django.conf import settings +from django.db.models.signals import post_save +from django.dispatch import receiver +from rest_framework.authtoken.models import Token + +from kobo.apps.kobo_auth.shortcuts import User +from kobo.apps.openrosa.apps.main.models.user_profile import UserProfile +from kpi.deployment_backends.kc_access.utils import ( + grant_kc_model_level_perms, + kc_transaction_atomic, +) +from kpi.utils.permissions import ( + grant_default_model_level_perms, + is_user_anonymous, +) + + +@receiver(post_save, sender=User) +def create_auth_token(sender, instance=None, created=False, **kwargs): + if is_user_anonymous(instance): + return + + if created: + Token.objects.get_or_create(user_id=instance.pk) + + +@receiver(post_save, sender=User) +def default_permissions_post_save(sender, instance, created, raw, **kwargs): + """ + Users must have both model-level and object-level permissions to satisfy + DRF, so assign the newly-created user all available collection and asset + permissions at the model level + """ + if raw: + # `raw` means we can't touch (so make sure your fixtures include + # all necessary permissions!) 
+ return + if not created: + # We should only grant default permissions when the user is first + # created + return + grant_default_model_level_perms(instance) + + +@receiver(post_save, sender=User) +def save_kobocat_user(sender, instance, created, raw, **kwargs): + """ + Sync auth_user table between KPI and KC, and, if the user is newly created, + grant all KoboCAT model-level permissions for the content types listed in + `settings.KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES` + """ + + if not settings.TESTING: + with kc_transaction_atomic(): + instance.sync_to_openrosa_db() + if created: + grant_kc_model_level_perms(instance) + UserProfile.objects.get_or_create(user=instance) diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py index dc8b816d21..5ca73aaa58 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py @@ -26,6 +26,7 @@ safe_create_instance, UnauthenticatedEditAttempt, ) +from kobo.apps.openrosa.libs.utils.string import dict_lists2strings from kpi.authentication import DigestAuthentication from kpi.utils.object_permission import get_database_user from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet @@ -37,25 +38,11 @@ def is_json(request): return 'application/json' in request.content_type.lower() -def dict_lists2strings(d): - """Convert lists in a dict to joined strings. - - :param d: The dict to convert. - :returns: The converted dict.""" - for k, v in d.items(): - if isinstance(v, list) and all([isinstance(e, str) for e in v]): - d[k] = ' '.join(v) - elif isinstance(v, dict): - d[k] = dict_lists2strings(v) - - return d - - def create_instance_from_xml(username, request): xml_file_list = request.FILES.pop('xml_submission_file', []) xml_file = xml_file_list[0] if len(xml_file_list) else None media_files = request.FILES.values() - return safe_create_instance(username, xml_file, media_files, None, request) + return safe_create_instance(username, xml_file, media_files, None, request=request) def create_instance_from_json(username, request): @@ -73,7 +60,7 @@ def create_instance_from_json(username, request): xml_string = dict2xform(submission_joined, dict_form.get('id')) xml_file = io.StringIO(xml_string) - return safe_create_instance(username, xml_file, [], None, request) + return safe_create_instance(username, xml_file, [], None, request=request) class XFormSubmissionApi( diff --git a/kobo/apps/openrosa/apps/logger/models/instance.py b/kobo/apps/openrosa/apps/logger/models/instance.py index 7b685d261b..79a99d72b9 100644 --- a/kobo/apps/openrosa/apps/logger/models/instance.py +++ b/kobo/apps/openrosa/apps/logger/models/instance.py @@ -6,6 +6,7 @@ from backports.zoneinfo import ZoneInfo import reversion +from django.apps import apps from django.contrib.gis.db import models from django.contrib.gis.geos import GeometryCollection, Point from django.utils import timezone @@ -21,8 +22,11 @@ from kobo.apps.openrosa.apps.logger.fields import LazyDefaultBooleanField from kobo.apps.openrosa.apps.logger.models.survey_type import SurveyType from kobo.apps.openrosa.apps.logger.models.xform import XForm -from kobo.apps.openrosa.apps.logger.xform_instance_parser import XFormInstanceParser, \ - clean_and_parse_xml, get_uuid_from_xml +from kobo.apps.openrosa.apps.logger.xform_instance_parser import ( + XFormInstanceParser, + clean_and_parse_xml, + get_uuid_from_xml, +) from kobo.apps.openrosa.libs.utils.common_tags import 
( ATTACHMENTS, GEOLOCATION, @@ -129,12 +133,15 @@ def check_active(self, force): return if self.xform and not self.xform.downloadable: raise FormInactiveError() - try: - profile = self.xform.user.profile - except self.xform.user.profile.RelatedObjectDoesNotExist: - return - if profile.metadata.get('submissions_suspended', False): - raise TemporarilyUnavailableError() + + # FIXME Access `self.xform.user.profile` directly could raise a + # `RelatedObjectDoesNotExist` error if profile does not exist even if + # wrapped in try/except + UserProfile = apps.get_model('main', 'UserProfile') # noqa - Avoid circular imports + if profile := UserProfile.objects.filter(user=self.xform.user).first(): + if profile.metadata.get('submissions_suspended', False): + raise TemporarilyUnavailableError() + return def _set_geom(self): xform = self.xform diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index 9a53b64b0c..4c376d8c18 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -28,6 +28,7 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) +from kpi.fields.file import ExtendedFileField from kpi.utils.xml import XMLFormWithDisclaimer XFORM_TITLE_LENGTH = 255 @@ -53,7 +54,7 @@ class XForm(models.Model): CLONED_SUFFIX = '_cloned' MAX_ID_LENGTH = 100 - xls = models.FileField( + xls = ExtendedFileField( storage=default_storage, upload_to=upload_to, null=True ) json = models.TextField(default='') diff --git a/kobo/apps/openrosa/apps/logger/signals.py b/kobo/apps/openrosa/apps/logger/signals.py index 760e407205..4e72c598a6 100644 --- a/kobo/apps/openrosa/apps/logger/signals.py +++ b/kobo/apps/openrosa/apps/logger/signals.py @@ -185,7 +185,7 @@ def update_xform_submission_count(sender, instance, created, **kwargs): last_submission_time=instance.date_created, ) # Hack to avoid circular imports - UserProfile = User.profile.related.related_model + UserProfile = User.profile.related.related_model # noqa profile, created = UserProfile.objects.only('pk').get_or_create( user_id=xform.user_id ) diff --git a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py index 1630ea1a28..a400b9c7ee 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py @@ -120,7 +120,8 @@ def test_corrupted_submission(self): request = RequestFactory().post('/') request.user = self.user error, instance = safe_create_instance( - self.user.username, TempFileProxy(xml), None, None, request) + self.user.username, TempFileProxy(xml), None, None, request=request + ) # No `DjangoUnicodeDecodeError` errors are raised anymore. 
# An `ExpatError` is raised instead text = 'Improperly formatted XML' diff --git a/kobo/apps/openrosa/apps/logger/utils/instance.py b/kobo/apps/openrosa/apps/logger/utils/instance.py index f54d87b0d4..f87c71612f 100644 --- a/kobo/apps/openrosa/apps/logger/utils/instance.py +++ b/kobo/apps/openrosa/apps/logger/utils/instance.py @@ -118,23 +118,22 @@ def set_instance_validation_statuses( xform: XForm, request_data: dict, request_username: str ) -> int: - try: - new_validation_status_uid = request_data['validation_status.uid'] - except KeyError: - raise MissingValidationStatusPayloadError - - # Create new validation_status object - new_validation_status = get_validation_status( - new_validation_status_uid, request_username - ) + try: + new_validation_status_uid = request_data['validation_status.uid'] + except KeyError: + raise MissingValidationStatusPayloadError - postgres_query, mongo_query = build_db_queries(xform, request_data) + # Create new validation_status object + new_validation_status = get_validation_status( + new_validation_status_uid, request_username + ) + postgres_query, mongo_query = build_db_queries(xform, request_data) - # Update Postgres & Mongo - updated_records_count = Instance.objects.filter( - **postgres_query - ).update(validation_status=new_validation_status) - ParsedInstance.bulk_update_validation_statuses( - mongo_query, new_validation_status - ) - return updated_records_count + # Update Postgres & Mongo + updated_records_count = Instance.objects.filter( + **postgres_query + ).update(validation_status=new_validation_status) + ParsedInstance.bulk_update_validation_statuses( + mongo_query, new_validation_status + ) + return updated_records_count diff --git a/kobo/apps/openrosa/apps/logger/xform_instance_parser.py b/kobo/apps/openrosa/apps/logger/xform_instance_parser.py index ecfeda23b0..bd1e6d0077 100644 --- a/kobo/apps/openrosa/apps/logger/xform_instance_parser.py +++ b/kobo/apps/openrosa/apps/logger/xform_instance_parser.py @@ -1,8 +1,11 @@ -# coding: utf-8 +from __future__ import annotations + import logging import re import sys +from datetime import datetime from xml.dom import Node +from typing import Optional import dateutil.parser import six @@ -71,6 +74,7 @@ def get_meta_from_xml(xml_str, meta_name): def get_uuid_from_xml(xml): + def _uuid_only(uuid, regex): matches = regex.match(uuid) if matches and len(matches.groups()) > 0: @@ -94,7 +98,7 @@ def _uuid_only(uuid, regex): return None -def get_submission_date_from_xml(xml): +def get_submission_date_from_xml(xml) -> Optional[datetime]: # check in survey_node attributes xml = clean_and_parse_xml(xml) children = xml.childNodes @@ -103,9 +107,9 @@ def get_submission_date_from_xml(xml): if children.length == 0: raise ValueError(t("XML string must have a survey element.")) survey_node = children[0] - submissionDate = survey_node.getAttribute('submissionDate') - if submissionDate != '': - return dateutil.parser.parse(submissionDate) + submission_date = survey_node.getAttribute('submissionDate') + if submission_date != '': + return dateutil.parser.parse(submission_date) return None diff --git a/kobo/apps/openrosa/apps/main/models/meta_data.py b/kobo/apps/openrosa/apps/main/models/meta_data.py index be49726e3c..aaf7545e7d 100644 --- a/kobo/apps/openrosa/apps/main/models/meta_data.py +++ b/kobo/apps/openrosa/apps/main/models/meta_data.py @@ -19,6 +19,7 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) +from kpi.fields.file import ExtendedFileField CHUNK_SIZE = 1024 
@@ -136,7 +137,7 @@ class MetaData(models.Model): xform = models.ForeignKey(XForm, on_delete=models.CASCADE) data_type = models.CharField(max_length=255) data_value = models.CharField(max_length=255) - data_file = models.FileField( + data_file = ExtendedFileField( storage=default_storage, upload_to=upload_to, blank=True, diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index 0950733991..3fd880394e 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -7,7 +7,7 @@ import sys import traceback from datetime import date, datetime, timezone -from typing import Generator, Optional +from typing import Generator, Optional, Union from xml.etree import ElementTree as ET from xml.parsers.expat import ExpatError try: @@ -110,6 +110,7 @@ def check_submission_permissions( :returns: None. :raises: PermissionDenied based on the above criteria. """ + if not xform.require_auth: # Anonymous submissions are allowed! return @@ -198,8 +199,15 @@ def create_instance( existing_instance.parsed_instance.save(asynchronous=False) return existing_instance else: - instance = save_submission(request, xform, xml, media_files, new_uuid, - status, date_created_override) + instance = save_submission( + request, + xform, + xml, + media_files, + new_uuid, + status, + date_created_override, + ) return instance @@ -211,12 +219,14 @@ def disposition_ext_and_date(name, extension, show_date=True): return 'attachment; filename=%s.%s' % (name, extension) -def dict2xform(jsform, form_id): - dd = {'form_id': form_id} - xml_head = "\n<%(form_id)s id='%(form_id)s'>\n" % dd - xml_tail = "\n" % dd +def dict2xform(submission: dict, xform_id_string: str) -> str: + xml_head = ( + f'\n' + f' <{xform_id_string} id="{xform_id_string}">\n' + ) + xml_tail = f'\n\n' - return xml_head + dict2xml(jsform) + xml_tail + return xml_head + dict2xml(submission) + xml_tail def get_instance_or_404(**criteria): @@ -456,6 +466,7 @@ def publish_xls_form(xls_file, user, id_string=None): with transaction.atomic(): dd = DataDictionary.objects.create(user=user, xls=xls_file) except IntegrityError as e: + breakpoint() raise e return dd @@ -528,10 +539,11 @@ def response_with_mimetype_and_name( def safe_create_instance( - username, - xml_file, - media_files, + username: str, + xml_file: File, + media_files: Union[list, Generator[File]], uuid: Optional[str] = None, + date_created_override: Optional[datetime] = None, request: Optional['rest_framework.request.Request'] = None, ): """Create an instance and catch exceptions. @@ -543,7 +555,12 @@ def safe_create_instance( try: instance = create_instance( - username, xml_file, media_files, uuid=uuid, request=request + username, + xml_file, + media_files, + uuid=uuid, + date_created_override=date_created_override, + request=request, ) except InstanceInvalidUserError: error = OpenRosaResponseBadRequest(t("Username or ID required.")) @@ -630,7 +647,7 @@ def save_submission( request: 'rest_framework.request.Request', xform: XForm, xml: str, - media_files: Generator[File], + media_files: Union[list, Generator[File]], new_uuid: str, status: str, date_created_override: datetime, @@ -666,13 +683,15 @@ def save_submission( if not dj_timezone.is_aware(date_created_override): # default to utc? 
date_created_override = dj_timezone.make_aware( - date_created_override, timezone.utc) + date_created_override, timezone.utc + ) instance.date_created = date_created_override - instance.save() + instance.save(update_fields=['date_created']) if instance.xform is not None: pi, created = ParsedInstance.objects.get_or_create( - instance=instance) + instance=instance + ) if not created: pi.save(asynchronous=False) @@ -820,7 +839,12 @@ def _has_edit_xform_permission( if request.user.is_superuser: return True - return request.user.has_perm('logger.change_xform', xform) + if request.user.has_perm('logger.change_xform', xform): + return True + + # User's permissions have been already checked when calling KPI endpoint + # If `has_partial_perms` is True, user is allowed to perform the action. + return getattr(request.user, 'has_partial_perms', False) return False diff --git a/kobo/apps/openrosa/libs/utils/string.py b/kobo/apps/openrosa/libs/utils/string.py index 60813cfc06..cb5047ad7c 100644 --- a/kobo/apps/openrosa/libs/utils/string.py +++ b/kobo/apps/openrosa/libs/utils/string.py @@ -13,6 +13,23 @@ def base64_decodestring(obj): return base64.b64decode(obj).decode() +def dict_lists2strings(d: dict) -> dict: + """ + Convert lists in a dict to joined strings. + + :param d: The dict to convert. + :returns: The converted dict. + """ + + for k, v in d.items(): + if isinstance(v, list) and all([isinstance(e, str) for e in v]): + d[k] = ' '.join(v) + elif isinstance(v, dict): + d[k] = dict_lists2strings(v) + + return d + + def str2bool(v): return v.lower() in ( 'yes', 'true', 't', '1') if isinstance(v, str) else v diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 92a531e470..3420843798 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -333,14 +333,12 @@ def __add_submissions(self): 'formhub/uuid': self.asset.uid, '_attachments': [ { - 'id': 1, 'download_url': 'http://testserver/someuser/audio_conversion_test_clip.3gp', 'filename': 'someuser/audio_conversion_test_clip.3gp', 'mimetype': 'video/3gpp', 'bytes': 5000, }, { - 'id': 2, 'download_url': 'http://testserver/someuser/audio_conversion_test_image.jpg', 'filename': 'someuser/audio_conversion_test_image.jpg', 'mimetype': 'image/jpeg', @@ -353,14 +351,6 @@ def __add_submissions(self): self.asset.deployment.mock_submissions(submissions) self.submissions = submissions - @patch( - 'kpi.serializers.v2.service_usage.ServiceUsageSerializer._get_storage_usage', - new=MockServiceUsageSerializer._get_storage_usage - ) - @patch( - 'kpi.serializers.v2.service_usage.ServiceUsageSerializer._get_submission_counters', - new=MockServiceUsageSerializer._get_submission_counters - ) @patch( 'kobo.apps.project_ownership.models.transfer.reset_kc_permissions', MagicMock() @@ -385,7 +375,7 @@ def test_account_usage_transferred_to_new_user(self): 'asr_seconds_all_time': 120, 'mt_characters_all_time': 1000, }, - 'total_storage_bytes': 15000, + 'total_storage_bytes': 191642, 'total_submission_count': { 'all_time': 1, 'current_year': 1, diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index f9764c98e5..5cc13d1e52 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -1,4 +1,5 @@ import timeit +import itertools import pytest from django.core.cache import cache @@ -10,14 +11,19 @@ from 
kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization, OrganizationUser -from kobo.apps.stripe.tests.utils import generate_enterprise_subscription, generate_plan_subscription -from kobo.apps.trackers.submission_utils import create_mock_assets, add_mock_submissions +from kobo.apps.stripe.tests.utils import ( + generate_enterprise_subscription, + generate_plan_subscription, +) +from kobo.apps.trackers.submission_utils import ( + create_mock_assets, + add_mock_submissions, +) from kpi.tests.api.v2.test_api_service_usage import ServiceUsageAPIBase from kpi.tests.api.v2.test_api_asset_usage import AssetUsageAPITestCase from rest_framework import status - class OrganizationServiceUsageAPITestCase(ServiceUsageAPIBase): """ Test organization service usage when Stripe is enabled. @@ -26,9 +32,10 @@ class OrganizationServiceUsageAPITestCase(ServiceUsageAPIBase): when Stripe is installed. """ - user_count = 5 - assets_per_user = 5 - submissions_per_asset = 5 + names = ['alice', 'bob'] + user_count = len(names) + assets_per_user = 2 + submissions_per_asset = 2 org_id = 'orgAKWMFskafsngf' @classmethod @@ -40,7 +47,12 @@ def setUpTestData(cls): cls.organization.add_user(cls.anotheruser, is_admin=True) assets = create_mock_assets([cls.anotheruser], cls.assets_per_user) - users = baker.make(User, _quantity=cls.user_count - 1, _bulk_create=True) + users = baker.make( + User, + username=itertools.cycle(cls.names), + _quantity=cls.user_count - 1, + _bulk_create=True, + ) baker.make( OrganizationUser, user=users.__iter__(), diff --git a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py index 1e951ff389..1502fcccf5 100644 --- a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py +++ b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py @@ -1,3 +1,4 @@ +import uuid from copy import deepcopy from unittest.mock import patch @@ -31,7 +32,10 @@ class ValidateSubmissionTest(APITestCase): def setUp(self): user = User.objects.create_user(username='someuser', email='user@example.com') self.asset = Asset( - owner=user, content={'survey': [{'type': 'audio', 'name': 'q1'}]} + owner=user, + content={ + 'survey': [{'type': 'audio', 'label': 'q1', 'name': 'q1'}] + }, ) self.asset.advanced_features = {} self.asset.save() @@ -388,7 +392,7 @@ class GoogleTranscriptionSubmissionTest(APITestCase): def setUp(self): self.user = User.objects.create_user(username='someuser', email='user@example.com') self.asset = Asset( - content={'survey': [{'type': 'audio', 'label': 'q1'}]} + content={'survey': [{'type': 'audio', 'label': 'q1', 'name': 'q1'}]} ) self.asset.advanced_features = {'transcript': {'values': ['q1']}} self.asset.owner = self.user @@ -418,7 +422,6 @@ def test_google_transcript_post(self, m1, m2): '_uuid': submission_id, '_attachments': [ { - 'id': 1, 'filename': 'someuser/audio_conversion_test_clip.3gp', 'mimetype': 'video/3gpp', }, @@ -431,10 +434,10 @@ def test_google_transcript_post(self, m1, m2): 'submission': submission_id, 'q1': {GOOGLETS: {'status': 'requested', 'languageCode': ''}} } - with self.assertNumQueries(FuzzyInt(210, 215)): + with self.assertNumQueries(FuzzyInt(55, 65)): res = self.client.post(url, data, format='json') self.assertContains(res, 'complete') - with self.assertNumQueries(FuzzyInt(20, 26)): + with self.assertNumQueries(FuzzyInt(25, 35)): self.client.post(url, data, format='json') @override_settings(CACHES={'default': {'BACKEND': 
'django.core.cache.backends.locmem.LocMemCache'}}) diff --git a/kobo/apps/subsequences/tests/test_submission_stream.py b/kobo/apps/subsequences/tests/test_submission_stream.py index 4542d603e1..dc6f770873 100644 --- a/kobo/apps/subsequences/tests/test_submission_stream.py +++ b/kobo/apps/subsequences/tests/test_submission_stream.py @@ -85,6 +85,7 @@ def _create_asset(self): }, ) self.asset.deploy(backend='mock', active=True) + self.asset.save() def _create_mock_submissions(self): self.asset.deployment.mock_submissions( @@ -95,8 +96,6 @@ def _create_mock_submissions(self): 'meta/instanceID': ( 'uuid:1c05898e-b43c-491d-814c-79595eb84e81' ), - # `MockDeploymentBackend` should probably add `_uuid`, but - # it doesn't. It's going away soon enough, though. '_uuid': '1c05898e-b43c-491d-814c-79595eb84e81', }, ] @@ -283,5 +282,5 @@ def test_stream_with_extras_handles_duplicated_submission_uuids(self): for v in qual_response['val']: assert isinstance(v['uuid'], str) - # Clear all mocked submissions to avoid duplicate submission errors - self.asset.deployment.mock_submissions([]) + ## Clear all mocked submissions to avoid duplicate submission errors + #self.asset.deployment.mock_submissions([]) diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index bddf773004..cb1aecd456 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -1,11 +1,10 @@ from collections import defaultdict from copy import deepcopy + from ..actions.automatic_transcription import AutomaticTranscriptionAction from ..actions.translation import TranslationAction from ..actions.qual import QualAction -from ..actions.unknown_action import UnknownAction - AVAILABLE_ACTIONS = ( AutomaticTranscriptionAction, @@ -42,14 +41,16 @@ # if not action.test_submission_passes_action(submission): # return action + def advanced_feature_instances(content, actions): action_instances = [] for action_id, action_params in actions.items(): action_kls = ACTIONS_BY_ID[action_id] - if action_params == True: + if action_params is True: action_params = action_kls.build_params({}, content) yield action_kls(action_params) + def populate_paths(_content): content = deepcopy(_content) group_stack = [] @@ -76,6 +77,7 @@ def populate_paths(_content): row['qpath'] = '-'.join([*group_stack, rowname]) return content + def advanced_submission_jsonschema(content, actions, url=None): actions = deepcopy(actions) action_instances = [] @@ -90,7 +92,7 @@ def advanced_submission_jsonschema(content, actions, url=None): for action_id, action_params in actions.items(): action_kls = ACTIONS_BY_ID[action_id] - if action_params == True: + if action_params is True: action_params = action_kls.build_params({}, content) if 'values' not in action_params: action_params['values'] = action_kls.get_values_for_content(content) @@ -100,26 +102,32 @@ def advanced_submission_jsonschema(content, actions, url=None): # def _empty_obj(): # return {'type': 'object', 'properties': {}, 'additionalProperties': False} + def get_jsonschema(action_instances=(), url=None): sub_props = {} if url is None: url = '/advanced_submission_post/' - schema = {'type': 'object', - '$description': FEATURE_JSONSCHEMA_DESCRIPTION, - 'url': url, - 'properties': { - 'submission': {'type': 'string', - 'description': 'the uuid of the submission'}, - }, - 'additionalProperties': False, - 'required': ['submission'], - } + schema = { + 'type': 'object', + '$description': FEATURE_JSONSCHEMA_DESCRIPTION, + 'url': url, + 'properties': { + 
'submission': { + 'type': 'string', + 'description': 'the uuid of the submission', + }, + }, + 'additionalProperties': False, + 'required': ['submission'], + } for instance in action_instances: schema = instance.modify_jsonschema(schema) return schema + SUPPLEMENTAL_DETAILS_KEY = '_supplementalDetails' + def stream_with_extras(submission_stream, asset): extras = dict( asset.submission_extras.values_list('submission_uuid', 'content') @@ -145,11 +153,14 @@ def stream_with_extras(submission_stream, asset): c['uuid']: c for c in choices } qual_questions_by_uuid[qual_q['uuid']] = qual_q + for submission in submission_stream: if SUBMISSION_UUID_FIELD in submission: uuid = submission[SUBMISSION_UUID_FIELD] else: uuid = submission['_uuid'] + + all_supplemental_details = deepcopy(extras.get(uuid, {})) for qpath, supplemental_details in all_supplemental_details.items(): try: diff --git a/kobo/apps/trackers/submission_utils.py b/kobo/apps/trackers/submission_utils.py index a47118a28e..18702cf842 100644 --- a/kobo/apps/trackers/submission_utils.py +++ b/kobo/apps/trackers/submission_utils.py @@ -1,4 +1,6 @@ +import itertools import os +import time import uuid from django.conf import settings @@ -31,17 +33,29 @@ def create_mock_assets(users: list, assets_per_user: int = 1): ] } assets = [] - for user in users: + + def _get_uid(count): + uids = [] + for i in range(count): + _, random = str(time.time()).split('.') + uids.append(f'a{random}_{i}') + return uids + + for idx, user in enumerate(users): assets = assets + baker.make( Asset, content=content_source_asset, owner=user, asset_type='survey', name='test', + uid=itertools.cycle(_get_uid(assets_per_user)), _quantity=assets_per_user, ) + print([a.uid for a in assets]) + breakpoint() for asset in assets: + print('DEPLOYING ', asset.uid, flush=True) asset.deploy(backend='mock', active=True) asset.deployment.set_namespace(ROUTER_URL_NAMESPACE) asset.save() # might be redundant? 
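A minimal sketch (an illustration under assumptions, not part of the patch) of the pattern `create_mock_assets` leans on above: when `model_bakery` is given an iterator for a field together with `_quantity`, it consumes one value per created instance, so wrapping the pre-generated uids in `itertools.cycle` lets each asset receive its own uid without the iterator ever exhausting. The uid values below are hypothetical; the helper above derives them from the fractional part of `time.time()`.

import itertools
from model_bakery import baker
from kpi.models import Asset

# Hypothetical pre-built uids, one per asset to create.
uids = ['a171_0', 'a171_1']
assets = baker.make(
    Asset,
    uid=itertools.cycle(uids),  # next() is called once per generated instance
    _quantity=len(uids),        # so each of the two assets gets a distinct uid
)

Cycling (rather than passing the raw list) keeps the field value an iterator, which is what model_bakery expects for per-instance values, and avoids a StopIteration if `_quantity` ever exceeds the number of pre-built uids.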
@@ -60,70 +74,6 @@ def expected_file_size(submissions: int = 1): )) * submissions -def update_xform_counters( - asset: Asset, xform: XForm = None, submissions: int = 1 -): - """ - Create/update the daily submission counter and the shadow xform we use to query it - """ - today = timezone.now() - if xform: - xform.attachment_storage_bytes += ( - expected_file_size(submissions) - ) - xform.save() - else: - xform_xml = ( - f'' - f'' - f'' - f' XForm test' - f' ' - f' ' - f' <{asset.uid} id="{asset.uid}" />' - f' ' - f' ' - f'' - f'' - f'' - f'' - ) - - xform = baker.make( - 'logger.XForm', - attachment_storage_bytes=( - expected_file_size(submissions) - ), - kpi_asset_uid=asset.uid, - date_created=today, - date_modified=today, - user_id=asset.owner_id, - xml=xform_xml, - json={} - ) - xform.save() - - counter = DailyXFormSubmissionCounter.objects.filter( - date=today.date(), - user_id=asset.owner.id, - ).first() - - if counter: - counter.counter += submissions - counter.save() - else: - counter = ( - baker.make( - 'logger.DailyXFormSubmissionCounter', - date=today.date(), - counter=submissions, - xform=xform, - user_id=asset.owner_id, - ) - ) - counter.save() - - def add_mock_submissions(assets: list, submissions_per_asset: int = 1): """ Add one (default) or more submissions to an asset @@ -158,6 +108,6 @@ def add_mock_submissions(assets: list, submissions_per_asset: int = 1): asset.deployment.mock_submissions(asset_submissions, flush_db=False) all_submissions = all_submissions + asset_submissions - update_xform_counters(asset, submissions=submissions_per_asset) + # update_xform_counters(asset, submissions=submissions_per_asset) return all_submissions diff --git a/kobo/apps/trackers/tests/test_trackers.py b/kobo/apps/trackers/tests/test_trackers.py index 7209416f1d..083ec16b7c 100644 --- a/kobo/apps/trackers/tests/test_trackers.py +++ b/kobo/apps/trackers/tests/test_trackers.py @@ -23,7 +23,7 @@ def setUp(self): def _create_asset(self): asset = Asset.objects.create( - content={'survey': [{"type": "text", "name": "q1"}]}, + content={'survey': [{'type': 'text', 'label': 'q1', 'name': 'q1'}]}, owner=self.user, asset_type='survey', name='тєѕт αѕѕєт', diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 30dc1cbb06..812c5de440 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -1615,12 +1615,6 @@ def dj_stripe_request_callback_method(): # Django 3.2 required settings DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' -SERVICE_ACCOUNT = { - 'BACKEND': env.cache_url( - 'SERVICE_ACCOUNT_BACKEND_URL', default='redis://redis_cache:6380/6' - ), - 'WHITELISTED_HOSTS': env.list('SERVICE_ACCOUNT_WHITELISTED_HOSTS', default=[]), -} AUTH_PASSWORD_VALIDATORS = [ { diff --git a/kobo/settings/testing.py b/kobo/settings/testing.py index 4ae7e54ea8..421adcaeb6 100644 --- a/kobo/settings/testing.py +++ b/kobo/settings/testing.py @@ -47,10 +47,8 @@ 'LOADER_CLASS' ] = 'webpack_loader.loader.FakeWebpackLoader' -# Kobocat settings +# KoboCAT settings TEST_HTTP_HOST = 'testserver' TEST_USERNAME = 'bob' -SERVICE_ACCOUNT['WHITELISTED_HOSTS'] = ['testserver'] -SERVICE_ACCOUNT['NAMESPACE'] = 'kobo-service-account-test' OPENROSA_DB_ALIAS = DEFAULT_DB_ALIAS diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index f7860fdf71..b26dd84fdf 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -117,6 +117,11 @@ def bulk_update_submissions( # Reset query, because all the submission ids have been already # retrieve 
data['query'] = {} + + # Set `has_partial_perms` flag on `request.user` to grant them + # permissions while calling `logger_tool.py::_has_edit_xform_permission()` + if request := kwargs.get('request'): + request.user.has_partial_perms = True else: submission_ids = data['submission_ids'] diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 296e8fbb25..702ce886c4 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -1,365 +1,31 @@ -# coding: utf-8 from __future__ import annotations -import copy +import io import os -import time -import uuid -from collections import defaultdict -from contextlib import contextmanager -from datetime import date, datetime -from typing import Optional, Union -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo +from typing import Optional +from uuid import uuid4 -from deepmerge import always_merger -from dict2xml import dict2xml as dict2xml_real -from django.db.models import Q from django.conf import settings -from django.core.files.base import ContentFile -from django.db.models import Sum -from django.db.models.functions import Coalesce -from django.urls import reverse -from rest_framework import status +from django.contrib.auth.models import AnonymousUser +from django.utils.dateparse import parse_datetime -from kobo.apps.openrosa.apps.logger.models import Attachment, Instance, XForm -from kobo.apps.openrosa.apps.logger.models.attachment import upload_to -from kobo.apps.openrosa.apps.main.models import UserProfile -from kobo.apps.trackers.models import NLPUsageCounter -from kpi.constants import ( - SUBMISSION_FORMAT_TYPE_JSON, - SUBMISSION_FORMAT_TYPE_XML, - PERM_CHANGE_SUBMISSIONS, - PERM_DELETE_SUBMISSIONS, - PERM_VALIDATE_SUBMISSIONS, +from kobo.apps.kobo_auth.shortcuts import User +from kobo.apps.openrosa.libs.utils.logger_tools import ( + dict2xform, + safe_create_instance, ) -from kpi.exceptions import ( - AttachmentNotFoundException, - InvalidXPathException, - SubmissionNotFoundException, - XPathNotFoundException, -) -from kpi.interfaces.sync_backend_media import SyncBackendMediaInterface -from kpi.models.asset_file import AssetFile -from kpi.utils.mongo_helper import MongoHelper, drop_mock_only -from kpi.utils.xml import fromstring_preserve_root_xmlns -from .base_backend import BaseDeploymentBackend - +from kpi.constants import PERM_ADD_SUBMISSIONS, SUBMISSION_FORMAT_TYPE_JSON +from kpi.tests.utils.dicts import nested_dict_from_keys +from .openrosa_backend import OpenRosaDeploymentBackend +from ..utils.files import ExtendedContentFile -def dict2xml(*args, **kwargs): - """To facilitate mocking in unit tests""" - return dict2xml_real(*args, **kwargs) - -class MockDeploymentBackend(BaseDeploymentBackend): - """ - Only used for unit testing and interface testing. 
- """ - - @property - def attachment_storage_bytes(self): - submissions = self.get_submissions(self.asset.owner) - storage_bytes = 0 - for submission in submissions: - attachments = self.get_attachment_objects_from_dict(submission) - storage_bytes += sum( - [attachment.media_file_size for attachment in attachments] - ) - return storage_bytes - - def bulk_assign_mapped_perms(self): - pass - - def calculated_submission_count( - self, user: settings.AUTH_USER_MODEL, **kwargs - ) -> int: - params = self.validate_submission_list_params( - user, validate_count=True, **kwargs - ) - return MongoHelper.get_count(self.mongo_userform_id, **params) - - def connect(self, active=False): - def generate_uuid_for_form(): - # From KoboCAT's onadata.libs.utils.model_tools - return uuid.uuid4().hex - - self.store_data( - { - 'backend': 'mock', - 'active': active, - 'backend_response': { - 'downloadable': active, - 'kpi_asset_uid': self.asset.uid, - 'uuid': generate_uuid_for_form(), - # TODO use XForm object and get its primary key - 'formid': self.asset.pk - }, - 'version': self.asset.version_id, - } - ) - - @property - def form_uuid(self): - return 'formhub-uuid' # to match existing tests - - def nlp_tracking_data(asset_ids=None): - """ - Get the NLP tracking data since a specified date - If no date is provided, get all-time data - """ - filter_args = {} - if start_date: - filter_args = {'date__gte': start_date} - try: - nlp_tracking = ( - NLPUsageCounter.objects.only( - 'total_asr_seconds', 'total_mt_characters' - ) - .filter(asset_id=self.asset.id, **filter_args) - .aggregate( - total_nlp_asr_seconds=Coalesce(Sum('total_asr_seconds'), 0), - total_nlp_mt_characters=Coalesce( - Sum('total_mt_characters'), 0 - ), - ) - ) - except NLPUsageCounter.DoesNotExist: - return { - 'total_nlp_asr_seconds': 0, - 'total_nlp_mt_characters': 0, - } - else: - return nlp_tracking - - def submission_count_since_date(self, start_date=None): - # FIXME, does not reproduce KoBoCAT behaviour. - # Deleted submissions are not taken into account but they should be - monthly_counter = len(self.get_submissions(self.asset.owner)) - return monthly_counter - - @drop_mock_only - def delete_submission( - self, submission_id: int, user: settings.AUTH_USER_MODEL - ) -> dict: - """ - Delete a submission - """ - self.validate_access_with_partial_perms( - user=user, - perm=PERM_DELETE_SUBMISSIONS, - submission_ids=[submission_id], - ) - - if not settings.MONGO_DB.instances.find_one({'_id': submission_id}): - return { - 'content_type': 'application/json', - 'status': status.HTTP_404_NOT_FOUND, - 'data': {'detail': 'Not found'}, - } - - settings.MONGO_DB.instances.delete_one({'_id': submission_id}) - - return { - 'content_type': 'application/json', - 'status': status.HTTP_204_NO_CONTENT, - } - - def delete_submissions( - self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs - ) -> dict: - """ - Bulk delete provided submissions authenticated by `user`'s API token. 
- - `data` should contains the submission ids or the query to get the subset - of submissions to delete - Example: - {"submission_ids": [1, 2, 3]} - or - {"query": {"Question": "response"} - """ - submission_ids = self.validate_access_with_partial_perms( - user=user, - perm=PERM_DELETE_SUBMISSIONS, - submission_ids=data['submission_ids'], - query=data['query'], - ) - - if not submission_ids: - submission_ids = data['submission_ids'] - else: - data['query'] = {} - - # Retrieve the subset of submissions to delete - submissions = self.get_submissions( - user, submission_ids=submission_ids, query=data['query'] - ) - - # If no submissions have been fetched, user is not allowed to perform - # the request - if not submissions: - return { - 'content_type': 'application/json', - 'status': status.HTTP_404_NOT_FOUND, - } - - # We could use `delete_many()` but we would have to recreate the query - # with submission ids or query. - for submission in submissions: - submission_id = submission['_id'] - settings.MONGO_DB.instances.delete_one({'_id': submission_id}) - - return { - 'content_type': 'application/json', - 'status': status.HTTP_200_OK, - } - - def duplicate_submission( - self, submission_id: int, request: 'rest_framework.request.Request', - ) -> dict: - # TODO: Make this operate on XML somehow and reuse code from - # KobocatDeploymentBackend, to catch issues like #3054 - user = request.user - self.validate_access_with_partial_perms( - user=user, - perm=PERM_CHANGE_SUBMISSIONS, - submission_ids=[submission_id], - ) - - submission = self.get_submission(submission_id, user=user) - _attachments = submission.get('_attachments', []) - dup_att = [] - if _attachments: - # not exactly emulating database id incrementing but probably good - # enough for the mock tests - max_attachment_id = max(a['id'] for a in _attachments) - for i, att in enumerate(_attachments, 1): - dup_att.append({**att, 'id': max_attachment_id + i}) - - duplicated_submission = copy.deepcopy(submission) - updated_time = datetime.now(tz=ZoneInfo('UTC')).isoformat( - 'T', 'milliseconds' - ) - next_id = ( - max( - ( - sub['_id'] - for sub in self.get_submissions( - self.asset.owner, fields=['_id'] - ) - ) - ) - + 1 - ) - duplicated_submission.update( - { - '_id': next_id, - 'start': updated_time, - 'end': updated_time, - self.SUBMISSION_CURRENT_UUID_XPATH: f'uuid:{uuid.uuid4()}', - self.SUBMISSION_DEPRECATED_UUID_XPATH: submission[ - self.SUBMISSION_CURRENT_UUID_XPATH - ], - '_attachments': dup_att, - } - ) - - self.asset.deployment.mock_submissions([duplicated_submission]) - return duplicated_submission +class MockDeploymentBackend(OpenRosaDeploymentBackend): @property def enketo_id(self): return 'self' - def get_attachment( - self, - submission_id_or_uuid: Union[int, str], - user: settings.AUTH_USER_MODEL, - attachment_id: Optional[int] = None, - xpath: Optional[str] = None, - ) -> 'logger.Attachment': - submission_json = None - # First try to get the json version of the submission. 
- # It helps to retrieve the id if `submission_id_or_uuid` is a `UUIDv4` - try: - submission_id_or_uuid = int(submission_id_or_uuid) - except ValueError: - submissions = self.get_submissions( - user, - format_type=SUBMISSION_FORMAT_TYPE_JSON, - query={'_uuid': submission_id_or_uuid}, - ) - if submissions: - submission_json = submissions[0] - else: - submission_json = self.get_submission( - submission_id_or_uuid, - user, - format_type=SUBMISSION_FORMAT_TYPE_JSON, - ) - - if not submission_json: - raise SubmissionNotFoundException - - submission_xml = self.get_submission( - submission_json['_id'], user, format_type=SUBMISSION_FORMAT_TYPE_XML - ) - - if xpath: - submission_root = fromstring_preserve_root_xmlns(submission_xml) - try: - element = submission_root.find(xpath) - except KeyError: - raise InvalidXPathException - - try: - attachment_filename = element.text - except AttributeError: - raise XPathNotFoundException - - attachments = submission_json['_attachments'] - for attachment in attachments: - filename = os.path.basename(attachment['filename']) - - if xpath: - is_good_file = attachment_filename == filename - else: - is_good_file = int(attachment['id']) == int(attachment_id) - - if is_good_file: - return self._get_attachment_object( - attachment_id=attachment['id'], - submission_xml=submission_xml, - submission_id=submission_json['_id'], - filename=filename, - mimetype=attachment.get('mimetype'), - ) - - raise AttachmentNotFoundException - - def get_attachment_objects_from_dict(self, submission: dict) -> list: - if not submission.get('_attachments'): - return [] - attachments = submission.get('_attachments') - submission_xml = self.get_submission( - submission['_id'], self.asset.owner, format_type=SUBMISSION_FORMAT_TYPE_XML - ) - - return [ - self._get_attachment_object( - attachment_id=attachment['id'], - submission_xml=submission_xml, - submission_id=submission['_id'], - filename=os.path.basename(attachment['filename']), - mimetype=attachment.get('mimetype'), - ) - for attachment in attachments - ] - - def get_data_download_links(self): - return {} - def get_enketo_survey_links(self): return { 'offline_url': f'https://example.org/_/#{self.enketo_id}', @@ -368,161 +34,103 @@ def get_enketo_survey_links(self): 'preview_url': f'https://example.org/preview/::#{self.enketo_id}', } - def get_submission_detail_url(self, submission_id: int) -> str: - # This doesn't really need to be implemented. - # We keep it to stay close to `KobocatDeploymentBackend` - url = f'{self.submission_list_url}{submission_id}/' - return url - - def get_submission_validation_status_url(self, submission_id: int) -> str: - url = '{detail_url}validation_status/'.format( - detail_url=self.get_submission_detail_url(submission_id) - ) - return url - - def get_daily_counts( - self, user: settings.AUTH_USER_MODEL, timeframe: tuple[date, date] - ) -> dict: - submissions = self.get_submissions(user=self.asset.owner) - daily_counts = defaultdict(int) - for submission in submissions: - submission_date = datetime.strptime( - submission['_submission_time'], '%Y-%m-%dT%H:%M:%S' - ) - daily_counts[str(submission_date.date())] += 1 - - return daily_counts - def get_submissions( self, user: settings.AUTH_USER_MODEL, format_type: str = SUBMISSION_FORMAT_TYPE_JSON, - submission_ids: list = [], + submission_ids: list = None, request: Optional['rest_framework.request.Request'] = None, - **mongo_query_params, + **mongo_query_params ) -> list: + # Overload parent to cast generator to a list. 
Many tests are expecting + # a list + return list(super().get_submissions( + user, format_type, submission_ids, request, **mongo_query_params + )) + + def mock_submissions( + self, submissions, create_uuids: bool = True, flush_db: bool = True + ): """ - Retrieve submissions that `user` is allowed to access. - - The format `format_type` can be either: - - 'json' (See `kpi.constants.SUBMISSION_FORMAT_TYPE_JSON`) - - 'xml' (See `kpi.constants.SUBMISSION_FORMAT_TYPE_XML`) - - Results can be filtered by submission ids. Moreover MongoDB filters can - be passed through `mongo_query_params` to narrow down the results. + Simulate client (i.e.: Enketo or Collect) data submission. - If `user` has no access to these submissions or no matches are found, - an empty list is returned. - If `format_type` is 'json', a list of dictionaries is returned. - Otherwise, if `format_type` is 'xml', a list of strings is returned. + Read test data and convert it to proper XML to be saved as a real + Instance object. """ - mongo_query_params['submission_ids'] = submission_ids - params = self.validate_submission_list_params( - user, format_type=format_type, **mongo_query_params - ) - - mongo_cursor, total_count = MongoHelper.get_instances( - self.mongo_userform_id, **params - ) - - # Python-only attribute used by `kpi.views.v2.data.DataViewSet.list()` - self.current_submission_count = total_count - - submissions = [ - self._rewrite_json_attachment_urls( - MongoHelper.to_readable_dict(submission), - request, - ) - for submission in mongo_cursor - ] - - if format_type != SUBMISSION_FORMAT_TYPE_XML: - return submissions + class FakeRequest: + pass - return [ - dict2xml( - self.__prepare_xml(submission), - wrap=self.asset.uid, - newlines=False, - ) - for submission in submissions - ] + request = FakeRequest() + owner_username = self.asset.owner.username - def get_validation_status( - self, submission_id: int, user: settings.AUTH_USER_MODEL - ) -> dict: - submission = self.get_submission(submission_id, user) - return { - 'content_type': 'application/json', - 'data': submission.get('_validation_status'), - } - - @drop_mock_only - def mock_submissions(self, submissions: list, flush_db: bool = True): - """ - Insert dummy submissions into deployment data - """ - if flush_db: - settings.MONGO_DB.instances.drop() - count = settings.MONGO_DB.instances.count_documents({}) + for submission in submissions: + sub_copy = nested_dict_from_keys(submission) - for idx, submission in enumerate(submissions): - submission[MongoHelper.USERFORM_ID] = self.mongo_userform_id - # Some data already provide `_id`. Use it if it is present. - # There could be conflicts if some submissions come with an id - # or others do not. 
- # MockMongo will raise a DuplicateKey error - if '_id' not in submission: - submission['_id'] = count + idx + 1 - settings.MONGO_DB.instances.insert_one(submission) - # Do not add `MongoHelper.USERFORM_ID` to original `submissions` - del submission[MongoHelper.USERFORM_ID] + if create_uuids: + if 'formhub/uuid' not in submission: + sub_copy['formhub'] = {'uuid': self.xform.uuid} - @property - def mongo_userform_id(self): - return f'{self.asset.owner.username}_{self.asset.uid}' + if 'meta/instanceID' not in submission: + try: + uuid_ = submission['_uuid'] + except KeyError: + uuid_ = str(uuid4()) + else: + uuid_ = submission['meta/instanceID'].replace('uuid:', '') - def redeploy(self, active: bool = None): - """ - Replace (overwrite) the deployment, and - optionally changing whether the deployment is active - """ - if active is None: - active = self.active + sub_copy['meta'] = {'instanceID': f'uuid:{uuid_}'} + submission['_uuid'] = uuid_ - self.store_data( - { - 'active': active, - 'version': self.asset.version_id, - } - ) - - self.set_asset_uid() - - def rename_enketo_id_key(self, previous_owner_username: str): - pass + assign_perm = False + try: + submitted_by = sub_copy['_submitted_by'] + except KeyError: + request.user = self.asset.owner + submitted_by = self.asset.owner.username + else: + if not submitted_by: + request.user = AnonymousUser() + submitted_by = '' + elif owner_username != submitted_by: + request.user = User.objects.get(username=submitted_by) + else: + request.user = self.asset.owner + + if not self.asset.has_perm(request.user, PERM_ADD_SUBMISSIONS): + # We want `request.user` to be able to add submissions + # (temporarily) to match `_submitted_by` value while saving + # in DB + self.asset.assign_perm(request.user, PERM_ADD_SUBMISSIONS) + assign_perm = True + + media_files = self._get_media_files(sub_copy) + + xml_string = dict2xform(sub_copy, self.xform.id_string) + xml_file = io.StringIO(xml_string) + error, instance = safe_create_instance( + owner_username, + xml_file, + media_files, + date_created_override=parse_datetime( + submission.get('_submission_time', '') # Returns None if empty + ), + request=request, + ) + if error: + raise Exception(error) - def set_active(self, active: bool): - self.save_to_db( - { - 'active': bool(active), - } - ) + # Inject (or update) real PK in submission + # FIXME TRY TO ASSIGN Instance.PK if it already exists + submission['_id'] = instance.pk - def set_asset_uid(self, **kwargs) -> bool: - backend_response = self.backend_response - backend_response.update( - { - 'kpi_asset_uid': self.asset.uid, - } - ) - self.store_data({'backend_response': backend_response}) + # Reassign attachment PKs + if '_attachments' in submission: + for idx, attachment in enumerate(instance.attachments.all()): + submission['_attachments'][idx]['id'] = attachment.pk - def set_enketo_open_rosa_server( - self, require_auth: bool, enketo_id: str = None - ): - pass + if assign_perm: + self.asset.remove_perm(request.user, PERM_ADD_SUBMISSIONS) def set_namespace(self, namespace): self.store_data( @@ -531,225 +139,23 @@ def set_namespace(self, namespace): } ) - def set_validation_status( - self, - submission_id: int, - user: settings.AUTH_USER_MODEL, - data: dict, - method: str, - ) -> dict: - self.validate_access_with_partial_perms( - user=user, - perm=PERM_VALIDATE_SUBMISSIONS, - submission_ids=[submission_id], - ) - - validation_status = {} - status_code = status.HTTP_204_NO_CONTENT - - if method != 'DELETE': - validation_status = { - 'timestamp': int(time.time()), 
- 'uid': data['validation_status.uid'], - 'by_whom': user.username, - } - status_code = status.HTTP_200_OK - - settings.MONGO_DB.instances.update_one( - {'_id': submission_id}, - {'$set': {'_validation_status': validation_status}}, - ) - return { - 'content_type': 'application/json', - 'status': status_code, - 'data': validation_status, - } - - def set_validation_statuses( - self, user: settings.AUTH_USER_MODEL, data: dict - ) -> dict: - """ - Bulk update validation status for provided submissions. - - `data` should contains either the submission ids or the query to - retrieve the subset of submissions chosen by then user. - If none of them are provided, all the submissions are selected - Examples: - {"submission_ids": [1, 2, 3]} - {"query":{"_validation_status.uid":"validation_status_not_approved"} - - """ - - submission_ids = self.validate_access_with_partial_perms( - user=user, - perm=PERM_VALIDATE_SUBMISSIONS, - submission_ids=data['submission_ids'], - query=data['query'], - ) - - if not submission_ids: - submission_ids = data['submission_ids'] - else: - # Reset query because submission ids are provided from partial - # perms validation - data['query'] = {} - - submissions = self.get_submissions( - user=user, - submission_ids=submission_ids, - query=data['query'], - fields=['_id'], - ) - - submission_count = 0 - - for submission in submissions: - if not data['validation_status.uid']: - validation_status = {} - else: - validation_status = { - 'timestamp': int(time.time()), - 'uid': data['validation_status.uid'], - 'by_whom': user.username, - } - settings.MONGO_DB.instances.update_one( - {'_id': submission['_id']}, - {'$set': {'_validation_status': validation_status}}, - ) - - submission_count += 1 - - return { - 'content_type': 'application/json', - 'status': status.HTTP_200_OK, - 'data': { - 'detail': f'{submission_count} submissions have been updated' - }, - } - - def store_submission( - self, user, xml_submission, submission_uuid, attachments=None, **kwargs - ): - """ - Return a mock response without actually storing anything - """ - - return { - 'uuid': submission_uuid, - 'status_code': status.HTTP_201_CREATED, - 'message': 'Successful submission', - 'updated_submission': xml_submission, - } - @property - def submission_count(self): - return self.calculated_submission_count(self.asset.owner) + def _backend_identifier(self): + return 'mock' - @property - def submission_list_url(self): - # This doesn't really need to be implemented. 
- # We keep it to stay close to `KobocatDeploymentBackend` - view_name = 'submission-list' - namespace = self.get_data('namespace', None) - if namespace is not None: - view_name = '{}:{}'.format(namespace, view_name) - return reverse( - view_name, kwargs={'parent_lookup_asset': self.asset.uid} - ) + def _get_media_files(self, submission): - @property - def submission_model(self): - class MockLoggerInstance: - @classmethod - def get_app_label_and_model_name(cls): - return 'mocklogger', 'instance' - - return MockLoggerInstance - - @staticmethod - @contextmanager - def suspend_submissions(user_ids: list[int]): try: - yield - finally: - pass - - def sync_media_files(self, file_type: str = AssetFile.FORM_MEDIA): - queryset = self._get_metadata_queryset(file_type=file_type) - for obj in queryset: - assert issubclass(obj.__class__, SyncBackendMediaInterface) - - def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'): - NLPUsageCounter.objects.filter( - asset=self.asset, user=self.asset.owner - ).update(user=new_owner) - - # Kobocat models are not implemented, but mocked in unit tests. - - def transfer_submissions_ownership( - self, previous_owner_username: str - ) -> bool: - - results = settings.MONGO_DB.instances.update_many( - {'_userform_id': f'{previous_owner_username}_{self.xform_id_string}'}, - { - '$set': { - '_userform_id': self.mongo_userform_id - } - }, - ) - - return ( - results.matched_count == 0 or - ( - results.matched_count > 0 - and results.matched_count == results.modified_count - ) - ) - - @property - def xform(self): - """ - Create related XForm on the fly - """ - if not ( - xform := XForm.objects.filter(id_string=self.asset.uid).first() - ): - UserProfile.objects.get_or_create(user_id=self.asset.owner_id) - xform = XForm() - xform.xml = self.asset.snapshot().xml - xform.user_id = self.asset.owner_id - xform.kpi_asset_uid = self.asset.uid - xform.save() - - return xform + attachments = submission['_attachments'] + except KeyError: + return [] - @property - def xform_id_string(self): - return self.xform.id_string + for attachment in attachments: + filename = attachment['filename'] - def _get_attachment_object( - self, - submission_xml: str, - submission_id: int, - attachment_id: Optional[int, str] = None, - filename: Optional[str] = None, - mimetype: Optional[str] = None, - ): - if not ( - attachment := Attachment.objects.filter( - Q(pk=attachment_id) | Q(media_file_basename=filename) - ).first() - ): - if not ( - instance := Instance.objects.filter(pk=submission_id).first() - ): - instance = Instance.objects.create( - pk=submission_id, xml=submission_xml, xform=self.xform - ) + if filename == 'path/to/image.png': + continue - attachment = Attachment() - attachment.instance = instance basename = os.path.basename(filename) file_ = os.path.join( settings.BASE_DIR, @@ -757,58 +163,12 @@ def _get_attachment_object( 'tests', basename ) - with open(file_, 'rb') as f: - attachment.media_file = ContentFile( - f.read(), name=upload_to(attachment, basename) + if not os.path.isfile(file_): + raise Exception( + f'File `filename` does not exist! 
Use `path/to/image.png` if' + f' you need a fake attachment, or ' + f'`audio_conversion_test_image.(jpg|3gp)` for real attachment' ) - if mimetype: - attachment.mimetype = mimetype - attachment.save() - - return attachment - @classmethod - def __prepare_bulk_update_data(cls, updates: dict) -> dict: - """ - Preparing the request payload for bulk updating of submissions - """ - # Sanitizing the payload of potentially destructive keys - sanitized_updates = copy.deepcopy(updates) - for key in updates: - if ( - key in cls.PROTECTED_XML_FIELDS - or '/' in key - and key.split('/')[0] in cls.PROTECTED_XML_FIELDS - ): - sanitized_updates.pop(key) - - return sanitized_updates - - @staticmethod - def prepare_bulk_update_response(kc_responses: list) -> dict: - total_update_attempts = len(kc_responses) - total_successes = total_update_attempts # all will be successful - return { - 'status': status.HTTP_200_OK, - 'data': { - 'count': total_update_attempts, - 'successes': total_successes, - 'failures': total_update_attempts - total_successes, - 'results': kc_responses, - }, - } - - @staticmethod - def __prepare_xml(submission: dict) -> dict: - submission_copy = copy.deepcopy(submission) - - for k, v in submission_copy.items(): - if '/' not in k: - continue - value = v - for key in reversed(k.strip('/').split('/')): - value = {key: value} - always_merger.merge(submission, value) - del submission[k] - - return submission + with open(file_, 'rb') as f: + yield ExtendedContentFile(f.read(), name=basename) diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index ca0c93e3e7..bf4933a52b 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -57,6 +57,7 @@ from kpi.exceptions import ( AttachmentNotFoundException, InvalidXFormException, + InvalidXPathException, SubmissionIntegrityError, SubmissionNotFoundException, XPathNotFoundException, @@ -157,7 +158,7 @@ def connect(self, active=False): self.store_data( { - 'backend': 'openrosa', + 'backend': self._backend_identifier, 'active': active, 'backend_response': { 'formid': self._xform.pk, @@ -209,7 +210,7 @@ def delete_submission( self, submission_id: int, user: settings.AUTH_USER_MODEL ) -> dict: """ - Delete a submission through KoBoCAT proxy + Delete a submission It returns a dictionary which can used as Response object arguments """ @@ -220,7 +221,13 @@ def delete_submission( submission_ids=[submission_id] ) - Instance.objects.filter(pk=submission_id).delete() + count, _ = Instance.objects.filter(pk=submission_id).delete() + if not count: + return { + 'data': {'detail': 'Submission not found'}, + 'content_type': 'application/json', + 'status': status.HTTP_404_NOT_FOUND, + } return { 'content_type': 'application/json', @@ -268,15 +275,15 @@ def duplicate_submission( self, submission_id: int, request: 'rest_framework.request.Request', ) -> dict: """ - Duplicates a single submission proxied through KoBoCAT. The submission - with the given `submission_id` is duplicated and the `start`, `end` and - `instanceID` parameters of the submission are reset before being posted - to KoBoCAT. + Duplicates a single submission. The submission with the given + `submission_id` is duplicated and the `start`, `end` and + `instanceID` parameters of the submission are reset before being + saving the instance. 
- Returns a dict with message response from KoBoCAT and uuid of created + Returns a dict with uuid of created submission if successful - """ + user = request.user self.validate_access_with_partial_perms( user=user, @@ -321,14 +328,16 @@ def duplicate_submission( ) safe_create_instance( - username=user.username, + username=self.asset.owner.username, xml_file=ContentFile(xml_tostring(xml_parsed)), media_files=attachments, uuid=_uuid, request=request, ) + + # Cast to list to help unit tests to pass. return self._rewrite_json_attachment_urls( - next(self.get_submissions(user, query={'_uuid': _uuid})), request + list(self.get_submissions(user, query={'_uuid': _uuid}))[0], request ) def edit_submission( @@ -477,7 +486,11 @@ def get_attachment( if xpath: submission_root = fromstring_preserve_root_xmlns(submission_xml) - element = submission_root.find(xpath) + try: + element = submission_root.find(xpath) + except KeyError: + raise InvalidXPathException + if element is None: raise XPathNotFoundException attachment_filename = element.text @@ -701,14 +714,16 @@ def get_orphan_postgres_submissions(self) -> Optional[QuerySet, bool]: return None def get_submission_detail_url(self, submission_id: int) -> str: - url = f'{self.submission_list_url}/{submission_id}' - return url + 1/0 + #url = f'{self.submission_list_url}/{submission_id}' + #return url def get_submission_validation_status_url(self, submission_id: int) -> str: - url = '{detail_url}/validation_status'.format( - detail_url=self.get_submission_detail_url(submission_id) - ) - return url + 1/0 + #url = '{detail_url}/validation_status'.format( + # detail_url=self.get_submission_detail_url(submission_id) + #) + #return url def get_submissions( self, @@ -848,7 +863,7 @@ def redeploy(self, active=None): # after calling this method in `DeployableMixin.deploy()` self.store_data( { - 'backend': 'openrosa', + 'backend': self._backend_identifier, 'active': active, 'backend_response': { 'formid': self.xform.pk, @@ -1115,7 +1130,7 @@ def set_validation_statuses( # TODO handle errors update_instances = set_instance_validation_statuses( - self.xform, data, user + self.xform, data, user.username ) return { @@ -1134,7 +1149,7 @@ def store_submission( ) return safe_create_instance( - username=user.username, + username=self.asset.owner.username, xml_file=ContentFile(xml_submission), media_files=media_files, uuid=submission_uuid, @@ -1174,11 +1189,12 @@ def submission_count_since_date(self, start_date=None): @property def submission_list_url(self): - url = '{kc_base}/api/v1/data/{formid}'.format( - kc_base=settings.KOBOCAT_INTERNAL_URL, - formid=self.backend_response['formid'] - ) - return url + 1/0 + #url = '{kc_base}/api/v1/data/{formid}'.format( + # kc_base=settings.KOBOCAT_INTERNAL_URL, + # formid=self.backend_response['formid'] + #) + #return url @property def submission_model(self): @@ -1377,6 +1393,10 @@ def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'): + self.xform.attachment_storage_bytes ) + @property + def _backend_identifier(self): + return 'openrosa' + def _delete_openrosa_metadata( self, metadata_file_: dict, file_: Union[AssetFile, PairedData] = None ): diff --git a/kpi/fixtures/test_data.json b/kpi/fixtures/test_data.json index 0ea399393e..2c6fee0074 100644 --- a/kpi/fixtures/test_data.json +++ b/kpi/fixtures/test_data.json @@ -75,6 +75,33 @@ "model": "kobo_auth.user", "pk": 3 }, + { + "fields": { + "name": "Administrator", + "validated_password": true, + "user": 1 + }, + "model": "main.userprofile", + "pk": 1 + }, + { + "fields": 
{ + "name": "Some User", + "validated_password": true, + "user": 2 + }, + "model": "main.userprofile", + "pk": 2 + }, + { + "fields": { + "name": "Another User", + "validated_password": true, + "user": 3 + }, + "model": "main.userprofile", + "pk": 3 + }, { "model": "kpi.asset", "pk": 1, diff --git a/kpi/signals.py b/kpi/signals.py index 89be23937d..2c41091ad7 100644 --- a/kpi/signals.py +++ b/kpi/signals.py @@ -4,68 +4,17 @@ from django.contrib.auth.models import AnonymousUser from django.db.models.signals import post_save, post_delete from django.dispatch import receiver -from rest_framework.authtoken.models import Token from taggit.models import Tag -from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.hook.models.hook import Hook from kpi.constants import PERM_ADD_SUBMISSIONS - -from kpi.deployment_backends.kc_access.utils import ( - grant_kc_model_level_perms, - kc_transaction_atomic, -) from kpi.exceptions import DeploymentNotFound from kpi.models import Asset, TagUid from kpi.utils.object_permission import post_assign_perm, post_remove_perm from kpi.utils.permissions import ( - grant_default_model_level_perms, is_user_anonymous, ) -@receiver(post_save, sender=User) -def create_auth_token(sender, instance=None, created=False, **kwargs): - if is_user_anonymous(instance): - return - - if created: - Token.objects.get_or_create(user_id=instance.pk) - - -@receiver(post_save, sender=User) -def default_permissions_post_save(sender, instance, created, raw, **kwargs): - """ - Users must have both model-level and object-level permissions to satisfy - DRF, so assign the newly-created user all available collection and asset - permissions at the model level - """ - if raw: - # `raw` means we can't touch (so make sure your fixtures include - # all necessary permissions!) - return - if not created: - # We should only grant default permissions when the user is first - # created - return - grant_default_model_level_perms(instance) - - -@receiver(post_save, sender=User) -def save_kobocat_user(sender, instance, created, raw, **kwargs): - """ - Sync auth_user table between KPI and KC, and, if the user is newly created, - grant all KoboCAT model-level permissions for the content types listed in - `settings.KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES` - """ - - if not settings.TESTING: - with kc_transaction_atomic(): - instance.sync_to_openrosa_db() - if created: - grant_kc_model_level_perms(instance) - - @receiver(post_save, sender=Tag) def tag_uid_post_save(sender, instance, created, raw, **kwargs): """ Make sure we have a TagUid object for each newly-created Tag """ diff --git a/kpi/tests/api/v1/test_api_assets.py b/kpi/tests/api/v1/test_api_assets.py index 70d5a998d3..74dedbd253 100644 --- a/kpi/tests/api/v1/test_api_assets.py +++ b/kpi/tests/api/v1/test_api_assets.py @@ -256,7 +256,7 @@ def setUp(self): self.client.login(username='someuser', password='someuser') self.user = User.objects.get(username='someuser') self.asset = Asset.objects.create( - content={'survey': [{"type": "text", "name": "q1"}]}, + content={'survey': [{'type': 'text', 'label': 'q1', 'name': 'q1'}]}, owner=self.user, asset_type='survey', name='тєѕт αѕѕєт' @@ -264,11 +264,12 @@ def setUp(self): self.asset.deploy(backend='mock', active=True) self.asset.save() v_uid = self.asset.latest_deployed_version.uid - submission = { + self.submission = { '__version__': v_uid, - 'q1': '¿Qué tal?' 
+ 'q1': '¿Qué tal?', + '_submission_time': '2024-08-07T23:42:21', } - self.asset.deployment.mock_submissions([submission]) + self.asset.deployment.mock_submissions([self.submission], ) def test_owner_can_create_export(self): post_url = reverse('exporttask-list') @@ -292,9 +293,8 @@ def test_owner_can_create_export(self): version_uid = self.asset.latest_deployed_version_uid expected_content = ''.join([ '"q1";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"\r\n', - f'"¿Qué tal?";"1";"";"";"";"";"";"";"{version_uid}";"";"1"\r\n', + f'"¿Qué tal?";"{self.submission["_id"]}";"{self.submission["_uuid"]}";"2024-08-07T23:42:21";"";"";"submitted_via_web";"someuser";"{version_uid}";"";"1"\r\n', ]) - self.assertEqual(result_content, expected_content) return detail_response diff --git a/kpi/tests/api/v1/test_api_submissions.py b/kpi/tests/api/v1/test_api_submissions.py index 2504f6a128..4c5ee14ae0 100644 --- a/kpi/tests/api/v1/test_api_submissions.py +++ b/kpi/tests/api/v1/test_api_submissions.py @@ -1,6 +1,7 @@ # coding: utf-8 import pytest from django.conf import settings +from django.urls import reverse from rest_framework import status from kpi.constants import ( @@ -27,14 +28,18 @@ def test_retrieve_submission_with_partial_permissions_as_anotheruser(self): def test_list_submissions_as_owner(self): response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, self.submissions) + expected_ids = [s['_id'] for s in self.submissions] + response_ids = [r['_id'] for r in response.data] + assert sorted(response_ids) == sorted(expected_ids) def test_list_submissions_shared_as_anotheruser(self): self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, self.submissions) + expected_ids = [s['_id'] for s in self.submissions] + response_ids = [r['_id'] for r in response.data] + assert sorted(response_ids) == sorted(expected_ids) def test_list_submissions_limit(self): limit = settings.SUBMISSION_LIST_LIMIT @@ -42,7 +47,7 @@ def test_list_submissions_limit(self): asset = Asset.objects.create( name='Lots of submissions', owner=self.asset.owner, - content={'survey': [{'name': 'q', 'type': 'integer'}]}, + content={'survey': [{'label': 'q', 'name': 'q', 'type': 'integer'}]}, ) asset.deploy(backend='mock', active=True) asset.deployment.set_namespace(self.URL_NAMESPACE) @@ -56,15 +61,21 @@ def test_list_submissions_limit(self): asset.deployment.mock_submissions(submissions) # Server-wide limit should apply if no limit specified - response = self.client.get( - asset.deployment.submission_list_url, {'format': 'json'} + url = reverse( + self._get_endpoint('submission-list'), + kwargs={'format': 'json', 'parent_lookup_asset': asset.uid}, ) + response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), limit) # Limit specified in query parameters should not be able to exceed # server-wide limit + url = reverse( + self._get_endpoint('submission-list'), + kwargs={'parent_lookup_asset': asset.uid, 'format': 'json'}, + ) response = self.client.get( - asset.deployment.submission_list_url, + url, {'limit': limit + excess, 'format': 'json'} ) @@ -79,7 +90,7 @@ def 
test_list_submissions_as_owner_with_params(self): 'limit': 5, 'sort': '{"q1": -1}', 'fields': '["q1", "_submitted_by"]', - 'query': '{"_submitted_by": {"$in": ["", "someuser", "another"]}}', + 'query': '{"_submitted_by": {"$in": ["unknown", "someuser", "another"]}}', } ) # ToDo add more assertions. E.g. test whether sort, limit, start really work @@ -88,8 +99,13 @@ def test_list_submissions_as_owner_with_params(self): def test_delete_submission_as_owner(self): submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url( - submission['_id']) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -101,7 +117,13 @@ def test_delete_submission_shared_as_anotheruser(self): self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) response = self.client.get(self.submission_list_url, {'format': 'json'}) diff --git a/kpi/tests/api/v2/test_api_asset_counts.py b/kpi/tests/api/v2/test_api_asset_counts.py index 617dc6d11e..d5e3ffe8bc 100644 --- a/kpi/tests/api/v2/test_api_asset_counts.py +++ b/kpi/tests/api/v2/test_api_asset_counts.py @@ -1,4 +1,5 @@ from django.urls import reverse +from django.test import override_settings from rest_framework import status from kobo.apps.kobo_auth.shortcuts import User @@ -56,7 +57,11 @@ def setUp(self): self.asset.deployment.mock_submissions(submissions) + @override_settings(DEFAULT_SUBMISSIONS_COUNT_NUMBER_OF_DAYS=10000) def test_count_endpoint_owner(self): + # Submission submitted time is 2022-09-12. + # DEFAULT_SUBMISSIONS_COUNT_NUMBER_OF_DAYS must be big enough to include + # this date. count_url = reverse( self._get_endpoint('asset-counts-list'), kwargs={'parent_lookup_asset': self.asset.uid} @@ -102,7 +107,11 @@ def test_count_endpoint_another_user_no_perms(self): response = self.client.get(count_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + @override_settings(DEFAULT_SUBMISSIONS_COUNT_NUMBER_OF_DAYS=10000) def test_count_endpoint_another_with_perms(self): + # Submission submitted time is 2022-09-12. + # DEFAULT_SUBMISSIONS_COUNT_NUMBER_OF_DAYS must be big enough to include + # this date. 
count_url = reverse( self._get_endpoint('asset-counts-list'), kwargs={'parent_lookup_asset': self.asset.uid} diff --git a/kpi/tests/api/v2/test_api_asset_usage.py b/kpi/tests/api/v2/test_api_asset_usage.py index 3a34712f8c..261db675b4 100644 --- a/kpi/tests/api/v2/test_api_asset_usage.py +++ b/kpi/tests/api/v2/test_api_asset_usage.py @@ -19,12 +19,7 @@ class AssetUsageAPITestCase(BaseAssetTestCase): URL_NAMESPACE = ROUTER_URL_NAMESPACE def setUp(self): - try: - self.anotheruser = User.objects.get(username='anotheruser') - except: - self.anotheruser = User.objects.create_user( - username='anotheruser', password='anotheruser' - ) + self.anotheruser = User.objects.get(username='anotheruser') self.client.login(username='anotheruser', password='anotheruser') def __add_nlp_trackers(self): @@ -133,7 +128,11 @@ def __create_asset(self): self.asset.save() self.asset.deployment.set_namespace(self.URL_NAMESPACE) - self.submission_list_url = self.asset.deployment.submission_list_url + self.submission_list_url = reverse( + self._get_endpoint('submission-list'), + kwargs={'parent_lookup_asset': self.asset.uid, 'format': 'json'}, + ) + self._deployment = self.asset.deployment def __expected_file_size(self): diff --git a/kpi/tests/api/v2/test_api_attachments.py b/kpi/tests/api/v2/test_api_attachments.py index 0f3c891cf7..49f83ff3b8 100644 --- a/kpi/tests/api/v2/test_api_attachments.py +++ b/kpi/tests/api/v2/test_api_attachments.py @@ -1,22 +1,18 @@ import uuid -from django.conf import settings -from django.core.files.base import File from django.http import QueryDict from django.urls import reverse +from mock import patch from rest_framework import status from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.main.models import UserProfile -from kobo.apps.openrosa.apps.logger.models import XForm, Instance, Attachment -from kobo.apps.openrosa.apps.logger.models.attachment import upload_to -from kobo.apps.openrosa.apps.viewer.models import ParsedInstance from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) from kpi.models import Asset from kpi.tests.base_test_case import BaseAssetTestCase from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE +from kpi.tests.utils.mock import guess_type_mock class AttachmentApiTests(BaseAssetTestCase): @@ -43,8 +39,12 @@ def setUp(self) -> None: self.__add_submissions() self.asset.deployment.set_namespace(self.URL_NAMESPACE) - self.submission_list_url = self.asset.deployment.submission_list_url + self.submission_list_url = reverse( + self._get_endpoint('submission-list'), + kwargs={'format': 'json', 'parent_lookup_asset': self.asset.uid}, + ) self._deployment = self.asset.deployment + self.submission_id = self.submissions[0]['_id'] def __add_submissions(self): submissions = [] @@ -59,13 +59,11 @@ def __add_submissions(self): 'meta/instanceID': f'uuid:{_uuid}', '_attachments': [ { - 'id': 1, 'download_url': 'http://testserver/someuser/audio_conversion_test_clip.3gp', 'filename': 'someuser/audio_conversion_test_clip.3gp', 'mimetype': 'video/3gpp', }, { - 'id': 2, 'download_url': 'http://testserver/someuser/audio_conversion_test_image.jpg', 'filename': 'someuser/audio_conversion_test_image.jpg', 'mimetype': 'image/jpeg', @@ -74,7 +72,11 @@ def __add_submissions(self): '_submitted_by': 'someuser' } submissions.append(submission) - self.asset.deployment.mock_submissions(submissions) + + with patch('mimetypes.guess_type') as guess_mock: + guess_mock.side_effect = guess_type_mock + 
self.asset.deployment.mock_submissions(submissions) + self.submissions = submissions def test_convert_mp4_to_mp3(self): @@ -90,11 +92,12 @@ def test_convert_mp4_to_mp3(self): self._get_endpoint('attachment-list'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_data': 1, + 'parent_lookup_data': self.submission_id, }, ), querystring=query_dict.urlencode() ) + response = self.client.get(url) assert response.status_code == status.HTTP_200_OK assert response['Content-Type'] == 'audio/mpeg' @@ -112,7 +115,7 @@ def test_reject_image_with_conversion(self): self._get_endpoint('attachment-list'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_data': 1, + 'parent_lookup_data': self.submission_id, }, ), querystring=query_dict.urlencode() @@ -136,7 +139,7 @@ def test_get_mp4_without_conversion(self): self._get_endpoint('attachment-list'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_data': 1, + 'parent_lookup_data': self.submission_id, }, ), querystring=query_dict.urlencode() @@ -147,12 +150,13 @@ def test_get_mp4_without_conversion(self): assert response['Content-Type'] == 'video/3gpp' def test_get_attachment_with_id(self): + attachment_id = self.submissions[0]['_attachments'][0]['id'] url = reverse( self._get_endpoint('attachment-detail'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_data': 1, - 'pk': 1, + 'parent_lookup_data': self.submission_id, + 'pk': attachment_id, }, ) @@ -177,14 +181,16 @@ def test_duplicate_attachment_with_submission(self): original_file = response.data # Duplicate the submission - duplicate_url = reverse( - self._get_endpoint('submission-duplicate'), - kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'pk': submission['_id'], - }, - ) - response = self.client.post(duplicate_url, {'format': 'json'}) + with patch('mimetypes.guess_type') as guess_mock: + guess_mock.side_effect = guess_type_mock + duplicate_url = reverse( + self._get_endpoint('submission-duplicate'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.post(duplicate_url, {'format': 'json'}) duplicate_submission = response.data # Increment the max attachment id of the original submission to get the @@ -205,7 +211,7 @@ def test_duplicate_attachment_with_submission(self): duplicate_file = response.data # Ensure that the files are the same - assert original_file == duplicate_file + assert original_file.read() == duplicate_file.read() def test_xpath_not_found(self): query_dict = QueryDict('', mutable=True) @@ -220,7 +226,7 @@ def test_xpath_not_found(self): self._get_endpoint('attachment-list'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_data': 1, + 'parent_lookup_data': self.submission_id, }, ), querystring=query_dict.urlencode() @@ -244,7 +250,7 @@ def test_invalid_xpath_syntax(self): self._get_endpoint('attachment-list'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_data': 1, + 'parent_lookup_data': self.submission_id, }, ), querystring=query_dict.urlencode() @@ -262,7 +268,7 @@ def test_get_attachment_with_submission_uuid(self): kwargs={ 'parent_lookup_asset': self.asset.uid, 'parent_lookup_data': submission['_uuid'], - 'pk': 1, + 'pk': submission['_attachments'][0]['id'], }, ) @@ -271,86 +277,17 @@ def test_get_attachment_with_submission_uuid(self): assert response['Content-Type'] == 'video/3gpp' def test_thumbnail_creation_on_demand(self): - media_file = settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg' - - xform_xml = 
f""" - - - Project with attachments - - - <{self.asset.uid} id="{self.asset.uid}"> - - - - - <__version__/> - - - - - - - - - - - - - - - - - - """ - - instance_xml = f""" - <{self.asset.uid} xmlns:jr="http://openrosa.org/javarosa" xmlns:orx="http://openrosa.org/xforms" id="{self.asset.uid}"> - - 027e8acb31b24acebb7f6b2a74ac1ff3 - - audio_conversion_test_image.jpg - <__version__>vd3dpf3fL2C8abWG4EPJWC - - uuid:ba82fbca-9a05-45c7-afb6-295c90f838e5 - - - """ - - UserProfile.objects.get_or_create(user=self.someuser) - xform = XForm.objects.create( - user=self.someuser, - xml=xform_xml, - id_string=self.asset.uid, - kpi_asset_uid=self.asset.uid - ) - instance = Instance.objects.create(xform=xform, xml=instance_xml) - attachment = Attachment.objects.create(instance=instance) - attachment.media_file = File( - open(media_file, 'rb'), upload_to(attachment, media_file) - ) - attachment.save() - - pi = ParsedInstance.objects.create(instance=instance) - self.asset.deployment.mock_submissions( - [pi.to_dict_for_mongo()] - ) - detail_url = reverse( + submission = self.submissions[0] + url = reverse( self._get_endpoint('attachment-detail'), - args=( - self.asset.uid, - instance.pk, - attachment.pk - ), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_data': submission['_id'], + 'pk': submission['_attachments'][1]['id'], + }, ) - self.client.get(detail_url) - filename = attachment.media_file.name.replace('.jpg', '') + response = self.client.get(url) + filename = response.data.name.replace('.jpg', '') thumbnail = f'{filename}-small.jpg' # Thumbs should not exist yet self.assertFalse(default_storage.exists(thumbnail)) @@ -359,14 +296,11 @@ def test_thumbnail_creation_on_demand(self): self._get_endpoint('attachment-thumb'), args=( self.asset.uid, - instance.pk, - attachment.pk, + submission['_id'], + submission['_attachments'][1]['id'], 'small' ), ) self.client.get(thumb_url) # Thumbs should exist self.assertTrue(default_storage.exists(thumbnail)) - - # Clean-up - attachment.delete() diff --git a/kpi/tests/api/v2/test_api_service_usage.py b/kpi/tests/api/v2/test_api_service_usage.py index 63826cd516..0d56a71133 100644 --- a/kpi/tests/api/v2/test_api_service_usage.py +++ b/kpi/tests/api/v2/test_api_service_usage.py @@ -9,10 +9,6 @@ from rest_framework import status from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.models import ( - XForm, - DailyXFormSubmissionCounter, -) from kobo.apps.trackers.models import NLPUsageCounter from kpi.models import Asset from kpi.tests.base_test_case import BaseAssetTestCase @@ -27,7 +23,6 @@ class ServiceUsageAPIBase(BaseAssetTestCase): URL_NAMESPACE = ROUTER_URL_NAMESPACE - xform = None counter = None attachment_id = 0 @@ -69,7 +64,10 @@ def _create_asset(self, user=None): self.asset.save() self.asset.deployment.set_namespace(self.URL_NAMESPACE) - self.submission_list_url = self.asset.deployment.submission_list_url + self.submission_list_url = reverse( + self._get_endpoint('submission-list'), + kwargs={'format': 'json', 'parent_lookup_asset': self.asset.uid}, + ) self._deployment = self.asset.deployment def add_nlp_trackers(self): @@ -140,63 +138,9 @@ def add_submissions(self, count=2): submissions.append(submission) self.asset.deployment.mock_submissions(submissions, flush_db=False) - self.update_xform_counters(self.asset, submissions=count) - - def update_xform_counters(self, asset: Asset, submissions: int = 0): - """ - Create/update the daily submission counter and the shadow xform we use to query it - """ - today = 
timezone.now() - if self.xform: - self.xform.attachment_storage_bytes += ( - self.expected_file_size() * submissions - ) - self.xform.save() - else: - xform_xml = ( - f'' - f'' - f'' - f' XForm test' - f' ' - f' ' - f' <{asset.uid} id="{asset.uid}" />' - f' ' - f' ' - f'' - f'' - f'' - f'' - ) - self.xform = XForm.objects.create( - attachment_storage_bytes=( - self.expected_file_size() * submissions - ), - kpi_asset_uid=asset.uid, - date_created=today, - date_modified=today, - user_id=asset.owner_id, - xml=xform_xml, - json={} - ) - self.xform.save() - - if self.counter: - self.counter.counter += submissions - self.counter.save() - else: - self.counter = ( - DailyXFormSubmissionCounter.objects.create( - date=today.date(), - counter=submissions, - xform=self.xform, - user_id=asset.owner_id, - ) - ) - self.counter.save() - - def expected_file_size(self): + @staticmethod + def expected_file_size(): """ Calculate the expected combined file size for the test audio clip and image """ @@ -270,14 +214,16 @@ def test_multiple_forms(self): def test_service_usages_with_projects_in_trash_bin(self): self.test_multiple_forms() # Simulate trash bin - self.asset.pending_delete = True - self.asset.save( - update_fields=['pending_delete'], - create_version=False, - adjust_content=False, - ) - self.xform.pending_delete = True - self.xform.save(update_fields=['pending_delete']) + for asset in self.anotheruser.assets.all(): + asset.pending_delete = True + asset.save( + update_fields=['pending_delete'], + create_version=False, + adjust_content=False, + ) + if asset.has_deployment: + asset.deployment.xform.pending_delete = True + asset.deployment.xform.save(update_fields=['pending_delete']) # Retry endpoint url = reverse(self._get_endpoint('service-usage-list')) diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 9a1c0409dd..9ffad5722d 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -5,7 +5,6 @@ import mock import random import string -import time import uuid from datetime import datetime try: @@ -15,13 +14,14 @@ import pytest import responses -from dict2xml import dict2xml from django.conf import settings from django.urls import reverse from django_digest.test import Client as DigestClient from rest_framework import status from kobo.apps.audit_log.models import AuditLog +from kobo.apps.openrosa.apps.main.models.user_profile import UserProfile +from kobo.apps.openrosa.libs.utils.logger_tools import dict2xform from kobo.apps.kobo_auth.shortcuts import User from kpi.constants import ( ASSET_TYPE_SURVEY, @@ -33,6 +33,7 @@ PERM_VALIDATE_SUBMISSIONS, PERM_VIEW_ASSET, PERM_VIEW_SUBMISSIONS, + SUBMISSION_FORMAT_TYPE_JSON, SUBMISSION_FORMAT_TYPE_XML, ) from kpi.models import Asset @@ -48,15 +49,9 @@ ) -def dict2xml_with_encoding_declaration(*args, **kwargs): - return '' + dict2xml( - *args, **kwargs - ) - - -def dict2xml_with_namespace(*args, **kwargs): - xml_string = dict2xml(*args, **kwargs) - xml_root = lxml.etree.fromstring(xml_string) +def dict2xform_with_namespace(submission: dict, xform_id_string: str) -> str: + xml_string = dict2xform(submission, xform_id_string) + xml_root = lxml.etree.fromstring(xml_string.encode()) xml_root.set('xmlns', 'http://opendatakit.org/submissions') return lxml.etree.tostring(xml_root).decode() @@ -73,21 +68,27 @@ class BaseSubmissionTestCase(BaseTestCase): URL_NAMESPACE = ROUTER_URL_NAMESPACE def setUp(self): - self.client.login(username="someuser", password="someuser") - 
self.someuser = User.objects.get(username="someuser") - self.anotheruser = User.objects.get(username="anotheruser") + self.client.login(username='someuser', password='someuser') + self.someuser = User.objects.get(username='someuser') + self.anotheruser = User.objects.get(username='anotheruser') + self.unknown_user = User.objects.create(username='unknown_user') + UserProfile.objects.create(user=self.unknown_user) + content_source_asset = Asset.objects.get(id=1) - self.asset = Asset.objects.create(content=content_source_asset.content, - owner=self.someuser, - asset_type='survey') + self.asset = Asset.objects.create( + content=content_source_asset.content, + owner=self.someuser, + asset_type='survey', + ) self.asset.deploy(backend='mock', active=True) self.asset.save() - self.__add_submissions() - self.asset.deployment.set_namespace(self.URL_NAMESPACE) - self.submission_list_url = self.asset.deployment.submission_list_url + self.submission_list_url = reverse( + self._get_endpoint('submission-list'), + kwargs={'parent_lookup_asset': self.asset.uid, 'format': 'json'}, + ) self._deployment = self.asset.deployment def get_random_submission(self, user: settings.AUTH_USER_MODEL) -> dict: @@ -98,26 +99,20 @@ def get_random_submissions( ) -> list: """ Get random submissions within all generated submissions. - If user is the owner, we only return submissions submitted by unknown. + + If user is not the owner, we only return submissions submitted by them. It is useful to ensure restricted users fail tests with forbidden submissions. """ query = {} - if self.asset.owner == user: - query = {'_submitted_by': ''} + if self.asset.owner != user: + query = {'_submitted_by': user.username} submissions = self.asset.deployment.get_submissions(user, query=query) random.shuffle(submissions) return submissions[:limit] - def _log_in_as_another_user(self): - """ - Helper to switch user from `someuser` to `anotheruser`. - """ - self.client.logout() - self.client.login(username="anotheruser", password="anotheruser") - - def __add_submissions(self): + def _add_submissions(self, other_fields: dict = None): letters = string.ascii_letters submissions = [] v_uid = self.asset.latest_deployed_version.uid @@ -125,7 +120,7 @@ def __add_submissions(self): self.submissions_submitted_by_unknown = [] self.submissions_submitted_by_anotheruser = [] - submitted_by_choices = ['', 'someuser', 'anotheruser'] + submitted_by_choices = ['unknown_user', 'someuser', 'anotheruser'] for i in range(20): # We want to have at least one submission from each if i <= 2: @@ -139,20 +134,15 @@ def __add_submissions(self): 'q2': ''.join(random.choice(letters) for l in range(10)), 'meta/instanceID': f'uuid:{uuid_}', '_uuid': str(uuid_), - '_validation_status': { - 'by_whom': 'someuser', - 'timestamp': int(time.time()), - 'uid': 'validation_status_on_hold', - 'color': '#0000ff', - 'label': 'On Hold' - }, '_submitted_by': submitted_by } + if other_fields is not None: + submission.update(**other_fields) if submitted_by == 'someuser': self.submissions_submitted_by_someuser.append(submission) - if submitted_by == '': + if submitted_by == 'unknown_user': self.submissions_submitted_by_unknown.append(submission) if submitted_by == 'anotheruser': @@ -163,6 +153,13 @@ def __add_submissions(self): self.asset.deployment.mock_submissions(submissions) self.submissions = submissions + def _log_in_as_another_user(self): + """ + Helper to switch user from `someuser` to `anotheruser`. 
+ """ + self.client.logout() + self.client.login(username='anotheruser', password='anotheruser') + class BulkDeleteSubmissionsApiTests(BaseSubmissionTestCase): @@ -170,6 +167,8 @@ class BulkDeleteSubmissionsApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() + + self._add_submissions() self.submission_list_url = reverse( self._get_endpoint('submission-list'), kwargs={'parent_lookup_asset': self.asset.uid, 'format': 'json'}, @@ -187,9 +186,9 @@ def test_delete_submissions_as_owner(self): someuser can delete their own data """ data = {'payload': {'confirm': True}} - response = self.client.delete(self.submission_bulk_url, - data=data, - format='json') + response = self.client.delete( + self.submission_bulk_url, data=data, format='json' + ) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get(self.submission_list_url, {'format': 'json'}) @@ -341,7 +340,7 @@ def test_delete_some_allowed_submissions_with_partial_perms_as_anotheruser(self) ) # Try first submission submitted by unknown - random_submissions = self.get_random_submissions(self.asset.owner, 3) + random_submissions = self.get_random_submissions(self.unknown_user, 3) data = { 'payload': { 'submission_ids': [rs['_id'] for rs in random_submissions] @@ -425,6 +424,10 @@ def test_cannot_delete_view_only_submissions_with_partial_perms_as_anotheruser(s class SubmissionApiTests(BaseSubmissionTestCase): + def setUp(self): + super().setUp() + self._add_submissions() + def test_cannot_create_submission(self): """ someuser is the owner of the project. @@ -459,8 +462,9 @@ def test_list_submissions_as_owner(self): """ response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data.get('results'), self.submissions) - self.assertEqual(response.data.get('count'), len(self.submissions)) + response_ids = [r['_id'] for r in response.data.get('results')] + submissions_ids = [s['_id'] for s in self.submissions] + self.assertEqual(sorted(response_ids), sorted(submissions_ids)) def test_list_submissions_as_owner_with_params(self): """ @@ -475,7 +479,7 @@ def test_list_submissions_as_owner_with_params(self): 'limit': 5, 'sort': '{"q1": -1}', 'fields': '["q1", "_submitted_by"]', - 'query': '{"_submitted_by": {"$in": ["", "someuser", "another"]}}', + 'query': '{"_submitted_by": {"$in": ["unknown", "someuser", "another"]}}', } ) # ToDo add more assertions. E.g. 
test whether sort, limit, start really work @@ -492,7 +496,7 @@ def test_list_submissions_limit(self): asset = Asset.objects.create( name='Lots of submissions', owner=self.asset.owner, - content={'survey': [{'name': 'q', 'type': 'integer'}]}, + content={'survey': [{'label': 'q', 'type': 'integer'}]}, ) asset.deploy(backend='mock', active=True) asset.deployment.set_namespace(self.URL_NAMESPACE) @@ -504,17 +508,19 @@ def test_list_submissions_limit(self): } for i in range(limit + excess) ] asset.deployment.mock_submissions(submissions) + submission_list_url = reverse( + self._get_endpoint('submission-list'), + kwargs={'parent_lookup_asset': asset.uid, 'format': 'json'}, + ) # Server-wide limit should apply if no limit specified - response = self.client.get( - asset.deployment.submission_list_url, {'format': 'json'} - ) + response = self.client.get(submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data['results']), limit) # Limit specified in query parameters should not be able to exceed # server-wide limit response = self.client.get( - asset.deployment.submission_list_url, + submission_list_url, {'limit': limit + excess, 'format': 'json'} ) @@ -541,8 +547,9 @@ def test_list_submissions_shared_as_anotheruser(self): self._log_in_as_another_user() response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data.get('results'), self.submissions) - self.assertEqual(response.data.get('count'), len(self.submissions)) + response_ids = [r['_id'] for r in response.data.get('results')] + submissions_ids = [s['_id'] for s in self.submissions] + self.assertEqual(sorted(response_ids), sorted(submissions_ids)) def test_list_submissions_with_partial_permissions_as_anotheruser(self): """ @@ -663,18 +670,23 @@ def test_list_query_elem_match(self): """ Ensure query is able to filter on an array """ - submission = self.submissions[0] + submission = copy.deepcopy(self.submissions_submitted_by_someuser[0]) + del submission['_id'] + uuid_ = str(uuid.uuid4()) + submission['meta/instanceID'] = f'uuid:{uuid_}' + submission['_uuid'] = str(uuid_) group = 'group_lx4sf58' question = 'q3' submission[group] = [ { - f'{group}/{question}': 'whap.gif', + f'{question}': 'whap.gif', }, ] - self.asset.deployment.mock_submissions(self.submissions) + self.asset.deployment.mock_submissions([submission]) + # FIXME with attachments data = { - 'query': f'{{"{group}":{{"$elemMatch":{{"{group}/{question}":{{"$exists":true}}}}}}}}', + 'query': f'{{"{group}/{question}":{{"$exists":true}}}}', 'format': 'json', } response = self.client.get(self.submission_list_url, data) @@ -691,9 +703,14 @@ def test_retrieve_submission_as_owner(self): someuser can view one of their submission. """ submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) - - response = self.client.get(url, {"format": "json"}) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, submission) @@ -703,11 +720,17 @@ def test_retrieve_submission_by_uuid(self): someuser can view one of their submission. 
""" submission = self.submissions[0] - url = self.asset.deployment.get_submission_detail_url(submission['_uuid']) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_uuid'], + }, + ) response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, submission) + self.assertEqual(response.data['_id'], submission['_id']) def test_retrieve_submission_not_shared_as_anotheruser(self): """ @@ -716,9 +739,15 @@ def test_retrieve_submission_not_shared_as_anotheruser(self): someuser's data existence should not be revealed. """ self._log_in_as_another_user() - submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) - response = self.client.get(url, {"format": "json"}) + submission = self.get_random_submission(self.unknown_user) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_retrieve_submission_shared_as_anotheruser(self): @@ -728,9 +757,15 @@ def test_retrieve_submission_shared_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() - submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) - response = self.client.get(url, {"format": "json"}) + submission = self.get_random_submission(self.unknown_user) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, submission) @@ -748,15 +783,27 @@ def test_retrieve_submission_with_partial_permissions_as_anotheruser(self): partial_perms=partial_perms) # Try first submission submitted by unknown - submission = self.get_random_submission(self.asset.owner) - url = self._deployment.get_submission_detail_url(submission['_id']) - response = self.client.get(url, {"format": "json"}) + submission = self.get_random_submission(self.unknown_user) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) # Try second submission submitted by another submission = self.submissions_submitted_by_anotheruser[0] - url = self._deployment.get_submission_detail_url(submission['_id']) - response = self.client.get(url, {"format": "json"}) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_delete_submission_as_owner(self): @@ -765,7 +812,13 @@ def test_delete_submission_as_owner(self): someuser can delete their own data. 
""" submission = self.submissions_submitted_by_someuser[0] - url = self.asset.deployment.get_submission_detail_url(submission['_id']) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -800,7 +853,13 @@ def test_delete_not_existing_submission_as_owner(self): someuser should receive a 404 if they try to delete a non-existing submission. """ - url = self.asset.deployment.get_submission_detail_url(9999) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': 9999, + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -813,7 +872,13 @@ def test_delete_submission_as_anonymous(self): """ self.client.logout() submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -826,8 +891,14 @@ def test_delete_submission_not_shared_as_anotheruser(self): someuser's data existence should not be revealed. """ self._log_in_as_another_user() - submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) + submission = self.get_random_submission(self.unknown_user) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -840,8 +911,14 @@ def test_delete_submission_shared_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() - submission = self.get_random_submission(self.asset.owner) - url = self.asset.deployment.get_submission_detail_url(submission['_id']) + submission = self.get_random_submission(self.unknown_user) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.delete(url, HTTP_ACCEPT='application/json') self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -878,19 +955,31 @@ def test_delete_submission_with_partial_perms_as_anotheruser(self): # Try first submission submitted by unknown submission = self.submissions_submitted_by_unknown[0] - url = self._deployment.get_submission_detail_url(submission['_id']) - response = self.client.delete(url, - content_type='application/json', - HTTP_ACCEPT='application/json') + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.delete( + url, content_type='application/json', HTTP_ACCEPT='application/json' + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # Try second submission submitted by anotheruser anotheruser_submission_count = len(self.submissions_submitted_by_anotheruser) submission = 
self.get_random_submission(self.anotheruser) - url = self._deployment.get_submission_detail_url(submission['_id']) - response = self.client.delete(url, - content_type='application/json', - HTTP_ACCEPT='application/json') + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) + response = self.client.delete( + url, content_type='application/json', HTTP_ACCEPT='application/json' + ) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual( @@ -953,7 +1042,6 @@ def test_attachments_rewrite(self): v_uid = asset.latest_deployed_version.uid submission = { - '_id': 1000, '__version__': v_uid, '_xform_id_string': asset.uid, 'formhub/uuid': 'formhub-uuid', @@ -1018,7 +1106,13 @@ def test_attachments_rewrite(self): asset.deployment.set_namespace(self.URL_NAMESPACE) self._log_in_as_another_user() - url = asset.deployment.get_submission_detail_url(submission['_id']) + url = reverse( + self._get_endpoint('submission-detail'), + kwargs={ + 'parent_lookup_asset': asset.uid, + 'pk': submission['_id'], + }, + ) response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -1051,7 +1145,9 @@ class SubmissionEditApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() - self.submission = self.get_random_submission(self.asset.owner) + + self._add_submissions() + self.submission = self.submissions_submitted_by_someuser[0] self.submission_url_legacy = reverse( self._get_endpoint('submission-enketo-edit'), kwargs={ @@ -1191,7 +1287,7 @@ def test_get_edit_link_with_partial_perms_as_anotheruser(self): ) # Try first submission submitted by unknown - submission = self.get_random_submission(self.asset.owner) + submission = self.get_random_submission(self.unknown_user) url = reverse( self._get_endpoint('submission-enketo-edit'), kwargs={ @@ -1408,61 +1504,65 @@ def test_edit_submission_with_different_root_name(self): @responses.activate def test_edit_submission_with_xml_encoding_declaration(self): - with mock.patch( - 'kpi.deployment_backends.mock_backend.dict2xml' - ) as mock_dict2xml: - mock_dict2xml.side_effect = dict2xml_with_encoding_declaration - submission = self.submissions[-1] - submission_xml = self.asset.deployment.get_submissions( - user=self.asset.owner, - format_type=SUBMISSION_FORMAT_TYPE_XML, - submission_ids=[submission['_id']], - )[0] - assert submission_xml.startswith( - '' - ) + submission = self.submissions[-1] + submission_xml = self.asset.deployment.get_submissions( + user=self.asset.owner, + format_type=SUBMISSION_FORMAT_TYPE_XML, + submission_ids=[submission['_id']], + )[0] + assert submission_xml.startswith( + '' + ) - # Get edit endpoint - edit_url = reverse( - self._get_endpoint('submission-enketo-edit'), - kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'pk': submission['_id'], - }, - ) + # Get edit endpoint + edit_url = reverse( + self._get_endpoint('submission-enketo-edit'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, + ) - # Set up a mock Enketo response and attempt the edit request - ee_url = f'{settings.ENKETO_URL}/{settings.ENKETO_EDIT_INSTANCE_ENDPOINT}' - responses.add_callback( - responses.POST, - ee_url, - callback=enketo_edit_instance_response_with_uuid_validation, - content_type='application/json', - ) - response = self.client.get(edit_url, {'format': 'json'}) - assert 
response.status_code == status.HTTP_200_OK + # Set up a mock Enketo response and attempt the edit request + ee_url = f'{settings.ENKETO_URL}/{settings.ENKETO_EDIT_INSTANCE_ENDPOINT}' + responses.add_callback( + responses.POST, + ee_url, + callback=enketo_edit_instance_response_with_uuid_validation, + content_type='application/json', + ) + response = self.client.get(edit_url, {'format': 'json'}) + assert response.status_code == status.HTTP_200_OK @responses.activate def test_edit_submission_with_xml_missing_uuids(self): # Make a new submission without UUIDs submission = copy.deepcopy(self.submissions[-1]) submission['_id'] += 1 + del submission['meta/instanceID'] + del submission['_uuid'] + submission['find_this'] = 'hello!' # The form UUID is already omitted by these tests, but fail if that # changes in the future assert 'formhub/uuid' not in submission.keys() - self.asset.deployment.mock_submissions([submission]) + self.asset.deployment.mock_submissions([submission], create_uuids=False) # Find and verify the new submission submission_xml = self.asset.deployment.get_submissions( user=self.asset.owner, format_type=SUBMISSION_FORMAT_TYPE_XML, - find_this='hello!', + query={"find_this": "hello!"}, + )[0] + submission_json = self.asset.deployment.get_submissions( + user=self.asset.owner, + format_type=SUBMISSION_FORMAT_TYPE_JSON, + query={"find_this": "hello!"}, )[0] - submission_xml_root = lxml.etree.fromstring(submission_xml) - submission_id = int(submission_xml_root.find('./_id').text) - assert submission_id == submission['_id'] + + submission_xml_root = lxml.etree.fromstring(submission_xml.encode()) + assert submission_json['_id'] == submission['_id'] assert submission_xml_root.find('./find_this').text == 'hello!' assert submission_xml_root.find('./meta/instanceID') is None assert submission_xml_root.find('./formhub/uuid') is None @@ -1472,7 +1572,7 @@ def test_edit_submission_with_xml_missing_uuids(self): self._get_endpoint('submission-enketo-edit'), kwargs={ 'parent_lookup_asset': self.asset.uid, - 'pk': submission_id, + 'pk': submission_json['_id'], }, ) @@ -1515,9 +1615,10 @@ def test_get_edit_link_submission_with_latest_asset_deployment(self): { 'type': 'note', 'name': 'n', - 'label': 'A new note', + 'label': ['A new note'], } ) + self.asset.save() assert self.asset.asset_versions.count() == original_versions_count + 1 assert ( @@ -1558,7 +1659,8 @@ class SubmissionViewApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() - self.submission = self.get_random_submission(self.asset.owner) + self._add_submissions() + self.submission = self.submissions_submitted_by_someuser[0] self.submission_view_link_url = reverse( self._get_endpoint('submission-enketo-view'), kwargs={ @@ -1699,22 +1801,16 @@ def test_get_view_link_with_partial_perms_as_anotheruser(self): self.assertEqual(response.data, expected_response) -class SubmissionDuplicateApiTests(BaseSubmissionTestCase): +class SubmissionDuplicateBaseApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() current_time = datetime.now(tz=ZoneInfo('UTC')).isoformat('T', 'milliseconds') - # TODO: also test a submission that's missing `start` or `end`; see - # #3054. 
Right now that would be useless, though, because the - # MockDeploymentBackend doesn't use XML at all and won't fail if an - # expected field is missing - for submission in self.submissions: - submission['start'] = current_time - submission['end'] = current_time - - self.asset.deployment.mock_submissions(self.submissions) + self._add_submissions( + other_fields={'start': current_time, 'end': current_time} + ) - self.submission = self.get_random_submission(self.asset.owner) + self.submission = self.submissions_submitted_by_someuser[0] self.submission_url = reverse( self._get_endpoint('submission-duplicate'), kwargs={ @@ -1735,11 +1831,43 @@ def _check_duplicate(self, response, submission: dict = None): expected_next_id = max((sub['_id'] for sub in self.submissions)) + 1 assert submission['_id'] != duplicate_submission['_id'] assert duplicate_submission['_id'] == expected_next_id + assert submission['meta/instanceID'] != duplicate_submission['meta/instanceID'] - assert submission['meta/instanceID'] == duplicate_submission['meta/deprecatedID'] assert submission['start'] != duplicate_submission['start'] assert submission['end'] != duplicate_submission['end'] + +class SubmissionDuplicateWithXMLNamespaceApiTests( + SubmissionDuplicateBaseApiTests +): + + def setUp(self): + with mock.patch( + 'kpi.deployment_backends.mock_backend.dict2xform' + ) as mock_dict2xform: + mock_dict2xform.side_effect = dict2xform_with_namespace + super().setUp() + + def test_duplicate_submission_with_xml_namespace(self): + + submission_xml = self.asset.deployment.get_submissions( + user=self.asset.owner, + format_type=SUBMISSION_FORMAT_TYPE_XML, + submission_ids=[self.submission['_id']], + )[0] + assert ( + 'xmlns="http://opendatakit.org/submissions"' in submission_xml + ) + response = self.client.post(self.submission_url, {'format': 'json'}) + assert response.status_code == status.HTTP_201_CREATED + self._check_duplicate(response) + + +class SubmissionDuplicateApiTests(SubmissionDuplicateBaseApiTests): + + def setUp(self): + super().setUp() + def test_duplicate_submission_as_owner_allowed(self): """ someuser is the owner of the project. 
@@ -1750,34 +1878,15 @@ def test_duplicate_submission_as_owner_allowed(self): self._check_duplicate(response) def test_duplicate_submission_with_xml_encoding(self): - with mock.patch( - 'kpi.deployment_backends.mock_backend.dict2xml' - ) as mock_dict2xml: - mock_dict2xml.side_effect = dict2xml_with_encoding_declaration - submission_xml = self.asset.deployment.get_submissions( - user=self.asset.owner, - format_type=SUBMISSION_FORMAT_TYPE_XML, - submission_ids=[self.submission['_id']], - )[0] - assert submission_xml.startswith( - '' - ) - self.test_duplicate_submission_as_owner_allowed() - - def test_duplicate_submission_with_xml_namespace(self): - with mock.patch( - 'kpi.deployment_backends.mock_backend.dict2xml' - ) as mock_dict2xml: - mock_dict2xml.side_effect = dict2xml_with_namespace - submission_xml = self.asset.deployment.get_submissions( - user=self.asset.owner, - format_type=SUBMISSION_FORMAT_TYPE_XML, - submission_ids=[self.submission['_id']], - )[0] - assert ( - 'xmlns="http://opendatakit.org/submissions"' in submission_xml - ) - self.test_duplicate_submission_as_owner_allowed() + submission_xml = self.asset.deployment.get_submissions( + user=self.asset.owner, + format_type=SUBMISSION_FORMAT_TYPE_XML, + submission_ids=[self.submission['_id']], + )[0] + assert submission_xml.startswith( + '' + ) + self.test_duplicate_submission_as_owner_allowed() def test_duplicate_submission_as_anotheruser_not_allowed(self): """ @@ -1861,7 +1970,7 @@ def test_duplicate_submission_as_anotheruser_with_partial_perms(self): ) # Try first submission submitted by unknown - submission = self.get_random_submission(self.asset.owner) + submission = self.get_random_submission(self.unknown_user) url = reverse( self._get_endpoint('submission-duplicate'), kwargs={ @@ -1890,6 +1999,7 @@ class BulkUpdateSubmissionsApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() + self._add_submissions() self.submission_url = reverse( self._get_endpoint('submission-bulk'), kwargs={ @@ -1933,22 +2043,18 @@ def test_bulk_update_submissions_allowed_as_owner(self): ) ) def test_bulk_update_submissions_with_xml_encoding(self): - with mock.patch( - 'kpi.deployment_backends.mock_backend.dict2xml' - ) as mock_dict2xml: - mock_dict2xml.side_effect = dict2xml_with_encoding_declaration - submission = self.submissions[ - self.updated_submission_data['submission_ids'][-1] - ] - submission_xml = self.asset.deployment.get_submissions( - user=self.asset.owner, - format_type=SUBMISSION_FORMAT_TYPE_XML, - submission_ids=[submission['_id']], - )[0] - assert submission_xml.startswith( - '' - ) - self.test_bulk_update_submissions_allowed_as_owner() + submission = self.submissions[ + self.updated_submission_data['submission_ids'][-1] + ] + submission_xml = self.asset.deployment.get_submissions( + user=self.asset.owner, + format_type=SUBMISSION_FORMAT_TYPE_XML, + submission_ids=[submission['_id']], + )[0] + assert submission_xml.startswith( + '' + ) + self.test_bulk_update_submissions_allowed_as_owner() @pytest.mark.skip( reason=( @@ -1958,9 +2064,9 @@ def test_bulk_update_submissions_with_xml_encoding(self): ) def test_bulk_update_submissions_with_xml_namespace(self): with mock.patch( - 'kpi.deployment_backends.mock_backend.dict2xml' - ) as mock_dict2xml: - mock_dict2xml.side_effect = dict2xml_with_namespace + 'kpi.deployment_backends.mock_backend.dict2xform' + ) as mock_dict2xform: + mock_dict2xform.side_effect = dict2xform_with_namespace submission = self.submissions[ self.updated_submission_data['submission_ids'][-1] ] @@ -2070,11 
+2176,14 @@ class SubmissionValidationStatusApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() - self.submission = self.get_random_submission(self.asset.owner) - self.validation_status_url = ( - self._deployment.get_submission_validation_status_url( - self.submission['_id'] - ) + self._add_submissions() + self.submission = self.submissions_submitted_by_someuser[0] + self.validation_status_url = reverse( + self._get_endpoint('submission-validation-status'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': self.submission['_id'], + }, ) def test_retrieve_status_as_owner(self): @@ -2084,7 +2193,7 @@ def test_retrieve_status_as_owner(self): """ response = self.client.get(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, self.submission.get("_validation_status")) + self.assertEqual(response.data, {}) def test_cannot_retrieve_status_of_not_shared_submission_as_anotheruser(self): """ @@ -2108,7 +2217,7 @@ def test_retrieve_status_of_shared_submission_as_anotheruser(self): self._log_in_as_another_user() response = self.client.get(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, self.submission.get("_validation_status")) + self.assertEqual(response.data, {}) def test_cannot_retrieve_status_of_shared_submission_as_anonymous(self): """ @@ -2247,28 +2356,35 @@ def test_edit_status_with_partial_perms_as_anotheruser(self): PERM_VALIDATE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } # Allow anotheruser to validate someuser's data - self.asset.assign_perm(self.anotheruser, PERM_PARTIAL_SUBMISSIONS, - partial_perms=partial_perms) + self.asset.assign_perm( + self.anotheruser, + PERM_PARTIAL_SUBMISSIONS, + partial_perms=partial_perms, + ) data = { 'validation_status.uid': 'validation_status_not_approved' } # Try first submission submitted by unknown submission = self.submissions_submitted_by_unknown[0] - url = ( - self._deployment.get_submission_validation_status_url( - submission['_id'] - ) + url = reverse( + self._get_endpoint('submission-validation-status'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, ) response = self.client.patch(url, data=data) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # Try second submission submitted by anotheruser submission = self.submissions_submitted_by_anotheruser[0] - url = ( - self._deployment.get_submission_validation_status_url( - submission['_id'] - ) + url = reverse( + self._get_endpoint('submission-validation-status'), + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'pk': submission['_id'], + }, ) response = self.client.patch(url, data=data) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -2284,9 +2400,7 @@ class SubmissionValidationStatusesApiTests(BaseSubmissionTestCase): def setUp(self): super().setUp() - for submission in self.submissions: - submission['_validation_status']['uid'] = 'validation_status_not_approved' - self.asset.deployment.mock_submissions(self.submissions) + self._add_submissions() self.validation_statuses_url = reverse( self._get_endpoint('submission-validation-statuses'), kwargs={'parent_lookup_asset': self.asset.uid, 'format': 'json'}, @@ -2296,6 +2410,18 @@ def setUp(self): kwargs={'parent_lookup_asset': self.asset.uid, 'format': 'json'}, ) + # Make the owner change validation status of all submissions + data = { + 'payload': { + 'validation_status.uid': 'validation_status_not_approved', + 
'confirm': True, + } + } + response = self.client.patch( + self.validation_statuses_url, data=data, format='json' + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_delete_all_status_as_owner(self): """ someuser is the owner of the project. @@ -2597,8 +2723,11 @@ def test_edit_all_submission_validation_statuses_with_partial_perms_as_anotherus {'_submitted_by': 'anotheruser'}] } # Allow anotheruser to validate their own data - self.asset.assign_perm(self.anotheruser, PERM_PARTIAL_SUBMISSIONS, - partial_perms=partial_perms) + self.asset.assign_perm( + self.anotheruser, + PERM_PARTIAL_SUBMISSIONS, + partial_perms=partial_perms, + ) data = { 'payload': { 'validation_status.uid': 'validation_status_approved', @@ -2607,20 +2736,19 @@ def test_edit_all_submission_validation_statuses_with_partial_perms_as_anotherus } # Update all submissions anotheruser is allowed to edit - response = self.client.patch(self.validation_statuses_url, - data=data, - format='json') + response = self.client.patch( + self.validation_statuses_url, data=data, format='json' + ) self.assertEqual(response.status_code, status.HTTP_200_OK) - count = self._deployment.calculated_submission_count( - self.anotheruser) + count = self._deployment.calculated_submission_count(self.anotheruser) expected_response = {'detail': f'{count} submissions have been updated'} self.assertEqual(response.data, expected_response) # Get all submissions and ensure only the ones that anotheruser is # allowed to edit have been modified self.client.logout() - self.client.login(username="someuser", password="someuser") + self.client.login(username='someuser', password='someuser') response = self.client.get(self.submission_list_url) for submission in response.data['results']: validation_status = submission['_validation_status'] @@ -2751,7 +2879,12 @@ def setUp(self): ] a.deployment.mock_submissions(self.submissions) a.deployment.set_namespace(self.URL_NAMESPACE) - self.submission_list_url = a.deployment.submission_list_url + self.submission_list_url = reverse( + self._get_endpoint('submission-list'), + kwargs={ + 'parent_lookup_asset': a.uid, + }, + ) def test_list_submissions_geojson_defaults(self): response = self.client.get( diff --git a/kpi/tests/kpi_test_case.py b/kpi/tests/kpi_test_case.py index 5c90b2b79f..2b005d6c54 100644 --- a/kpi/tests/kpi_test_case.py +++ b/kpi/tests/kpi_test_case.py @@ -105,8 +105,9 @@ def create_collection(self, name, owner=None, owner_password=None, collection = self.url_to_obj(response.data['url']) return collection - def create_asset(self, name, content=None, owner=None, - owner_password=None, **kwargs): + def create_asset( + self, name, content=None, owner=None, owner_password=None, **kwargs + ): if owner and owner_password: if isinstance(owner, str): self.login(owner, owner_password) diff --git a/kpi/tests/test_asset_versions.py b/kpi/tests/test_asset_versions.py index 905d9ed5bb..a1ddb8d26a 100644 --- a/kpi/tests/test_asset_versions.py +++ b/kpi/tests/test_asset_versions.py @@ -9,8 +9,9 @@ from django.test import TestCase from django.utils import timezone - from formpack.utils.expand_content import SCHEMA_VERSION + +from kobo.apps.kobo_auth.shortcuts import User from kpi.exceptions import BadAssetTypeException from kpi.utils.hash import calculate_hash from ..models import Asset @@ -57,23 +58,27 @@ def test_init_asset_version(self): self.assertEqual(av_count + 2, AssetVersion.objects.count()) def test_asset_deployment(self): - self.asset = Asset.objects.create(asset_type='survey', content={ - 
'survey': [{'type': 'note', 'label': 'Read me', 'name': 'n1'}] - }) + bob = User.objects.create(username='bob') + self.asset = Asset.objects.create( + asset_type='survey', + content={ + 'survey': [{'type': 'note', 'label': ['Read me'], 'name': 'n1'}] + }, + owner=bob + ) self.assertEqual(self.asset.asset_versions.count(), 1) self.assertEqual(self.asset.latest_version.deployed, False) - self.asset.content['survey'].append({'type': 'note', - 'label': 'Read me 2', - 'name': 'n2'}) + self.asset.content['survey'].append( + {'type': 'note', 'label': ['Read me 2'], 'name': 'n2'} + ) self.asset.save() self.assertEqual(self.asset.asset_versions.count(), 2) v2 = self.asset.latest_version self.assertEqual(self.asset.latest_version.deployed, False) self.asset.deploy(backend='mock', active=True) - self.asset.save(create_version=False, - adjust_content=False) + self.asset.save(create_version=False, adjust_content=False) # version did not increment self.assertEqual(self.asset.asset_versions.count(), 2) diff --git a/kpi/tests/test_deployment_backends.py b/kpi/tests/test_deployment_backends.py index 9763691edc..2a8f63126d 100644 --- a/kpi/tests/test_deployment_backends.py +++ b/kpi/tests/test_deployment_backends.py @@ -2,6 +2,7 @@ import pytest from django.test import TestCase +from kobo.apps.kobo_auth.shortcuts import User from kpi.exceptions import DeploymentDataException from kpi.models.asset import Asset from kpi.models.asset_version import AssetVersion @@ -9,12 +10,11 @@ class CreateDeployment(TestCase): def setUp(self): - self.asset = Asset(content={ - 'survey': [ - {'type':'text', 'name': 'q1', - 'label': 'Q1.',} - ] - }) + someuser = User.objects.create(username='someuser') + self.asset = Asset( + content={'survey': [{'type': 'text', 'name': 'q1', 'label': 'Q1.'}]}, + owner=someuser, + ) def test_invalid_backend_fails(self): self.asset.save() @@ -33,15 +33,20 @@ def test_mock_deployment_inits(self): @pytest.mark.django_db def test_initial_kuids(): initial_kuid = 'aaaa1111' - asset = Asset.objects.create(content={ - 'survey': [ - {'type': 'text', - 'name': 'q1', - 'label': 'Q1.', - '$kuid': initial_kuid, - } + someuser = User.objects.create(username='someuser') + asset = Asset.objects.create( + content={ + 'survey': [ + { + 'type': 'text', + 'name': 'q1', + 'label': 'Q1.', + '$kuid': initial_kuid, + } ] - }) + }, + owner=someuser, + ) assert asset.content['survey'][0]['$kuid'] == initial_kuid asset.deploy(backend='mock', active=False) @@ -53,13 +58,11 @@ def test_initial_kuids(): class MockDeployment(TestCase): def setUp(self): - self.asset = Asset.objects.create(content={ - 'survey': [ - {'type': 'text', 'name': 'q1', - 'label': 'Q1.' 
- } - ] - }) + someuser = User.objects.create(username='someuser') + self.asset = Asset.objects.create( + content={'survey': [{'type': 'text', 'name': 'q1', 'label': 'Q1.'}]}, + owner=someuser, + ) self.asset.deploy(backend='mock', active=False) self.asset.save() diff --git a/kpi/tests/test_mock_data.py b/kpi/tests/test_mock_data.py index 47e4bcb05f..a7de62bc7d 100644 --- a/kpi/tests/test_mock_data.py +++ b/kpi/tests/test_mock_data.py @@ -9,113 +9,250 @@ from kobo.apps.reports import report_data from kpi.models import Asset -F1 = {'survey': [{'$kuid': 'Uf89NP4VX', 'type': 'start', 'name': 'start'}, - {'$kuid': 'ZtZBY7XHX', 'type': 'end', 'name': 'end'}, - {'name': 'Select_one', 'select_from_list_name': 'choice_list_1', 'required': 'true', - 'label': ['Select one', 'Seleccione uno', - '\u0627\u062e\u062a\u0631 \u0648\u0627\u062d\u062f\u0627'], '$kuid': 'WXOeQ4Nc0', - 'type': 'select_one'}, - {'name': 'Select_Many', 'select_from_list_name': 'choice_list_2', 'required': 'true', - 'label': ['Select Many', 'Muchos seleccione', - '\u0627\u062e\u062a\u0631 \u0627\u0644\u0639\u062f\u064a\u062f'], '$kuid': 'BC6BNP91R', - 'type': 'select_multiple'}, - {'$kuid': '0e7sTrQzo', 'required': 'true', 'type': 'text', 'name': 'Text', - 'label': ['Text', 'Texto', '\u0646\u0635']}, - {'$kuid': 'ZzKb8DeQu', 'required': 'true', 'type': 'integer', 'name': 'Number', - 'label': ['Number', 'N\xfamero', '\u0639\u062f\u062f']}, - {'$kuid': 'gLEDxsNZo', 'required': 'true', 'type': 'decimal', 'name': 'Decimal', - 'label': ['Decimal', 'Decimal', '\u0639\u062f\u062f \u0639\u0634\u0631\u064a']}, - {'$kuid': 'pt2w8z3Xk', 'required': 'true', 'type': 'date', 'name': 'Date', - 'label': ['Date', 'Fecha', '\u062a\u0627\u0631\u064a\u062e']}, - {'$kuid': '3xn0tP9AI', 'required': 'true', 'type': 'time', 'name': 'Time', - 'label': ['Time', 'Hora', '\u0645\u0631\u0629']}, - {'$kuid': 'w0nYPBtT0', 'required': 'true', 'type': 'datetime', 'name': 'Date_and_time', - 'label': ['Date and time', 'Fecha y hora', - '\u0627\u0644\u062a\u0627\u0631\u064a\u062e \u0648 \u0627\u0644\u0648\u0642\u062a']}, - {'$kuid': '0dovjhXG6', 'required': 'false', 'type': 'geopoint', 'name': 'GPS', - 'label': ['GPS', 'GPS', - '\u0646\u0638\u0627\u0645 \u062a\u062d\u062f\u064a\u062f \u0627\u0644\u0645\u0648\u0627\u0642\u0639']}, - {'$kuid': 'NI2fsrYZI', 'required': 'true', 'type': 'image', 'name': 'Photo', - 'label': ['Photo', 'Foto', - '\u0635\u0648\u0631\u0629 \u0641\u0648\u062a\u0648\u063a\u0631\u0627\u0641\u064a\u0629']}, - {'$kuid': 'FlfOVztW3', 'required': 'true', 'type': 'audio', 'name': 'Audio', - 'label': ['Audio', 'Audio', '\u0633\u0645\u0639\u064a']}, - {'$kuid': 'GdNV76Ily', 'required': 'true', 'type': 'video', 'name': 'Video', - 'label': ['Video', 'V\xeddeo', '\u0641\u064a\u062f\u064a\u0648']}, - {'$kuid': 'EDuWkTREB', 'required': 'false', 'type': 'note', - 'name': 'Note_Should_not_be_displayed', - 'label': ['Note (Should not be displayed!)', 'Nota (no se represente!)', - '\u0645\u0644\u0627\u062d\u0638\u0629 (\u064a\u062c\u0628 \u0623\u0646 \u0644\u0627 \u064a\u062a\u0645 \u0639\u0631\u0636!)']}, - {'$kuid': 'hwik7tNXF', 'required': 'true', 'type': 'barcode', 'name': 'Barcode', - 'label': ['Barcode', 'C\xf3digo de barras', '\u0627\u0644\u0628\u0627\u0631\u0643\u0648\u062f']}, - {'$kuid': 'NTBElbRcj', 'required': 'true', 'type': 'acknowledge', 'name': 'Acknowledge', - 'label': ['Acknowledge', 'Reconocer', '\u0627\u0639\u062a\u0631\u0641']}, - {'calculation': '1', '$kuid': 'x6zr1MtmP', 'required': 'false', 'type': 'calculate', - 'name': 
'calculation'}], 'translations': [None, 'Espa\xf1ol', 'Arabic'], 'choices': [ - {'$kuid': 'xm4h0m4kK', 'list_name': 'choice_list_1', 'name': 'option_1', - 'label': ['First option', 'Primera opci\xf3n', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644']}, - {'$kuid': 'slcf0IezR', 'list_name': 'choice_list_1', 'name': 'option_2', - 'label': ['Second option', 'Segunda opci\xf3n', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a']}, - {'$kuid': 'G7myzY2qX', 'list_name': 'choice_list_2', 'name': 'option_1', - 'label': ['First option', 'Primera opci\xf3n', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644']}, - {'$kuid': 'xUd28PPBs', 'list_name': 'choice_list_2', 'name': 'option_2', - 'label': ['Second option', 'Segunda opci\xf3n', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a']}]} +F1 = { + 'survey': [ + {'$kuid': 'Uf89NP4VX', 'type': 'start', 'name': 'start'}, + {'$kuid': 'ZtZBY7XHX', 'type': 'end', 'name': 'end'}, + { + 'name': 'Select_one', + 'select_from_list_name': 'choice_list_1', + 'required': 'true', + 'label': [ + 'Select one', + 'Seleccione uno', + '\u0627\u062e\u062a\u0631 \u0648\u0627\u062d\u062f\u0627', + ], + '$kuid': 'WXOeQ4Nc0', + 'type': 'select_one', + }, + { + 'name': 'Select_Many', + 'select_from_list_name': 'choice_list_2', + 'required': 'true', + 'label': [ + 'Select Many', + 'Muchos seleccione', + '\u0627\u062e\u062a\u0631 \u0627\u0644\u0639\u062f\u064a\u062f', + ], + '$kuid': 'BC6BNP91R', + 'type': 'select_multiple', + }, + { + '$kuid': '0e7sTrQzo', + 'required': 'true', + 'type': 'text', + 'name': 'Text', + 'label': ['Text', 'Texto', '\u0646\u0635'], + }, + { + '$kuid': 'ZzKb8DeQu', + 'required': 'true', + 'type': 'integer', + 'name': 'Number', + 'label': ['Number', 'N\xfamero', '\u0639\u062f\u062f'], + }, + { + '$kuid': 'gLEDxsNZo', + 'required': 'true', + 'type': 'decimal', + 'name': 'Decimal', + 'label': [ + 'Decimal', + 'Decimal', + '\u0639\u062f\u062f \u0639\u0634\u0631\u064a', + ], + }, + { + '$kuid': 'pt2w8z3Xk', + 'required': 'true', + 'type': 'date', + 'name': 'Date', + 'label': ['Date', 'Fecha', '\u062a\u0627\u0631\u064a\u062e'], + }, + { + '$kuid': '3xn0tP9AI', + 'required': 'true', + 'type': 'time', + 'name': 'Time', + 'label': ['Time', 'Hora', '\u0645\u0631\u0629'], + }, + { + '$kuid': 'w0nYPBtT0', + 'required': 'true', + 'type': 'datetime', + 'name': 'Date_and_time', + 'label': [ + 'Date and time', + 'Fecha y hora', + '\u0627\u0644\u062a\u0627\u0631\u064a\u062e \u0648 \u0627\u0644\u0648\u0642\u062a', + ], + }, + { + '$kuid': '0dovjhXG6', + 'required': 'false', + 'type': 'geopoint', + 'name': 'GPS', + 'label': [ + 'GPS', + 'GPS', + '\u0646\u0638\u0627\u0645 \u062a\u062d\u062f\u064a\u062f \u0627\u0644\u0645\u0648\u0627\u0642\u0639', + ], + }, + { + '$kuid': 'NI2fsrYZI', + 'required': 'true', + 'type': 'image', + 'name': 'Photo', + 'label': [ + 'Photo', + 'Foto', + '\u0635\u0648\u0631\u0629 \u0641\u0648\u062a\u0648\u063a\u0631\u0627\u0641\u064a\u0629', + ], + }, + { + '$kuid': 'FlfOVztW3', + 'required': 'true', + 'type': 'audio', + 'name': 'Audio', + 'label': ['Audio', 'Audio', '\u0633\u0645\u0639\u064a'], + }, + { + '$kuid': 'GdNV76Ily', + 'required': 'true', + 'type': 'video', + 'name': 'Video', + 'label': ['Video', 'V\xeddeo', '\u0641\u064a\u062f\u064a\u0648'], + }, + { + '$kuid': 'EDuWkTREB', + 'required': 'false', + 'type': 'note', + 'name': 'Note_Should_not_be_displayed', + 'label': [ + 'Note (Should not be displayed!)', + 'Nota (no se represente!)', + 
'\u0645\u0644\u0627\u062d\u0638\u0629 (\u064a\u062c\u0628 \u0623\u0646 \u0644\u0627 \u064a\u062a\u0645 \u0639\u0631\u0636!)', + ], + }, + { + '$kuid': 'hwik7tNXF', + 'required': 'true', + 'type': 'barcode', + 'name': 'Barcode', + 'label': [ + 'Barcode', + 'C\xf3digo de barras', + '\u0627\u0644\u0628\u0627\u0631\u0643\u0648\u062f', + ], + }, + { + '$kuid': 'NTBElbRcj', + 'required': 'true', + 'type': 'acknowledge', + 'name': 'Acknowledge', + 'label': [ + 'Acknowledge', + 'Reconocer', + '\u0627\u0639\u062a\u0631\u0641', + ], + }, + { + 'calculation': '1', + '$kuid': 'x6zr1MtmP', + 'required': 'false', + 'type': 'calculate', + 'name': 'calculation', + }, + ], + 'translations': [None, 'Espa\xf1ol', 'Arabic'], + 'choices': [ + { + '$kuid': 'xm4h0m4kK', + 'list_name': 'choice_list_1', + 'name': 'option_1', + 'label': [ + 'First option', + 'Primera opci\xf3n', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', + ], + }, + { + '$kuid': 'slcf0IezR', + 'list_name': 'choice_list_1', + 'name': 'option_2', + 'label': [ + 'Second option', + 'Segunda opci\xf3n', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + ], + }, + { + '$kuid': 'G7myzY2qX', + 'list_name': 'choice_list_2', + 'name': 'option_1', + 'label': [ + 'First option', + 'Primera opci\xf3n', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', + ], + }, + { + '$kuid': 'xUd28PPBs', + 'list_name': 'choice_list_2', + 'name': 'option_2', + 'label': [ + 'Second option', + 'Segunda opci\xf3n', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + ], + }, + ], +} -SUBMISSION_DATA = OrderedDict([ - ("start", - ["2016-06-0%dT12:00:00.000-04:00" % n for n in [1, 2, 3, 4]]), - ("end", - ["2016-06-0%dT11:0%d:00.000-04:00" % (n, n) for n in [1, 2, 3, 4]]), - ("Select_one", - ["option_1", "option_1", "option_2", "option_1"]), - ("Select_Many", - ["option_1", "option_2", "option_1 option_2", ""]), - ("Text", - ["a", "b", "c", "a"]), - ("Number", - [1, 2, 3, 2]), - ("Decimal", - [1.5, 2.5, 3.5, 3.5]), - ("Date", - ["2016-06-0%d" % n for n in [1, 2, 3, 5]]), - ("Time", - ["%d:00:00" % n for n in [1, 2, 3, 5]]), - ("Date_and_time", - ["2016-06-0%dT12:00:00.000-04:00" % n for n in [1, 2, 3, 5]]), - ("GPS", - ["1%d.43 -2%d.54 1 0" % (n, n) for n in [5, 7, 8, 5]]), - ("Photo", - ["photo_%d.jpg" % (n) for n in [1, 2, 3, 4]]), - ("Audio", - ["audio_%d.jpg" % (n) for n in [4, 3, 2, 1]]), - ("Video", - ["video_%d.jpg" % (n) for n in [6, 7, 8, 9]]), - ("Note_Should_not_be_displayed", - [None, None, None, None]), - ("Barcode", - ["barcode%d" % (n) for n in [9, 7, 7, 6]]), - ("Acknowledge", - [None, None, None, None]), - ("calculation", - ["1", "1", "1", "1"]), -]) +SUBMISSION_DATA = OrderedDict( + [ + ("start", ["2016-06-0%dT12:00:00.000-04:00" % n for n in [1, 2, 3, 4]]), + ( + "end", + ["2016-06-0%dT11:0%d:00.000-04:00" % (n, n) for n in [1, 2, 3, 4]], + ), + ("Select_one", ["option_1", "option_1", "option_2", "option_1"]), + ("Select_Many", ["option_1", "option_2", "option_1 option_2", ""]), + ("Text", ["a", "b", "c", "a"]), + ("Number", [1, 2, 3, 2]), + ("Decimal", [1.5, 2.5, 3.5, 3.5]), + ("Date", ["2016-06-0%d" % n for n in [1, 2, 3, 5]]), + ("Time", ["%d:00:00" % n for n in [1, 2, 3, 5]]), + ( + "Date_and_time", + ["2016-06-0%dT12:00:00.000-04:00" % n for n in [1, 2, 3, 5]], + ), + ("GPS", ["1%d.43 -2%d.54 1 0" % (n, n) for n in [5, 7, 8, 5]]), + ("Photo", ["photo_%d.jpg" % (n) for n in [1, 2, 3, 4]]), + ("Audio", ["audio_%d.jpg" % (n) for n in [4, 3, 2, 1]]), + 
("Video", ["video_%d.jpg" % (n) for n in [6, 7, 8, 9]]), + ("Note_Should_not_be_displayed", [None, None, None, None]), + ("Barcode", ["barcode%d" % (n) for n in [9, 7, 7, 6]]), + ("Acknowledge", [None, None, None, None]), + ("calculation", ["1", "1", "1", "1"]), + ] +) -def _get_stats_object(pack, version_ids, submissions=None, lang=None, split_by=None): +def _get_stats_object( + pack, version_ids, submissions=None, lang=None, split_by=None +): if submissions == None: raise ValueError('submissions must be provided') report = pack.autoreport(versions=version_ids) field_names = [field.name for field in pack.get_fields_for_versions(-1)] stats = [] - for (field, name_or_label, data) in report.get_stats(submissions, - field_names, - lang=lang, - split_by=split_by, - ).stats: + for field, name_or_label, data in report.get_stats( + submissions, + field_names, + lang=lang, + split_by=split_by, + ).stats: stats.append((field.name, data)) return stats @@ -132,17 +269,20 @@ def setUp(self): submissions = [] for i in range(0, num_submissions): - submissions.append(OrderedDict([ - (key, SUBMISSION_DATA[key][i]) for key in SUBMISSION_DATA.keys() - ])) + submissions.append( + OrderedDict( + [ + (key, SUBMISSION_DATA[key][i]) + for key in SUBMISSION_DATA.keys() + ] + ) + ) self.asset.deploy(backend='mock', active=True) self.asset.save() v_uid = self.asset.latest_deployed_version.uid for submission in submissions: - submission.update({ - '__version__': v_uid - }) + submission.update({'__version__': v_uid}) self.asset.deployment.mock_submissions(submissions) schemas = [v.to_formpack_schema() for v in self.asset.deployed_versions] self.fp = FormPack(versions=schemas, id_string=self.asset.uid) @@ -150,73 +290,125 @@ def setUp(self): self.submissions = self.asset.deployment.get_submissions(self.user) def test_kobo_apps_reports_report_data(self): - values = report_data.data_by_identifiers(self.asset, - submission_stream=self.submissions) - expected_names = ["start", "end", "Select_one", "Select_Many", "Text", - "Number", "Decimal", "Date", "Time", "Date_and_time", - "GPS", "Photo", "Audio", "Video", "Barcode", - "Acknowledge", "calculation"] + values = report_data.data_by_identifiers( + self.asset, submission_stream=self.submissions + ) + expected_names = [ + "start", + "end", + "Select_one", + "Select_Many", + "Text", + "Number", + "Decimal", + "Date", + "Time", + "Date_and_time", + "GPS", + "Photo", + "Audio", + "Video", + "Barcode", + "Acknowledge", + "calculation", + ] self.assertEqual([v['name'] for v in values], expected_names) self.assertEqual(len(values), 17) def test_kobo_apps_reports_report_data_split_by(self): - values = report_data.data_by_identifiers(self.asset, - split_by="Select_one", - field_names=["Date"], - submission_stream=self.submissions) - self.assertEqual(values[0]['data']['values'], [ - ('2016-06-01', - {'responses': ('First option', 'Second option'), - 'frequencies': (1, 0), - 'percentages': (25.0, 0.0)}), - ('2016-06-02', - {'responses': ('First option', 'Second option'), - 'frequencies': (1, 0), - 'percentages': (25.0, 0.0)}), - ('2016-06-03', - {'responses': ('First option', 'Second option'), - 'frequencies': (0, 1), - 'percentages': (0.0, 25.0)}), - ('2016-06-05', - {'responses': ('First option', 'Second option'), - 'frequencies': (1, 0), - 'percentages': (25.0, 0.0)}), - ]) + values = report_data.data_by_identifiers( + self.asset, + split_by="Select_one", + field_names=["Date"], + submission_stream=self.submissions, + ) + self.assertEqual( + values[0]['data']['values'], + [ + ( + 
'2016-06-01', + { + 'responses': ('First option', 'Second option'), + 'frequencies': (1, 0), + 'percentages': (25.0, 0.0), + }, + ), + ( + '2016-06-02', + { + 'responses': ('First option', 'Second option'), + 'frequencies': (1, 0), + 'percentages': (25.0, 0.0), + }, + ), + ( + '2016-06-03', + { + 'responses': ('First option', 'Second option'), + 'frequencies': (0, 1), + 'percentages': (0.0, 25.0), + }, + ), + ( + '2016-06-05', + { + 'responses': ('First option', 'Second option'), + 'frequencies': (1, 0), + 'percentages': (25.0, 0.0), + }, + ), + ], + ) def test_kobo_apps_reports_report_data_split_by_translated(self): - values = report_data.data_by_identifiers(self.asset, - split_by="Select_one", - lang="Arabic", - field_names=["Date"], - submission_stream=self.submissions) + values = report_data.data_by_identifiers( + self.asset, + split_by="Select_one", + lang="Arabic", + field_names=["Date"], + submission_stream=self.submissions, + ) responses = set() for rv in OrderedDict(values[0]['data']['values']).values(): responses.update(rv.get('responses')) - expected = set(['\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a']) + expected = set( + [ + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + ] + ) self.assertEqual(responses, expected) def test_kobo_apps_reports_report_data_subset(self): - values = report_data.data_by_identifiers(self.asset, - field_names=('Select_one',), - submission_stream=self.submissions) + values = report_data.data_by_identifiers( + self.asset, + field_names=('Select_one',), + submission_stream=self.submissions, + ) self.assertEqual(values[0]['data']['frequencies'], (3, 1)) self.assertEqual(values[0]['row']['type'], 'select_one') self.assertEqual(values[0]['data']['percentages'], (75, 25)) - self.assertEqual(values[0]['data']['responses'], ('First option', 'Second option')) + self.assertEqual( + values[0]['data']['responses'], ('First option', 'Second option') + ) def test_kobo_apps_reports_report_data_translation(self): - values = report_data.data_by_identifiers(self.asset, - lang='Arabic', - field_names=('Select_one',), - submission_stream=self.submissions) - self.assertEqual(values[0]['data']['responses'], - ( # response 1 in Arabic - '\u0627\u0644\u062e\u064a\u0627\u0631 ' - '\u0627\u0644\u0623\u0648\u0644', - # response 2 in Arabic - '\u0627\u0644\u062e\u064a\u0627\u0631 ' - '\u0627\u0644\u062b\u0627\u0646\u064a')) + values = report_data.data_by_identifiers( + self.asset, + lang='Arabic', + field_names=('Select_one',), + submission_stream=self.submissions, + ) + self.assertEqual( + values[0]['data']['responses'], + ( # response 1 in Arabic + '\u0627\u0644\u062e\u064a\u0627\u0631 ' + '\u0627\u0644\u0623\u0648\u0644', + # response 2 in Arabic + '\u0627\u0644\u062e\u064a\u0627\u0631 ' + '\u0627\u0644\u062b\u0627\u0646\u064a', + ), + ) def test_export_works_if_no_version_value_provided_in_submission(self): submissions = self.asset.deployment.get_submissions(self.asset.owner) @@ -224,30 +416,43 @@ def test_export_works_if_no_version_value_provided_in_submission(self): for submission in submissions: del submission['__version__'] - values = report_data.data_by_identifiers(self.asset, - field_names=['Date', 'Decimal'], - submission_stream=submissions) + values = report_data.data_by_identifiers( + self.asset, + field_names=['Date', 'Decimal'], + submission_stream=submissions, 
+ ) (date_stats, decimal_stats) = values - self.assertEqual(date_stats['data'], { - 'provided': 4, - 'total_count': 4, - 'stdev': 0.9574271077563381, - 'median': 3.0, - 'show_graph': False, - 'mode': 3.5, - 'not_provided': 0, - 'mean': 2.75, - }) - self.assertEqual(decimal_stats['data'], { - 'provided': 4, - 'frequencies': (1, 1, 1, 1), - 'show_graph': True, - 'not_provided': 0, - 'total_count': 4, - 'responses': ('2016-06-01', '2016-06-02', '2016-06-03', '2016-06-05'), - 'percentages': (25.0, 25.0, 25.0, 25.0), - }) + self.assertEqual( + date_stats['data'], + { + 'provided': 4, + 'total_count': 4, + 'stdev': 0.9574271077563381, + 'median': 3.0, + 'show_graph': False, + 'mode': 3.5, + 'not_provided': 0, + 'mean': 2.75, + }, + ) + self.assertEqual( + decimal_stats['data'], + { + 'provided': 4, + 'frequencies': (1, 1, 1, 1), + 'show_graph': True, + 'not_provided': 0, + 'total_count': 4, + 'responses': ( + '2016-06-01', + '2016-06-02', + '2016-06-03', + '2016-06-05', + ), + 'percentages': (25.0, 25.0, 25.0, 25.0), + }, + ) def test_has_report_styles(self): self.assertTrue(self.asset.report_styles is not None) @@ -256,32 +461,45 @@ def test_formpack_results(self): submissions = self.asset.deployment.get_submissions(self.asset.owner) def _get_autoreport_values(qname, key, lang=None, index=False): - stats = OrderedDict(_get_stats_object(self.fp, - self.vs, - submissions=submissions, - lang=lang)) + stats = OrderedDict( + _get_stats_object( + self.fp, self.vs, submissions=submissions, lang=lang + ) + ) if index is False: return stats[qname][key] else: return [s[index] for s in stats[qname][key]] - self.assertEqual(_get_autoreport_values('Select_one', 'frequency', None, 0), - ['First option', 'Second option']) - self.assertEqual(_get_autoreport_values('Select_one', 'frequency', 'Espa\xf1ol', 0), - ['Primera opci\xf3n', 'Segunda opci\xf3n']) - self.assertEqual(_get_autoreport_values('Select_one', 'frequency', 'Arabic', 0), - ['\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a']) + self.assertEqual( + _get_autoreport_values('Select_one', 'frequency', None, 0), + ['First option', 'Second option'], + ) + self.assertEqual( + _get_autoreport_values('Select_one', 'frequency', 'Espa\xf1ol', 0), + ['Primera opci\xf3n', 'Segunda opci\xf3n'], + ) + self.assertEqual( + _get_autoreport_values('Select_one', 'frequency', 'Arabic', 0), + [ + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + ], + ) self.assertEqual(_get_autoreport_values('Decimal', 'median', None), 3.0) - self.assertEqual(_get_autoreport_values('Date', 'percentage', None), [ - ("2016-06-01", 25.0), - ("2016-06-02", 25.0), - ("2016-06-03", 25.0), - ("2016-06-05", 25.0) - ]) + self.assertEqual( + _get_autoreport_values('Date', 'percentage', None), + [ + ("2016-06-01", 25.0), + ("2016-06-02", 25.0), + ("2016-06-03", 25.0), + ("2016-06-05", 25.0), + ], + ) def test_has_version_and_submissions(self): self.assertEqual(self.asset.asset_versions.count(), 2) self.assertTrue(self.asset.has_deployment) + self.asset.deployment.xform.refresh_from_db() self.assertEqual(self.asset.deployment.submission_count, 4) diff --git a/kpi/tests/test_mock_data_conflicting_version_exports.py b/kpi/tests/test_mock_data_conflicting_version_exports.py index 90fee4b17a..38fff9b67c 100644 --- a/kpi/tests/test_mock_data_conflicting_version_exports.py +++ 
b/kpi/tests/test_mock_data_conflicting_version_exports.py @@ -30,7 +30,8 @@ def setUp(self): # To avoid cluttering the fixture, assign permissions here self.asset.assign_perm(self.user, PERM_VIEW_SUBMISSIONS) self.submissions = self.asset.deployment.get_submissions( - self.asset.owner) + self.asset.owner + ) self.submission_id_field = '_id' self.formpack, self.submission_stream = report_data.build_formpack( self.asset, diff --git a/kpi/tests/test_mock_data_exports.py b/kpi/tests/test_mock_data_exports.py index d0bd787a1a..816f3ab0c2 100644 --- a/kpi/tests/test_mock_data_exports.py +++ b/kpi/tests/test_mock_data_exports.py @@ -35,7 +35,6 @@ class MockDataExportsBase(TestCase): fixtures = ['test_data'] - forms = { 'Identificación de animales': { 'content': { @@ -187,7 +186,6 @@ class MockDataExportsBase(TestCase): '_attachments': [], '_bamboo_dataset_id': '', '_geolocation': [None, None], - '_id': 61, '_notes': [], '_status': 'submitted_via_web', '_submission_time': '2017-10-23T09:41:19', @@ -208,7 +206,6 @@ class MockDataExportsBase(TestCase): '_attachments': [], '_bamboo_dataset_id': '', '_geolocation': [None, None], - '_id': 62, '_notes': [], '_status': 'submitted_via_web', '_submission_time': '2017-10-23T09:41:38', @@ -229,7 +226,6 @@ class MockDataExportsBase(TestCase): '_attachments': [], '_bamboo_dataset_id': '', '_geolocation': [None, None], - '_id': 63, '_notes': [], '_status': 'submitted_via_web', '_submission_time': '2017-10-23T09:42:11', @@ -277,7 +273,6 @@ class MockDataExportsBase(TestCase): }, 'submissions': [ { - '_id': 9999, 'formhub/uuid': 'cfb562511e8e44d1998de69002b492d9', 'people/person': [ { @@ -320,19 +315,18 @@ class MockDataExportsBase(TestCase): }, 'submissions': [ { - '_id': 99999, 'formhub/uuid': 'cfb562511e8e44d1998de69002b49299', - 'an_image': 'image.png', + 'an_image': 'audio_conversion_test_image.jpg', '__version__': 'vbKavWWCpgBCZms6hQX4FB', 'meta/instanceID': 'uuid:f80be949-89b5-4af1-a42d-7d292b2bc0cd', '_xform_id_string': 'aaURCfR8mYe8pzc5h3YiZz', '_uuid': 'f80be949-89b5-4af1-a42d-7d292b2bc0cd', '_attachments': [ { - 'download_url': 'http://testserver/image.png', - 'filename': 'path/to/image.png', - } - ], + 'download_url': 'http://testserver/audio_conversion_test_image.jpg', + 'filename': 'path/to/audio_conversion_test_image.jpg', + } + ], '_status': 'submitted_via_web', '_geolocation': [None, None], '_submission_time': '2021-06-30T22:12:56', @@ -378,6 +372,7 @@ def _create_asset_with_submissions(user, content, name, submissions): submission.update({ '__version__': v_uid }) + asset.deployment.set_namespace('api_v2') asset.deployment.mock_submissions(submissions, flush_db=False) return asset @@ -487,26 +482,29 @@ def run_xls_export_test( assert result_row == expected_row def test_csv_export_default_options(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version.uid expected_lines = [ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', '"";"";"#symmetry";"";"";"";"#segments";"#fluids";"";"";"";"";"";"";"";"";"";"";""', - 
f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', + f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test(expected_lines) def test_csv_export_default_options_partial_submissions(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid expected_lines = [ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', f'"";"";"#symmetry";"";"";"";"#segments";"#fluids";"";"";"";"";"";"";"";"";"";"";""', - f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"1"', ] self.run_csv_export_test(expected_lines, user=self.anotheruser) def test_csv_export_english_labels(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = { 'lang': 'English', @@ -514,13 +512,14 @@ def test_csv_export_english_labels(self): expected_lines = [ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular 
organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', f'"";"";"#symmetry";"";"";"";"#segments";"#fluids";"";"";"";"";"";"";"";"";"";"";""', - f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', + f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test(expected_lines, export_options) def test_csv_export_spanish_labels(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = { 'lang': 'Spanish', @@ -528,13 +527,14 @@ def test_csv_export_spanish_labels(self): expected_lines = [ '"start";"end";"¿Qué tipo de simetría tiene?";"¿Qué tipo de simetría tiene?/Esférico";"¿Qué tipo de simetría tiene?/Radial";"¿Qué tipo de simetría tiene?/Bilateral";"¿Cuántos segmentos tiene tu cuerpo?";"¿Tienes fluidos corporales que ocupan espacio intracelular?";"¿Desciende de un organismo unicelular ancestral?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', '"";"";"#symmetry";"";"";"";"#segments";"#fluids";"";"";"";"";"";"";"";"";"";"";""', - f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio extracelular";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', + f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Esférico Radial Bilateral";"1";"1";"1";"6";"Sí, y algún espacio 
extracelular";"No";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Sí";"No";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Inseguro";"Sí";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test(expected_lines, export_options) def test_csv_export_english_labels_no_hxl(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = { 'lang': 'English', @@ -542,13 +542,14 @@ def test_csv_export_english_labels_no_hxl(self): } expected_lines = [ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you have?/Spherical";"What kind of symmetry do you have?/Radial";"What kind of symmetry do you have?/Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', - f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', + f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test(expected_lines, export_options) def test_csv_export_english_labels_group_sep(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid # Check `group_sep` by looking at the `select_multiple` question export_options = { @@ -558,21 +559,22 @@ def test_csv_export_english_labels_group_sep(self): expected_lines = [ '"start";"end";"What kind of symmetry do you have?";"What kind of symmetry do you 
have?%Spherical";"What kind of symmetry do you have?%Radial";"What kind of symmetry do you have?%Bilateral";"How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', '"";"";"#symmetry";"";"";"";"#segments";"#fluids";"";"";"";"";"";"";"";"";"";"";""', - f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', + f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test(expected_lines, export_options) def test_csv_export_hierarchy_in_labels(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'hierarchy_in_labels': 'true'} expected_lines = [ '"start";"end";"External Characteristics/What kind of symmetry do you have?";"External Characteristics/What kind of symmetry do you have?/Spherical";"External Characteristics/What kind of symmetry do you have?/Radial";"External Characteristics/What kind of symmetry do you have?/Bilateral";"External Characteristics/How many segments does your body have?";"Do you have body fluids that occupy intracellular space?";"Do you descend from an ancestral unicellular organism?";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', '"";"";"#symmetry";"";"";"";"#segments";"#fluids";"";"";"";"";"";"";"";"";"";"";""', - f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - 
f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', + f'"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"Spherical Radial Bilateral";"1";"1";"1";"6";"Yes, and some extracellular space";"No";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"2017-10-23T05:41:14.000-04:00";"2017-10-23T05:41:32.000-04:00";"Radial";"0";"1";"0";"3";"Yes";"No";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"2017-10-23T05:41:32.000-04:00";"2017-10-23T05:42:05.000-04:00";"Bilateral";"0";"0";"1";"2";"No / Unsure";"Yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test(expected_lines, export_options) @@ -587,62 +589,67 @@ def test_csv_export_filter_fields(self): self.run_csv_export_test(expected_lines, export_options) def test_xls_export_english_labels(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English'} expected_data = {self.asset.name: [ ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id','_uuid','_submission_time','_validation_status','_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], ['', '', '#symmetry', '', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', 61.0, '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', 62.0, '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] + ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], + ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], + ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], 
'3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] ]} self.run_xls_export_test(expected_data, export_options) def test_xls_export_english_labels_partial_submissions(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English'} expected_data = {self.asset.name: [ ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id','_uuid','_submission_time','_validation_status','_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], ['', '', '#symmetry', '', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 1.0] + ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 1.0] ]} self.run_xls_export_test( expected_data, export_options, user=self.anotheruser ) def test_xls_export_multiple_select_both(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English', 'multiple_select': 'both'} expected_data = {self.asset.name: [ ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id','_uuid','_submission_time','_validation_status','_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], ['', '', '#symmetry', '', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', 61.0, '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', 62.0, '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] + ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', 
'', version_uid, '', 1.0], + ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], + ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] ]} self.run_xls_export_test(expected_data, export_options) def test_xls_export_multiple_select_summary(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English', 'multiple_select': 'summary'} expected_data = {self.asset.name: [ ['start', 'end', 'What kind of symmetry do you have?', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id', '_uuid', '_submission_time', '_validation_status', '_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], ['', '', '#symmetry', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '6', 'Yes, and some extracellular space', 'No', 61.0, '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '3', 'Yes', 'No', 62.0, '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] + ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], + ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], + ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] ]} self.run_xls_export_test(expected_data, export_options) def test_xls_export_multiple_select_details(self): + submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English', 'multiple_select': 'details'} expected_data = {self.asset.name: [ ['start', 'end', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id', '_uuid', '_submission_time', '_validation_status', 
'_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], ['', '', '#symmetry', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', 61.0, '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', '0', '1', '0', '3', 'Yes', 'No', 62.0, '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', '0', '0', '1', '2', 'No / Unsure', 'Yes', 63.0, '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] + ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], + ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', '0', '1', '0', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], + ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] ]} self.run_xls_export_test(expected_data, export_options) @@ -730,12 +737,18 @@ def test_xls_export_filter_fields_without_index(self): def test_xls_export_filter_fields_with_media_url(self): asset_name = 'Simple media' export_options = {'fields': ['an_image'], 'include_media_url': True} + asset = self.assets[asset_name] + submissions = self.forms[asset_name]['submissions'] + submission = asset.deployment.get_submission( + submissions[0]['_id'], asset.owner + ) + media_url = submission['_attachments'][0]['download_url'] expected_data = { asset_name: [ ['Submit an image', 'Submit an image_URL', '_uuid'], [ - 'image.png', - 'http://testserver/image.png', + 'audio_conversion_test_image.jpg', + media_url, 'f80be949-89b5-4af1-a42d-7d292b2bc0cd', ], ] @@ -824,6 +837,7 @@ def test_xls_export_filter_fields_repeat_groups(self): def test_xls_export_repeat_groups(self): asset = self.assets['Simple repeat group'] + submissions = self.forms['Simple repeat group']['submissions'] version_uid = asset.latest_deployed_version_uid expected_data = { asset.name: [ @@ -840,7 +854,7 @@ def test_xls_export_repeat_groups(self): '_index', ], [ - 9999.0, + submissions[0]['_id'], 'f80be949-89b5-4af1-a29d-7d292b2bc0cd', '2021-06-30T22:12:56', '', @@ -875,7 +889,7 @@ def test_xls_export_repeat_groups(self): 1.0, 'Simple repeat group', 1.0, - 9999.0, + submissions[0]['_id'], 'f80be949-89b5-4af1-a29d-7d292b2bc0cd', '2021-06-30T22:12:56', '', @@ -891,7 +905,7 @@ def test_xls_export_repeat_groups(self): 2.0, 'Simple repeat group', 1.0, - 9999.0, + submissions[0]['_id'], 'f80be949-89b5-4af1-a29d-7d292b2bc0cd', '2021-06-30T22:12:56', '', @@ -1106,6 +1120,7 @@ def test_export_long_form_title(self): ) def test_export_latest_version_only(self): + submissions = self.forms[self.form_names[0]]['submissions'] new_survey_content = [{ 'label': ['Do you descend... 
new label', '\xbfDesciende de... etiqueta nueva'], @@ -1124,9 +1139,9 @@ def test_export_latest_version_only(self): self.asset.deploy(backend='mock', active=True) expected_lines = [ '"Do you descend... new label";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', - f'"no";"61";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', - f'"no";"62";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"yes";"63";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"' + f'"no";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', + f'"no";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', + f'"yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"' ] self.run_csv_export_test( expected_lines, {'fields_from_all_versions': 'false'}) @@ -1141,7 +1156,7 @@ def test_export_exceeding_api_submission_limit(self): asset = Asset.objects.create( name='Lots of submissions', owner=self.user, - content={'survey': [{'name': 'q', 'type': 'integer'}]}, + content={'survey': [{'label': 'q', 'name': 'q', 'type': 'integer'}]}, ) asset.deploy(backend='mock', active=True) submissions = [ @@ -1167,18 +1182,23 @@ def test_export_with_disabled_questions(self): name='Form with undocumented `disabled` column', owner=self.user, content={'survey': [ - {'name': 'q', 'type': 'integer'}, + {'label': 'q', 'name': 'q', 'type': 'integer'}, {'name': 'ignore', 'type': 'select_one nope', 'disabled': True}, ]}, ) asset.deploy(backend='mock', active=True) - asset.deployment.mock_submissions( - [{'__version__': asset.latest_deployed_version.uid, 'q': 123,}] - ) + submissions = [ + { + '__version__': asset.latest_deployed_version.uid, + 'q': 123, + '_submission_time': '2017-10-23T09:41:19', + } + ] + asset.deployment.mock_submissions(submissions) # observe that `ignore` does not appear! 
expected_lines = [ '"q";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', - f'"123";"1";"";"";"";"";"";"";"{asset.latest_deployed_version.uid}";"";"1"', + f'"123";"{submissions[0]["_id"]}";"{submissions[0]["_uuid"]}";"2017-10-23T09:41:19";"";"";"submitted_via_web";"someuser";"{asset.latest_deployed_version.uid}";"";"1"', ] # fails with `KeyError` prior to fix for kobotoolbox/formpack#219 self.run_csv_export_test(expected_lines, asset=asset) diff --git a/kpi/tests/test_mongo_helper.py b/kpi/tests/test_mongo_helper.py index f931e1bf14..e34d079581 100644 --- a/kpi/tests/test_mongo_helper.py +++ b/kpi/tests/test_mongo_helper.py @@ -1,6 +1,7 @@ from __future__ import annotations import copy +import itertools from django.conf import settings from django.test import TestCase @@ -25,10 +26,15 @@ def assert_instances_count(self, instances: tuple, expected_count: int): assert instances[1] == expected_count def test_get_instances(self): - users = baker.make(settings.AUTH_USER_MODEL, _quantity=2) + names = ('bob', 'alice') + users = baker.make( + settings.AUTH_USER_MODEL, + username=itertools.cycle(names), + _quantity=2, + ) assets = [] - for user in users: - asset = baker.make('kpi.Asset', owner=user) + for idx, user in enumerate(users): + asset = baker.make('kpi.Asset', owner=user, uid=f'assetUid{idx}') asset.deploy(backend='mock', active=True) assets.append(asset) (asset1, asset2) = assets @@ -62,8 +68,9 @@ def test_get_instances(self): ) def test_get_instances_permission_filters(self): - user = baker.make(settings.AUTH_USER_MODEL) - asset = baker.make('kpi.Asset', owner=user) + bob = baker.make(settings.AUTH_USER_MODEL, username='bob') + alice = baker.make(settings.AUTH_USER_MODEL, username='alice') + asset = baker.make('kpi.Asset', owner=bob, uid='assetUid') asset.deploy(backend='mock', active=True) userform_id = asset.deployment.mongo_userform_id submissions = [ diff --git a/kpi/tests/utils/dicts.py b/kpi/tests/utils/dicts.py new file mode 100644 index 0000000000..3e25ddfe3c --- /dev/null +++ b/kpi/tests/utils/dicts.py @@ -0,0 +1,35 @@ +from __future__ import annotations + + +def nested_dict_from_keys(dict_: dict) -> dict: + """ + Transforms a dictionary with keys containing slashes into a nested + dictionary structure. 
+ """ + + result = {} + + for key, value in dict_.items(): + keys = key.split('/') + sub_dict = result + for sub_key in keys[:-1]: + if sub_key not in sub_dict: + sub_dict[sub_key] = {} + sub_dict = sub_dict[sub_key] + + if isinstance(value, list): + sub_dict[keys[-1]] = [ + { + sub_key.split('/')[-1]: sub_val + for sub_key, sub_val in item.items() + } + for item in value if item + ] + else: + sub_dict[keys[-1]] = ( + nested_dict_from_keys(value) + if isinstance(value, dict) + else value + ) + + return result diff --git a/kpi/tests/utils/mock.py b/kpi/tests/utils/mock.py index 1400228e93..f579eb1e6c 100644 --- a/kpi/tests/utils/mock.py +++ b/kpi/tests/utils/mock.py @@ -1,31 +1,12 @@ # coding: utf-8 import json import lxml -import os from mimetypes import guess_type -from tempfile import NamedTemporaryFile -from typing import Optional from urllib.parse import parse_qs, unquote from django.conf import settings -from django.core.files import File - -from django.core.files.storage import default_storage from rest_framework import status -from kobo.apps.openrosa.apps.logger.models.attachment import ( - Attachment, - upload_to, -) -from kobo.apps.openrosa.libs.utils.image_tools import ( - get_optimized_image_path, - resize, -) -from kpi.deployment_backends.kc_access.storage import ( - default_kobocat_storage, - KobocatFileSystemStorage, -) -from kpi.mixins.audio_transcoding import AudioTranscodingMixin from kpi.models.asset_snapshot import AssetSnapshot from kpi.tests.utils.xml import get_form_and_submission_tag_names @@ -112,3 +93,12 @@ def enketo_view_instance_response(request): } headers = {} return status.HTTP_201_CREATED, headers, json.dumps(resp_body) + + +def guess_type_mock(url, strict=True): + """ + In the container, `*.3gp` returns "audio/3gpp" instead of "video/3gpp". + """ + if url.endswith('.3gp'): + return 'video/3gpp', None + return guess_type(url, strict) diff --git a/kpi/tests/utils/xml.py b/kpi/tests/utils/xml.py index 4e0e4d2758..57edce20fc 100644 --- a/kpi/tests/utils/xml.py +++ b/kpi/tests/utils/xml.py @@ -4,7 +4,7 @@ def get_form_and_submission_tag_names(form: str, submission: str) -> tuple[str, str]: - submission_root_name = etree.fromstring(submission).tag + submission_root_name = etree.fromstring(submission.encode()).tag tree = etree.ElementTree(etree.fromstring(form)) root = tree.getroot() # We cannot use `root.nsmap` directly because the default namespace key is diff --git a/kpi/utils/files.py b/kpi/utils/files.py index adb911ee1e..f732dd83d6 100644 --- a/kpi/utils/files.py +++ b/kpi/utils/files.py @@ -1,12 +1,18 @@ import os -from mimetypes import guess_type +import mimetypes +# from mimetypes import guess_type from django.core.files.base import ContentFile class ExtendedContentFile(ContentFile): + def __init__(self, content, name=None, *args, **kwargs): + super().__init__(content, name) + self._mimetype = kwargs.get('mimetype') + @property def content_type(self): - mimetype, _ = guess_type(os.path.basename(self.name)) + if not (mimetype := self._mimetype): + mimetype, _ = mimetypes.guess_type(os.path.basename(self.name)) return mimetype diff --git a/kpi/views/v2/attachment.py b/kpi/views/v2/attachment.py index b35c1dd4d0..fb900bbb18 100644 --- a/kpi/views/v2/attachment.py +++ b/kpi/views/v2/attachment.py @@ -140,7 +140,7 @@ def _get_response( # the content to the Response object if settings.TESTING: # setting the content type to `None` here allows the renderer to - # specify the content type for the response + # specify the content type for the response. 
content_type = ( attachment.mimetype if request.accepted_renderer.format != MP3ConversionRenderer.format diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index febab500ed..6ab9300769 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -396,7 +396,6 @@ def destroy(self, request, pk, *args, **kwargs): json_response = deployment.delete_submission( submission_id, user=request.user ) - if json_response['status'] == status.HTTP_204_NO_CONTENT: AuditLog.objects.create( app_label='logger', @@ -531,16 +530,11 @@ def retrieve(self, request, pk, *args, **kwargs): # Join all parameters to be passed to `deployment.get_submissions()` params.update(filters) - # The `get_submissions()` is a generator in KobocatDeploymentBackend - # class but a list in MockDeploymentBackend. We cast the result as a list - # no matter what is the deployment back-end class to make it work with - # both. Since the number of submissions is very small, it should not - # have a big impact on memory (i.e. list vs generator) - submissions = list(deployment.get_submissions(**params)) + submissions = deployment.get_submissions(**params) if not submissions: raise Http404 - submission = submissions[0] + submission = list(submissions)[0] return Response(submission) @action(detail=True, methods=['POST'], From e6139cac60865132a70017e81d2c79cab615b662 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 8 Aug 2024 11:51:39 -0400 Subject: [PATCH 024/119] Fix last tests --- kobo/apps/openrosa/libs/utils/logger_tools.py | 1 - kobo/apps/trackers/submission_utils.py | 9 +++------ kobo/settings/testing.py | 7 +++++++ kpi/deployment_backends/mock_backend.py | 14 +++++++------- kpi/fixtures/attachments/IMG_4266-11_38_22.jpg | Bin 0 -> 1130 bytes ...creenshot_2024-02-14_at_18.31.39-11_38_35.jpg | Bin 0 -> 1130 bytes .../attachments}/audio_conversion_test_clip.3gp | Bin .../attachments}/audio_conversion_test_image.jpg | Bin ...\330\261\330\247\331\212\330\271-10_7_41.jpg" | Bin 0 -> 1130 bytes kpi/management/commands/cypress_testserver.py | 2 +- kpi/tests/api/v2/test_api_asset_usage.py | 10 +++++++--- kpi/tests/api/v2/test_api_service_usage.py | 8 +++++--- kpi/tests/api/v2/test_api_submissions.py | 4 ++-- kpi/tests/test_mock_data_exports.py | 2 +- kpi/tests/test_mongo_helper.py | 4 +--- 15 files changed, 34 insertions(+), 27 deletions(-) create mode 100644 kpi/fixtures/attachments/IMG_4266-11_38_22.jpg create mode 100644 kpi/fixtures/attachments/Screenshot_2024-02-14_at_18.31.39-11_38_35.jpg rename kpi/{tests => fixtures/attachments}/audio_conversion_test_clip.3gp (100%) rename kpi/{tests => fixtures/attachments}/audio_conversion_test_image.jpg (100%) create mode 100644 "kpi/fixtures/attachments/\331\203\331\210\330\250\331\210-\330\261\330\247\331\212\330\271-10_7_41.jpg" diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index 3fd880394e..81decfc462 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -466,7 +466,6 @@ def publish_xls_form(xls_file, user, id_string=None): with transaction.atomic(): dd = DataDictionary.objects.create(user=user, xls=xls_file) except IntegrityError as e: - breakpoint() raise e return dd diff --git a/kobo/apps/trackers/submission_utils.py b/kobo/apps/trackers/submission_utils.py index 18702cf842..0294318d96 100644 --- a/kobo/apps/trackers/submission_utils.py +++ b/kobo/apps/trackers/submission_utils.py @@ -52,10 +52,7 @@ def _get_uid(count): _quantity=assets_per_user, ) - 
print([a.uid for a in assets])
-    breakpoint()
     for asset in assets:
-        print('DEPLOYING ', asset.uid, flush=True)
         asset.deploy(backend='mock', active=True)
         asset.deployment.set_namespace(ROUTER_URL_NAMESPACE)
         asset.save()  # might be redundant?
@@ -68,9 +65,9 @@ def expected_file_size(submissions: int = 1):
     Calculate the expected combined file size for the test audio clip and image
     """
     return (os.path.getsize(
-        settings.BASE_DIR + '/kpi/tests/audio_conversion_test_clip.3gp'
+        settings.BASE_DIR + '/kpi/fixtures/attachments/audio_conversion_test_clip.3gp'
     ) + os.path.getsize(
-        settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg'
+        settings.BASE_DIR + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg'
     )) * submissions
@@ -106,7 +103,7 @@ def add_mock_submissions(assets: list, submissions_per_asset: int = 1):
             }
             asset_submissions.append(submission)
 
-        asset.deployment.mock_submissions(asset_submissions, flush_db=False)
+        asset.deployment.mock_submissions(asset_submissions)
         all_submissions = all_submissions + asset_submissions
 
     # update_xform_counters(asset, submissions=submissions_per_asset)
diff --git a/kobo/settings/testing.py b/kobo/settings/testing.py
index 421adcaeb6..fcd4a2e749 100644
--- a/kobo/settings/testing.py
+++ b/kobo/settings/testing.py
@@ -52,3 +52,10 @@
 TEST_USERNAME = 'bob'
 
 OPENROSA_DB_ALIAS = DEFAULT_DB_ALIAS
+
+# STORAGES['default']['BACKEND'] = 'django.core.files.storage.FileSystemStorage'
+# KOBOCAT_DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
+# KOBOCAT_MEDIA_ROOT = os.environ.get(
+#     'KOBOCAT_MEDIA_ROOT', MEDIA_ROOT.replace('kpi', 'kobocat')
+# )
+# PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py
index 702ce886c4..4dfa0b8e28 100644
--- a/kpi/deployment_backends/mock_backend.py
+++ b/kpi/deployment_backends/mock_backend.py
@@ -49,7 +49,7 @@ def get_submissions(
         ))
 
     def mock_submissions(
-        self, submissions, create_uuids: bool = True, flush_db: bool = True
+        self, submissions, create_uuids: bool = True
     ):
         """
         Simulate client (i.e.: Enketo or Collect) data submission.
@@ -120,11 +120,10 @@ class FakeRequest:
             if error:
                 raise Exception(error)
 
-            # Inject (or update) real PK in submission
-            # FIXME TRY TO ASSIGN Instance.PK if it already exists
+            # Inject (or update) real PKs in submission…
             submission['_id'] = instance.pk
 
-            # Reassign attachment PKs
+            # … and attachments
             if '_attachments' in submission:
                 for idx, attachment in enumerate(instance.attachments.all()):
                     submission['_attachments'][idx]['id'] = attachment.pk
@@ -160,14 +159,15 @@ def _get_media_files(self, submission):
             file_ = os.path.join(
                 settings.BASE_DIR,
                 'kpi',
-                'tests',
+                'fixtures',
+                'attachments',
                 basename
             )
 
             if not os.path.isfile(file_):
                 raise Exception(
                     f'File `{basename}` does not exist! 
Use `path/to/image.png` if' - f' you need a fake attachment, or ' - f'`audio_conversion_test_image.(jpg|3gp)` for real attachment' + f' you need a fake attachment, or use one of file names ' + f'inside `kpi/fixtures/attachments for real attachment' ) with open(file_, 'rb') as f: diff --git a/kpi/fixtures/attachments/IMG_4266-11_38_22.jpg b/kpi/fixtures/attachments/IMG_4266-11_38_22.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f60baa8ea58f5de22b57f6c2705630c797a80bee GIT binary patch literal 1130 zcma)5O;6iE5M4JRq`^R}fP?_4D|2b9#%nty!4@t`Fi}&<6@p0Z)z}NMg1wgQG=^*Y zuX-ql{)m2@xL55@aMx}UXdzWcvb6g=@6F8a+vIa{4b~1~=M;c$_bDiXn@GL`P9Iu> z3&4Opg?9l+u5)rfihLCzFBD9hoM6EsE{aVbNrH&L?q2Mh)&NKF1fMyc#{c~Ak%x}0 z@vrNKX!!fM@4UDS@bTrVo^?5}6r10B4tHZUc6}E|CXC&4FH~cV&%{-_PGQ8uEF~If zd@HRA-x!B*p9B~xf>gCcQHD)LFdJs0rZjfUXHXX9Z6wx^*r-a9T9;L^4#&aMf`NUi z9_gKNIl9yM@m!!xRr$%fGpzUlW~;jnk%V|0W=G7KzC)3;KxukHuf zj7ZQU6WSI>Pm7WgQ33TWog%;=*if z)tTK(bEe*yYtQawXC@cXW>9*$w=R1EQ3s{Vse9>W>VND}(+AYu$q%rUc^24L^c6A5 zH}D=5^7;H+zA!gecvzs{La|UNF0iH2LaD@-*(}QJ(sFruiLF#tRw|Xvjg5`XN7G>D z=jRvM#kF#IZGDwnU7!B=-$il-xFUVnOpf8e1CGgYO!5i5q{yF9Wd24#`3}O}L6Wb4 Q%`sFs$ALZ2dzXCq1x>LgzyJUM literal 0 HcmV?d00001 diff --git a/kpi/fixtures/attachments/Screenshot_2024-02-14_at_18.31.39-11_38_35.jpg b/kpi/fixtures/attachments/Screenshot_2024-02-14_at_18.31.39-11_38_35.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f60baa8ea58f5de22b57f6c2705630c797a80bee GIT binary patch literal 1130 zcma)5O;6iE5M4JRq`^R}fP?_4D|2b9#%nty!4@t`Fi}&<6@p0Z)z}NMg1wgQG=^*Y zuX-ql{)m2@xL55@aMx}UXdzWcvb6g=@6F8a+vIa{4b~1~=M;c$_bDiXn@GL`P9Iu> z3&4Opg?9l+u5)rfihLCzFBD9hoM6EsE{aVbNrH&L?q2Mh)&NKF1fMyc#{c~Ak%x}0 z@vrNKX!!fM@4UDS@bTrVo^?5}6r10B4tHZUc6}E|CXC&4FH~cV&%{-_PGQ8uEF~If zd@HRA-x!B*p9B~xf>gCcQHD)LFdJs0rZjfUXHXX9Z6wx^*r-a9T9;L^4#&aMf`NUi z9_gKNIl9yM@m!!xRr$%fGpzUlW~;jnk%V|0W=G7KzC)3;KxukHuf zj7ZQU6WSI>Pm7WgQ33TWog%;=*if z)tTK(bEe*yYtQawXC@cXW>9*$w=R1EQ3s{Vse9>W>VND}(+AYu$q%rUc^24L^c6A5 zH}D=5^7;H+zA!gecvzs{La|UNF0iH2LaD@-*(}QJ(sFruiLF#tRw|Xvjg5`XN7G>D z=jRvM#kF#IZGDwnU7!B=-$il-xFUVnOpf8e1CGgYO!5i5q{yF9Wd24#`3}O}L6Wb4 Q%`sFs$ALZ2dzXCq1x>LgzyJUM literal 0 HcmV?d00001 diff --git a/kpi/tests/audio_conversion_test_clip.3gp b/kpi/fixtures/attachments/audio_conversion_test_clip.3gp similarity index 100% rename from kpi/tests/audio_conversion_test_clip.3gp rename to kpi/fixtures/attachments/audio_conversion_test_clip.3gp diff --git a/kpi/tests/audio_conversion_test_image.jpg b/kpi/fixtures/attachments/audio_conversion_test_image.jpg similarity index 100% rename from kpi/tests/audio_conversion_test_image.jpg rename to kpi/fixtures/attachments/audio_conversion_test_image.jpg diff --git "a/kpi/fixtures/attachments/\331\203\331\210\330\250\331\210-\330\261\330\247\331\212\330\271-10_7_41.jpg" "b/kpi/fixtures/attachments/\331\203\331\210\330\250\331\210-\330\261\330\247\331\212\330\271-10_7_41.jpg" new file mode 100644 index 0000000000000000000000000000000000000000..f60baa8ea58f5de22b57f6c2705630c797a80bee GIT binary patch literal 1130 zcma)5O;6iE5M4JRq`^R}fP?_4D|2b9#%nty!4@t`Fi}&<6@p0Z)z}NMg1wgQG=^*Y zuX-ql{)m2@xL55@aMx}UXdzWcvb6g=@6F8a+vIa{4b~1~=M;c$_bDiXn@GL`P9Iu> z3&4Opg?9l+u5)rfihLCzFBD9hoM6EsE{aVbNrH&L?q2Mh)&NKF1fMyc#{c~Ak%x}0 z@vrNKX!!fM@4UDS@bTrVo^?5}6r10B4tHZUc6}E|CXC&4FH~cV&%{-_PGQ8uEF~If zd@HRA-x!B*p9B~xf>gCcQHD)LFdJs0rZjfUXHXX9Z6wx^*r-a9T9;L^4#&aMf`NUi z9_gKNIl9yM@m!!xRr$%fGpzUlW~;jnk%V|0W=G7KzC)3;KxukHuf zj7ZQU6WSI>Pm7WgQ33TWog%;=*if z)tTK(bEe*yYtQawXC@cXW>9*$w=R1EQ3s{Vse9>W>VND}(+AYu$q%rUc^24L^c6A5 
zH}D=5^7;H+zA!gecvzs{La|UNF0iH2LaD@-*(}QJ(sFruiLF#tRw|Xvjg5`XN7G>D z=jRvM#kF#IZGDwnU7!B=-$il-xFUVnOpf8e1CGgYO!5i5q{yF9Wd24#`3}O}L6Wb4 Q%`sFs$ALZ2dzXCq1x>LgzyJUM literal 0 HcmV?d00001 diff --git a/kpi/management/commands/cypress_testserver.py b/kpi/management/commands/cypress_testserver.py index 42e70fdcb8..3d66c22970 100644 --- a/kpi/management/commands/cypress_testserver.py +++ b/kpi/management/commands/cypress_testserver.py @@ -381,4 +381,4 @@ def set_version(submission): submission['__version__'] = latest_version_uuid return submission submission_generator = (set_version(s) for s in submissions) - asset.deployment.mock_submissions(submission_generator, flush_db=False) + asset.deployment.mock_submissions(submission_generator) diff --git a/kpi/tests/api/v2/test_api_asset_usage.py b/kpi/tests/api/v2/test_api_asset_usage.py index 261db675b4..4f597c0e03 100644 --- a/kpi/tests/api/v2/test_api_asset_usage.py +++ b/kpi/tests/api/v2/test_api_asset_usage.py @@ -109,7 +109,7 @@ def __add_submissions(self): submissions.append(submission1) submissions.append(submission2) - self.asset.deployment.mock_submissions(submissions, flush_db=False) + self.asset.deployment.mock_submissions(submissions) def __create_asset(self): content_source_asset = { @@ -140,8 +140,12 @@ def __expected_file_size(self): Calculate the expected combined file size for the test audio clip and image """ return os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_clip.3gp' - ) + os.path.getsize(settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg') + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_clip.3gp' + ) + os.path.getsize( + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg' + ) def test_anonymous_user(self): """ diff --git a/kpi/tests/api/v2/test_api_service_usage.py b/kpi/tests/api/v2/test_api_service_usage.py index 0d56a71133..9d927d93c8 100644 --- a/kpi/tests/api/v2/test_api_service_usage.py +++ b/kpi/tests/api/v2/test_api_service_usage.py @@ -137,7 +137,7 @@ def add_submissions(self, count=2): self.attachment_id = self.attachment_id + 2 submissions.append(submission) - self.asset.deployment.mock_submissions(submissions, flush_db=False) + self.asset.deployment.mock_submissions(submissions) @staticmethod def expected_file_size(): @@ -145,9 +145,11 @@ def expected_file_size(): Calculate the expected combined file size for the test audio clip and image """ return os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_clip.3gp' + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_clip.3gp' ) + os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg' + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg' ) diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 9ffad5722d..98c4abdaf7 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -1060,7 +1060,7 @@ def test_attachments_rewrite(self): { 'group_ec9yq67/group_dq8as25/group_xt0za80': [ { - 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': 'Screenshot 2024-02-14 at 18.31.39-11_38_35.png' + 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': 'Screenshot 2024-02-14 at 18.31.39-11_38_35.jpg' } ] }, @@ -1094,7 +1094,7 @@ def test_attachments_rewrite(self): 'download_medium_url': 'http://kc.testserver/3.jpg', 'download_small_url': 'http://kc.testserver/3.jpg', 'mimetype': 'image/jpeg', - 
'filename': 'anotheruser/attachments/formhub-uuid/submission-uuid/Screenshot_2024-02-14_at_18.31.39-11_38_35.png', + 'filename': 'anotheruser/attachments/formhub-uuid/submission-uuid/Screenshot_2024-02-14_at_18.31.39-11_38_35.jpg', 'instance': 1, 'xform': 1, 'id': 3, diff --git a/kpi/tests/test_mock_data_exports.py b/kpi/tests/test_mock_data_exports.py index 816f3ab0c2..fdc9986c74 100644 --- a/kpi/tests/test_mock_data_exports.py +++ b/kpi/tests/test_mock_data_exports.py @@ -373,7 +373,7 @@ def _create_asset_with_submissions(user, content, name, submissions): '__version__': v_uid }) asset.deployment.set_namespace('api_v2') - asset.deployment.mock_submissions(submissions, flush_db=False) + asset.deployment.mock_submissions(submissions) return asset diff --git a/kpi/tests/test_mongo_helper.py b/kpi/tests/test_mongo_helper.py index e34d079581..48a1192b9a 100644 --- a/kpi/tests/test_mongo_helper.py +++ b/kpi/tests/test_mongo_helper.py @@ -18,9 +18,7 @@ def setUp(self): def add_submissions(self, asset, submissions: list[dict]): for submission in submissions: submission['__version__'] = asset.latest_deployed_version.uid - asset.deployment.mock_submissions( - copy.deepcopy(submissions), flush_db=False - ) + asset.deployment.mock_submissions(copy.deepcopy(submissions)) def assert_instances_count(self, instances: tuple, expected_count: int): assert instances[1] == expected_count From 244c38ea45ec110ab483497f30f8331580f0a704 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 8 Aug 2024 12:17:57 -0400 Subject: [PATCH 025/119] Add pytest-xdist pip dependency --- dependencies/pip/dev_requirements.in | 4 ++-- dependencies/pip/dev_requirements.txt | 5 +++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/dependencies/pip/dev_requirements.in b/dependencies/pip/dev_requirements.in index ce4a55e5c0..2ef94dd41d 100644 --- a/dependencies/pip/dev_requirements.in +++ b/dependencies/pip/dev_requirements.in @@ -11,8 +11,8 @@ pytest pytest-cov pytest-django pytest-env +pytest-xdist - -# Kobocat +# KoboCAT httmock simplejson diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index 4030660476..69da04e3fd 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -242,6 +242,8 @@ et-xmlfile==1.1.0 # via openpyxl exceptiongroup==1.2.0 # via pytest +execnet==2.1.1 + # via pytest-xdist executing==2.0.1 # via stack-data fabric==3.2.2 @@ -469,12 +471,15 @@ pytest==8.1.1 # pytest-cov # pytest-django # pytest-env + # pytest-xdist pytest-cov==5.0.0 # via -r dependencies/pip/dev_requirements.in pytest-django==4.8.0 # via -r dependencies/pip/dev_requirements.in pytest-env==1.1.3 # via -r dependencies/pip/dev_requirements.in +pytest-xdist==3.6.1 + # via -r dependencies/pip/dev_requirements.in python-crontab==3.0.0 # via django-celery-beat python-dateutil==2.9.0.post0 From be7505e764fe0d750182e685b5b4e489370b502d Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 8 Aug 2024 16:15:27 -0400 Subject: [PATCH 026/119] Fix unit tests with FileSystemStorage --- .../logger/tests/models/test_attachment.py | 1 + kobo/apps/openrosa/libs/utils/image_tools.py | 3 +- kobo/settings/testing.py | 7 - kpi/tests/api/v1/test_api_submissions.py | 6 +- kpi/tests/api/v2/test_api_attachments.py | 4 +- kpi/tests/api/v2/test_api_submissions.py | 168 ++++++++---------- 6 files changed, 80 insertions(+), 109 deletions(-) diff --git a/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py 
b/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py index 693e49e4e8..3ae291cc20 100644 --- a/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py +++ b/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py @@ -12,6 +12,7 @@ default_kobocat_storage as default_storage, ) + class TestAttachment(TestBase): def setUp(self): diff --git a/kobo/apps/openrosa/libs/utils/image_tools.py b/kobo/apps/openrosa/libs/utils/image_tools.py index 61e8684e60..39f83bc35c 100644 --- a/kobo/apps/openrosa/libs/utils/image_tools.py +++ b/kobo/apps/openrosa/libs/utils/image_tools.py @@ -72,11 +72,10 @@ def _save_thumbnails(image, original_path, size, suffix): def resize(filename): image = None - if isinstance(default_storage, FileSystemStorage): path = default_storage.path(filename) image = Image.open(path) - original_path = path + original_path = filename else: path = default_storage.url(filename) original_path = filename diff --git a/kobo/settings/testing.py b/kobo/settings/testing.py index fcd4a2e749..421adcaeb6 100644 --- a/kobo/settings/testing.py +++ b/kobo/settings/testing.py @@ -52,10 +52,3 @@ TEST_USERNAME = 'bob' OPENROSA_DB_ALIAS = DEFAULT_DB_ALIAS - -# STORAGES['default']['BACKEND'] = 'django.core.files.storage.FileSystemStorage' -# KOBOCAT_DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' -# KOBOCAT_MEDIA_ROOT = os.environ.get( -# 'KOBOCAT_MEDIA_ROOT', MEDIA_ROOT.replace('kpi', 'kobocat') -# ) -# PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher'] diff --git a/kpi/tests/api/v1/test_api_submissions.py b/kpi/tests/api/v1/test_api_submissions.py index 4c5ee14ae0..509ed0365b 100644 --- a/kpi/tests/api/v1/test_api_submissions.py +++ b/kpi/tests/api/v1/test_api_submissions.py @@ -90,7 +90,7 @@ def test_list_submissions_as_owner_with_params(self): 'limit': 5, 'sort': '{"q1": -1}', 'fields': '["q1", "_submitted_by"]', - 'query': '{"_submitted_by": {"$in": ["unknown", "someuser", "another"]}}', + 'query': '{"_submitted_by": {"$in": ["unknownuser", "someuser", "anotheruser"]}}', } ) # ToDo add more assertions. E.g. 
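The hunks below replace random submission sampling with a fixed layout; a minimal sketch of that layout (two submissions per user, in a fixed order, using the same usernames created in `setUp` — the extra `index` key is purely illustrative):

    # Illustrative only: deterministic ordering lets the tests slice known
    # ranges instead of shuffling and picking submissions at random.
    submitted_by_choices = ['unknownuser', 'someuser', 'anotheruser']
    submissions = [
        {'_submitted_by': username, 'index': i}
        for username in submitted_by_choices
        for i in range(2)
    ]
    submissions_by_unknownuser = submissions[0:2]
    submissions_by_someuser = submissions[2:4]
    submissions_by_anotheruser = submissions[4:6]
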
test whether sort, limit, start really work @@ -98,7 +98,7 @@ def test_list_submissions_as_owner_with_params(self): self.assertEqual(response.status_code, status.HTTP_200_OK) def test_delete_submission_as_owner(self): - submission = self.get_random_submission(self.asset.owner) + submission = self.submissions_submitted_by_someuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -116,7 +116,7 @@ def test_delete_submission_as_owner(self): def test_delete_submission_shared_as_anotheruser(self): self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() - submission = self.get_random_submission(self.asset.owner) + submission = self.submissions_submitted_by_someuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ diff --git a/kpi/tests/api/v2/test_api_attachments.py b/kpi/tests/api/v2/test_api_attachments.py index 49f83ff3b8..667de9a1ce 100644 --- a/kpi/tests/api/v2/test_api_attachments.py +++ b/kpi/tests/api/v2/test_api_attachments.py @@ -211,7 +211,9 @@ def test_duplicate_attachment_with_submission(self): duplicate_file = response.data # Ensure that the files are the same - assert original_file.read() == duplicate_file.read() + with default_storage.open(str(original_file), 'rb') as of: + with default_storage.open(str(duplicate_file), 'rb') as df: + assert of.read() == df.read() def test_xpath_not_found(self): query_dict = QueryDict('', mutable=True) diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 98c4abdaf7..c4eb694d2b 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -71,8 +71,8 @@ def setUp(self): self.client.login(username='someuser', password='someuser') self.someuser = User.objects.get(username='someuser') self.anotheruser = User.objects.get(username='anotheruser') - self.unknown_user = User.objects.create(username='unknown_user') - UserProfile.objects.create(user=self.unknown_user) + self.unknownuser = User.objects.create(username='unknownuser') + UserProfile.objects.create(user=self.unknownuser) content_source_asset = Asset.objects.get(id=1) self.asset = Asset.objects.create( @@ -91,66 +91,36 @@ def setUp(self): ) self._deployment = self.asset.deployment - def get_random_submission(self, user: settings.AUTH_USER_MODEL) -> dict: - return self.get_random_submissions(user, 1)[0] - - def get_random_submissions( - self, user: settings.AUTH_USER_MODEL, limit: int = 1 - ) -> list: - """ - Get random submissions within all generated submissions. - - If user is not the owner, we only return submissions submitted by them. - It is useful to ensure restricted users fail tests with forbidden - submissions. 
- """ - query = {} - if self.asset.owner != user: - query = {'_submitted_by': user.username} - - submissions = self.asset.deployment.get_submissions(user, query=query) - random.shuffle(submissions) - return submissions[:limit] - def _add_submissions(self, other_fields: dict = None): letters = string.ascii_letters submissions = [] v_uid = self.asset.latest_deployed_version.uid self.submissions_submitted_by_someuser = [] - self.submissions_submitted_by_unknown = [] + self.submissions_submitted_by_unknownuser = [] self.submissions_submitted_by_anotheruser = [] - submitted_by_choices = ['unknown_user', 'someuser', 'anotheruser'] - for i in range(20): - # We want to have at least one submission from each - if i <= 2: - submitted_by = submitted_by_choices[i] - else: - submitted_by = random.choice(submitted_by_choices) - uuid_ = uuid.uuid4() - submission = { - '__version__': v_uid, - 'q1': ''.join(random.choice(letters) for l in range(10)), - 'q2': ''.join(random.choice(letters) for l in range(10)), - 'meta/instanceID': f'uuid:{uuid_}', - '_uuid': str(uuid_), - '_submitted_by': submitted_by - } - if other_fields is not None: - submission.update(**other_fields) - - if submitted_by == 'someuser': - self.submissions_submitted_by_someuser.append(submission) - - if submitted_by == 'unknown_user': - self.submissions_submitted_by_unknown.append(submission) - - if submitted_by == 'anotheruser': - self.submissions_submitted_by_anotheruser.append(submission) - - submissions.append(submission) + submitted_by_choices = ['unknownuser', 'someuser', 'anotheruser'] + for submitted_by in submitted_by_choices: + for i in range(2): + uuid_ = uuid.uuid4() + submission = { + '__version__': v_uid, + 'q1': ''.join(random.choice(letters) for letter in range(10)), + 'q2': ''.join(random.choice(letters) for letter in range(10)), + 'meta/instanceID': f'uuid:{uuid_}', + '_uuid': str(uuid_), + '_submitted_by': submitted_by + } + if other_fields is not None: + submission.update(**other_fields) + + submissions.append(submission) self.asset.deployment.mock_submissions(submissions) + + self.submissions_submitted_by_unknownuser = submissions[0:2] + self.submissions_submitted_by_someuser = submissions[2:4] + self.submissions_submitted_by_anotheruser = submissions[4:6] self.submissions = submissions def _log_in_as_another_user(self): @@ -304,7 +274,7 @@ def test_delete_all_allowed_submissions_with_partial_perms_as_anotheruser(self): response = self.client.get(self.submission_list_url, {'format': 'json'}) unknown_submission_ids = [ - sub['_id'] for sub in self.submissions_submitted_by_unknown + sub['_id'] for sub in self.submissions_submitted_by_unknownuser ] someuser_submission_ids = [ sub['_id'] for sub in self.submissions_submitted_by_someuser @@ -340,31 +310,32 @@ def test_delete_some_allowed_submissions_with_partial_perms_as_anotheruser(self) ) # Try first submission submitted by unknown - random_submissions = self.get_random_submissions(self.unknown_user, 3) + submissions = self.submissions_submitted_by_unknownuser data = { 'payload': { - 'submission_ids': [rs['_id'] for rs in random_submissions] + 'submission_ids': [submissions[0]['_id']] } } - response = self.client.delete(self.submission_bulk_url, - data=data, - format='json') + response = self.client.delete( + self.submission_bulk_url, data=data, format='json' + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # Try second submission submitted by anotheruser count = self._deployment.calculated_submission_count(self.anotheruser) - random_submissions = 
self.get_random_submissions(self.anotheruser, 3) + assert count == 2 + submissions = self.submissions_submitted_by_anotheruser data = { 'payload': { - 'submission_ids': [rs['_id'] for rs in random_submissions], + 'submission_ids': [submissions[0]['_id']], } } - response = self.client.delete(self.submission_bulk_url, - data=data, - format='json') + response = self.client.delete( + self.submission_bulk_url, data=data, format='json' + ) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get(self.submission_list_url, {'format': 'json'}) - self.assertEqual(response.data['count'], count - len(random_submissions)) + self.assertEqual(response.data['count'], count - 1) def test_cannot_delete_view_only_submissions_with_partial_perms_as_anotheruser(self): """ @@ -473,14 +444,15 @@ def test_list_submissions_as_owner_with_params(self): params """ response = self.client.get( - self.submission_list_url, { + self.submission_list_url, + { 'format': 'json', 'start': 1, 'limit': 5, 'sort': '{"q1": -1}', 'fields': '["q1", "_submitted_by"]', - 'query': '{"_submitted_by": {"$in": ["unknown", "someuser", "another"]}}', - } + 'query': '{"_submitted_by": {"$in": ["unknownuser", "someuser", "anotheruser"]}}', + }, ) # ToDo add more assertions. E.g. test whether sort, limit, start really work self.assertEqual(len(response.data['results']), 5) @@ -702,7 +674,7 @@ def test_retrieve_submission_as_owner(self): someuser is the owner of the project. someuser can view one of their submission. """ - submission = self.get_random_submission(self.asset.owner) + submission = self.submissions_submitted_by_someuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -712,7 +684,7 @@ def test_retrieve_submission_as_owner(self): ) response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, submission) + self.assertEqual(response.data['_id'], submission['_id']) def test_retrieve_submission_by_uuid(self): """ @@ -739,7 +711,7 @@ def test_retrieve_submission_not_shared_as_anotheruser(self): someuser's data existence should not be revealed. 
""" self._log_in_as_another_user() - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -757,7 +729,7 @@ def test_retrieve_submission_shared_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -767,7 +739,7 @@ def test_retrieve_submission_shared_as_anotheruser(self): ) response = self.client.get(url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.data, submission) + self.assertEqual(response.data['_id'], submission['_id']) def test_retrieve_submission_with_partial_permissions_as_anotheruser(self): """ @@ -779,11 +751,14 @@ def test_retrieve_submission_with_partial_permissions_as_anotheruser(self): partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } - self.asset.assign_perm(self.anotheruser, PERM_PARTIAL_SUBMISSIONS, - partial_perms=partial_perms) + self.asset.assign_perm( + self.anotheruser, + PERM_PARTIAL_SUBMISSIONS, + partial_perms=partial_perms, + ) # Try first submission submitted by unknown - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -871,7 +846,8 @@ def test_delete_submission_as_anonymous(self): someuser's data existence should not be revealed. """ self.client.logout() - submission = self.get_random_submission(self.asset.owner) + submission = self.submissions_submitted_by_someuser[0] + url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -891,7 +867,7 @@ def test_delete_submission_not_shared_as_anotheruser(self): someuser's data existence should not be revealed. 
""" self._log_in_as_another_user() - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -911,7 +887,7 @@ def test_delete_submission_shared_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self._log_in_as_another_user() - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -954,7 +930,7 @@ def test_delete_submission_with_partial_perms_as_anotheruser(self): ) # Try first submission submitted by unknown - submission = self.submissions_submitted_by_unknown[0] + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -969,7 +945,7 @@ def test_delete_submission_with_partial_perms_as_anotheruser(self): # Try second submission submitted by anotheruser anotheruser_submission_count = len(self.submissions_submitted_by_anotheruser) - submission = self.get_random_submission(self.anotheruser) + submission = self.submissions_submitted_by_anotheruser[0] url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -1287,7 +1263,7 @@ def test_get_edit_link_with_partial_perms_as_anotheruser(self): ) # Try first submission submitted by unknown - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-enketo-edit'), kwargs={ @@ -1299,7 +1275,7 @@ def test_get_edit_link_with_partial_perms_as_anotheruser(self): self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # Try second submission submitted by anotheruser - submission = self.get_random_submission(self.anotheruser) + submission = self.submissions_submitted_by_anotheruser[0] url = reverse( self._get_endpoint('submission-enketo-edit'), kwargs={ @@ -1418,7 +1394,7 @@ def test_get_multiple_edit_links_and_attempt_submit_edits(self): # for POSTing to later submission_urls = [] for _ in range(2): - submission = self.get_random_submission(self.asset.owner) + submission = self.submissions_submitted_by_someuser[0] edit_url = reverse( self._get_endpoint('submission-enketo-edit'), kwargs={ @@ -1760,7 +1736,7 @@ def test_get_view_link_with_partial_perms_as_anotheruser(self): ) # Try first submission submitted by unknown - submission = self.submissions_submitted_by_unknown[0] + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-enketo-view'), kwargs={ @@ -1970,7 +1946,7 @@ def test_duplicate_submission_as_anotheruser_with_partial_perms(self): ) # Try first submission submitted by unknown - submission = self.get_random_submission(self.unknown_user) + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-duplicate'), kwargs={ @@ -1982,7 +1958,7 @@ def test_duplicate_submission_as_anotheruser_with_partial_perms(self): self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # Try second submission submitted by anotheruser - submission = self.get_random_submission(self.anotheruser) + submission = self.submissions_submitted_by_anotheruser[0] url = reverse( self._get_endpoint('submission-duplicate'), kwargs={ @@ -2007,9 +1983,9 @@ def setUp(self): }, ) - random_submissions = self.get_random_submissions(self.asset.owner, 3) + submissions = 
self.submissions_submitted_by_someuser self.updated_submission_data = { - 'submission_ids': [rs['_id'] for rs in random_submissions], + 'submission_ids': [rs['_id'] for rs in submissions], 'data': { 'q1': 'Updated value', 'q_new': 'A new question and value' @@ -2161,9 +2137,9 @@ def test_bulk_update_submissions_as_anotheruser_with_partial_perms(self): assert response.status_code == status.HTTP_403_FORBIDDEN # Update some of another's submissions - random_submissions = self.get_random_submissions(self.anotheruser, 3) + submissions = self.submissions_submitted_by_anotheruser self.updated_submission_data['submission_ids'] = [ - rs['_id'] for rs in random_submissions + rs['_id'] for rs in submissions ] response = self.client.patch( self.submission_url, data=self.submitted_payload, format='json' @@ -2366,7 +2342,7 @@ def test_edit_status_with_partial_perms_as_anotheruser(self): } # Try first submission submitted by unknown - submission = self.submissions_submitted_by_unknown[0] + submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-validation-status'), kwargs={ @@ -2780,12 +2756,12 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru self.asset.assign_perm(self.anotheruser, PERM_PARTIAL_SUBMISSIONS, partial_perms=partial_perms) - random_submissions = self.get_random_submissions(self.asset.owner, 3) + submissions = self.submissions_submitted_by_someuser data = { 'payload': { 'validation_status.uid': 'validation_status_approved', 'submission_ids': [ - rs['_id'] for rs in random_submissions + rs['_id'] for rs in submissions ] } } @@ -2797,9 +2773,9 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) # Try 2nd submission submitted by anotheruser - random_submissions = self.get_random_submissions(self.anotheruser, 3) + submissions = self.submissions_submitted_by_anotheruser data['payload']['submission_ids'] = [ - rs['_id'] for rs in random_submissions + rs['_id'] for rs in submissions ] response = self.client.patch(self.validation_statuses_url, data=data, From 9c9a300e3acf9a1f63d9ebaf7352a9b80017ec72 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 8 Aug 2024 17:36:39 -0400 Subject: [PATCH 027/119] Remove deprecated methods --- kpi/deployment_backends/base_backend.py | 15 --------------- kpi/deployment_backends/openrosa_backend.py | 21 --------------------- 2 files changed, 36 deletions(-) diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index b26dd84fdf..c848803e7b 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -342,16 +342,6 @@ def get_submission( pass return None - @abc.abstractmethod - def get_submission_detail_url(self, submission_id: int) -> str: - pass - - def get_submission_validation_status_url(self, submission_id: int) -> str: - url = '{detail_url}validation_status/'.format( - detail_url=self.get_submission_detail_url(submission_id) - ) - return url - @abc.abstractmethod def get_submissions( self, @@ -501,11 +491,6 @@ def submission_count_since_date( ): pass - @property - @abc.abstractmethod - def submission_list_url(self): - pass - @property @abc.abstractmethod def submission_model(self): diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index bf4933a52b..53579d5a4b 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ 
b/kpi/deployment_backends/openrosa_backend.py @@ -713,18 +713,6 @@ def get_orphan_postgres_submissions(self) -> Optional[QuerySet, bool]: except InvalidXFormException: return None - def get_submission_detail_url(self, submission_id: int) -> str: - 1/0 - #url = f'{self.submission_list_url}/{submission_id}' - #return url - - def get_submission_validation_status_url(self, submission_id: int) -> str: - 1/0 - #url = '{detail_url}/validation_status'.format( - # detail_url=self.get_submission_detail_url(submission_id) - #) - #return url - def get_submissions( self, user: settings.AUTH_USER_MODEL, @@ -1187,15 +1175,6 @@ def submission_count_since_date(self, start_date=None): else: return total_submissions['count_sum'] - @property - def submission_list_url(self): - 1/0 - #url = '{kc_base}/api/v1/data/{formid}'.format( - # kc_base=settings.KOBOCAT_INTERNAL_URL, - # formid=self.backend_response['formid'] - #) - #return url - @property def submission_model(self): return Instance From 8557c38734c8cbe241e4e363e28c1b598cc5102d Mon Sep 17 00:00:00 2001 From: rgraber Date: Wed, 21 Aug 2024 16:12:57 -0400 Subject: [PATCH 028/119] fix: fix failing unit tests --- .../audit_log/tests/test_one_time_auth.py | 27 ++++++++----------- kpi/authentication.py | 2 +- 2 files changed, 12 insertions(+), 17 deletions(-) diff --git a/kobo/apps/audit_log/tests/test_one_time_auth.py b/kobo/apps/audit_log/tests/test_one_time_auth.py index 172f269517..383e870833 100644 --- a/kobo/apps/audit_log/tests/test_one_time_auth.py +++ b/kobo/apps/audit_log/tests/test_one_time_auth.py @@ -47,39 +47,34 @@ def setUp(self): # expected authentication type, method that needs to be mocked, endpoint to hit # (kpi and openrosa endpoints use different auth methods, and we want to test endpoints in both v1 and v2) ( - 'Token', + 'token', 'kpi.authentication.DRFTokenAuthentication.authenticate', 'data-list', ), ( - 'Basic', + 'basic', 'kpi.authentication.DRFBasicAuthentication.authenticate', 'api_v2:audit-log-list', ), ( - 'OAuth2', + 'oauth2', 'kpi.authentication.OPOAuth2Authentication.authenticate', 'data-list', ), ( - 'Https Basic', + 'https basic', 'kobo.apps.openrosa.libs.authentication.BasicAuthentication.authenticate', 'data-list', ), ( - 'Token', + 'token', 'kpi.authentication.DRFTokenAuthentication.authenticate', - 'api_v2:submission-list', + 'api_v2:asset-list', ), ( - 'OAuth2', + 'oauth2', 'kpi.authentication.OPOAuth2Authentication.authenticate', - 'api_v2:submission-list', - ), - ( - 'Https Basic', - 'kobo.apps.openrosa.libs.authentication.BasicAuthentication.authenticate', - 'api_v2:submission-list', + 'api_v2:asset-list', ), ) @unpack @@ -121,11 +116,11 @@ def side_effect(request): return_value=True, side_effect=side_effect, ): - self.client.get(reverse('api_v2:submission-list'), **header) + self.client.get(reverse('api_v2:asset-list'), **header) log_exists = AuditLog.objects.filter( user_uid=TestOneTimeAuthentication.user.extra_details.uid, action=AuditAction.AUTH, - metadata__auth_type='Digest', + metadata__auth_type='digest', ).exists() self.assertTrue(log_exists) self.assertEqual(AuditLog.objects.count(), 1) @@ -154,7 +149,7 @@ def side_effect(request): log_exists = AuditLog.objects.filter( user_uid=TestOneTimeAuthentication.user.extra_details.uid, action=AuditAction.AUTH, - metadata__auth_type='Submission', + metadata__auth_type='submission', ).exists() self.assertTrue(log_exists) self.assertEqual(AuditLog.objects.count(), 1) diff --git a/kpi/authentication.py b/kpi/authentication.py index dfb72003d7..8f358b0566 100644 --- 
a/kpi/authentication.py +++ b/kpi/authentication.py @@ -170,5 +170,5 @@ def authenticate(self, request): if result is None: return result user, creds = result - self.create_access_log(request, user, 'OAuth2') + self.create_access_log(request, user, 'oauth2') return user, creds From 330e7894764e7da6dd234531aa03eb798b46c5e3 Mon Sep 17 00:00:00 2001 From: rgraber Date: Wed, 21 Aug 2024 16:20:29 -0400 Subject: [PATCH 029/119] fixup!: fix failing unit tests --- kobo/apps/audit_log/tests/test_one_time_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kobo/apps/audit_log/tests/test_one_time_auth.py b/kobo/apps/audit_log/tests/test_one_time_auth.py index 383e870833..d3db488dea 100644 --- a/kobo/apps/audit_log/tests/test_one_time_auth.py +++ b/kobo/apps/audit_log/tests/test_one_time_auth.py @@ -116,7 +116,7 @@ def side_effect(request): return_value=True, side_effect=side_effect, ): - self.client.get(reverse('api_v2:asset-list'), **header) + self.client.get(reverse('data-list'), **header) log_exists = AuditLog.objects.filter( user_uid=TestOneTimeAuthentication.user.extra_details.uid, action=AuditAction.AUTH, From 9cb65ca0917d22237038b2ace1e9253e4dc41e45 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 21 Aug 2024 17:58:26 -0400 Subject: [PATCH 030/119] Fix unit tests --- .../stripe/tests/test_organization_usage.py | 4 +--- kobo/apps/trackers/tests/submission_utils.py | 17 +++++++---------- kpi/tests/test_provision_server.py | 7 +++++++ kpi/tests/utils/xml.py | 2 +- kpi/views/v2/data.py | 3 ++- 5 files changed, 18 insertions(+), 15 deletions(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 04ade73c96..9ab4f2fa9b 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -14,13 +14,11 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization, OrganizationUser -from kobo.apps.stripe.tests.utils import generate_enterprise_subscription, generate_plan_subscription -from kobo.apps.trackers.tests.submission_utils import create_mock_assets, add_mock_submissions from kobo.apps.stripe.tests.utils import ( generate_enterprise_subscription, generate_plan_subscription, ) -from kobo.apps.trackers.submission_utils import ( +from kobo.apps.trackers.tests.submission_utils import ( create_mock_assets, add_mock_submissions, ) diff --git a/kobo/apps/trackers/tests/submission_utils.py b/kobo/apps/trackers/tests/submission_utils.py index 461c89160c..9cc5578448 100644 --- a/kobo/apps/trackers/tests/submission_utils.py +++ b/kobo/apps/trackers/tests/submission_utils.py @@ -71,15 +71,14 @@ def expected_file_size(submissions: int = 1): settings.BASE_DIR + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg' )) * submissions -def add_mock_submissions(assets: list, submissions_per_asset: int = 1, age_days: int = 0): + +def add_mock_submissions( + assets: list, submissions_per_asset: int = 1, age_days: int = 0 +): """ Add one (default) or more submissions to an asset """ - # FIXME - if age_days > 0: - raise Exception('FIXME!!!!') - all_submissions = [] for asset in assets: asset_submissions = [] @@ -106,14 +105,12 @@ def add_mock_submissions(assets: list, submissions_per_asset: int = 1, age_days: ], '_submitted_by': asset.owner.username, } + if age_days > 0: + submission_time = timezone.now() - relativedelta(days=age_days) + submission['_submission_time'] = submission_time.strftime('%Y-%m-%dT%H:%M:%S') 
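The `age_days` handling added above can be expressed as a small standalone helper; a minimal sketch, assuming `python-dateutil` and Django's `timezone` utilities (both already used by the added lines):

    from dateutil.relativedelta import relativedelta
    from django.utils import timezone

    def backdated_submission_time(age_days: int) -> str:
        # Shift "now" back by `age_days` and format it the way the mock
        # deployment stores `_submission_time`.
        submission_time = timezone.now() - relativedelta(days=age_days)
        return submission_time.strftime('%Y-%m-%dT%H:%M:%S')

    # e.g. submission['_submission_time'] = backdated_submission_time(31)
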
asset_submissions.append(submission) asset.deployment.mock_submissions(asset_submissions) all_submissions = all_submissions + asset_submissions -<<<<<<< HEAD:kobo/apps/trackers/tests/submission_utils.py - update_xform_counters(asset, submissions=submissions_per_asset, age_days=age_days) -======= - # update_xform_counters(asset, submissions=submissions_per_asset) ->>>>>>> kobocat-django-app-part-2-refactor-mock-deployment-backend:kobo/apps/trackers/submission_utils.py return all_submissions diff --git a/kpi/tests/test_provision_server.py b/kpi/tests/test_provision_server.py index 00eb1ab46c..c4ec6499e6 100644 --- a/kpi/tests/test_provision_server.py +++ b/kpi/tests/test_provision_server.py @@ -10,6 +10,13 @@ class ProvisionServerCommandTest(TestCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + # Delete any social app that could be added by migration + # `0007_add_providers_from_environment_to_db` + SocialApp.objects.all().delete() + @patch('os.getenv') def test_handle_successful_creation(self, mock_getenv): mock_getenv.return_value = None diff --git a/kpi/tests/utils/xml.py b/kpi/tests/utils/xml.py index d47d535e6d..827193b1f1 100644 --- a/kpi/tests/utils/xml.py +++ b/kpi/tests/utils/xml.py @@ -7,7 +7,7 @@ def get_form_and_submission_tag_names( form: str, submission: str ) -> tuple[str, str]: - submission_root_name = check_lxml_fromstring(submission).tag + submission_root_name = check_lxml_fromstring(submission.encode()).tag tree = etree.ElementTree(check_lxml_fromstring(form)) root = tree.getroot() # We cannot use `root.nsmap` directly because the default namespace key is diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index e5287e396c..2aaf4d000e 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -432,6 +432,7 @@ def duplicate(self, request, pk, *args, **kwargs): """ Creates a duplicate of the submission with a given `pk` """ + deployment = self._get_deployment() # Coerce to int because back end only finds matches with same type submission_id = positive_int(pk) @@ -439,7 +440,7 @@ def duplicate(self, request, pk, *args, **kwargs): submission_id, request.user, fields=['_uuid'] ) duplicate_response = deployment.duplicate_submission( - submission_id=submission_id, user=request.user + submission_id=submission_id, request=request ) deployment.copy_submission_extras( original_submission['_uuid'], duplicate_response['_uuid'] From abcda2be1faa188f3892635d8ae6eb3f4c1d62c5 Mon Sep 17 00:00:00 2001 From: Guillermo Date: Wed, 21 Aug 2024 21:23:27 -0600 Subject: [PATCH 031/119] Refactor ServiceUsageSerializer into a reusable utility to calculate usage numbers --- .../stripe/tests/test_organization_usage.py | 6 +- kpi/serializers/v2/service_usage.py | 164 ++--------- kpi/tests/api/v2/test_api_service_usage.py | 193 +------------ kpi/tests/test_usage_calculator.py | 267 ++++++++++++++++++ kpi/utils/usage_calculator.py | 146 ++++++++++ 5 files changed, 439 insertions(+), 337 deletions(-) create mode 100644 kpi/tests/test_usage_calculator.py create mode 100644 kpi/utils/usage_calculator.py diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 0d234ce8f2..9a5af8a1ea 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -15,13 +15,13 @@ from kobo.apps.organizations.models import Organization, OrganizationUser from kobo.apps.stripe.tests.utils import generate_enterprise_subscription, generate_plan_subscription from 
kobo.apps.trackers.tests.submission_utils import create_mock_assets, add_mock_submissions -from kpi.tests.api.v2.test_api_service_usage import ServiceUsageAPIBase +from kpi.tests.test_usage_calculator import BaseUsageCalculatorTestCase from kpi.tests.api.v2.test_api_asset_usage import AssetUsageAPITestCase from rest_framework import status -class OrganizationServiceUsageAPIMultiUserTestCase(ServiceUsageAPIBase): +class OrganizationServiceUsageAPIMultiUserTestCase(BaseUsageCalculatorTestCase): """ Test organization service usage when Stripe is enabled. @@ -149,7 +149,7 @@ def test_endpoint_is_cached(self): self.expected_file_size() * self.expected_submissions_multi ) -class OrganizationServiceUsageAPITestCase(ServiceUsageAPIBase): +class OrganizationServiceUsageAPITestCase(BaseUsageCalculatorTestCase): org_id = 'orgAKWMFskafsngf' @classmethod diff --git a/kpi/serializers/v2/service_usage.py b/kpi/serializers/v2/service_usage.py index a3f65971b3..822f8ffe51 100644 --- a/kpi/serializers/v2/service_usage.py +++ b/kpi/serializers/v2/service_usage.py @@ -7,7 +7,10 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization -from kobo.apps.organizations.utils import get_monthly_billing_dates, get_yearly_billing_dates +from kobo.apps.organizations.utils import ( + get_monthly_billing_dates, + get_yearly_billing_dates, +) from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES from kobo.apps.trackers.models import NLPUsageCounter from kpi.deployment_backends.kc_access.shadow_models import ( @@ -16,6 +19,7 @@ ) from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend from kpi.models.asset import Asset +from kpi.utils.usage_calculator import UsageCalculator class AssetUsageSerializer(serializers.HyperlinkedModelSerializer): @@ -117,158 +121,32 @@ class ServiceUsageSerializer(serializers.Serializer): def __init__(self, instance=None, data=empty, **kwargs): super().__init__(instance=instance, data=data, **kwargs) - - self._total_nlp_usage = {} - self._total_storage_bytes = 0 - self._total_submission_count = {} - self._current_month_start = None - self._current_month_end = None - self._current_year_start = None - self._current_year_end = None - self._organization = None - self._now = timezone.now() - self._get_per_asset_usage(instance) + organization = None + organization_id = self.context.get('organization_id', None) + if organization_id: + organization = Organization.objects.filter( + organization_users__user_id=instance.id, + id=organization_id, + ).first() + self.calculator = UsageCalculator(instance, organization) def get_total_nlp_usage(self, user): - return self._total_nlp_usage + return self.calculator.get_nlp_usage_counters() def get_total_submission_count(self, user): - return self._total_submission_count + return self.calculator.get_submission_counters() def get_total_storage_bytes(self, user): - return self._total_storage_bytes + return self.calculator.get_storage_usage() def get_current_month_start(self, user): - return self._current_month_start.isoformat() - + return self.calculator.current_month_start.isoformat() + def get_current_month_end(self, user): - return self._current_month_end.isoformat() + return self.calculator.current_month_end.isoformat() def get_current_year_start(self, user): - return self._current_year_start.isoformat() + return self.calculator.current_year_start.isoformat() def get_current_year_end(self, user): - return self._current_year_end.isoformat() - - def _filter_by_user(self, user_ids: list) 
-> Q: - """ - Turns a list of user ids into a query object to filter by - """ - return Q(user_id__in=user_ids) - - def _get_nlp_user_counters(self, month_filter, year_filter): - nlp_tracking = NLPUsageCounter.objects.only( - 'date', 'total_asr_seconds', 'total_mt_characters' - ).filter(self._user_id_query).aggregate( - asr_seconds_current_year=Coalesce( - Sum('total_asr_seconds', filter=year_filter), 0 - ), - mt_characters_current_year=Coalesce( - Sum('total_mt_characters', filter=year_filter), 0 - ), - asr_seconds_current_month=Coalesce( - Sum('total_asr_seconds', filter=month_filter), 0 - ), - mt_characters_current_month=Coalesce( - Sum('total_mt_characters', filter=month_filter), 0 - ), - asr_seconds_all_time=Coalesce(Sum('total_asr_seconds'), 0), - mt_characters_all_time=Coalesce(Sum('total_mt_characters'), 0), - ) - - for nlp_key, count in nlp_tracking.items(): - self._total_nlp_usage[nlp_key] = count if count is not None else 0 - - def _get_organization_details(self, user_id: int): - # Get the organization ID from the request - organization_id = self.context.get( - 'organization_id', None - ) - - if not organization_id: - return - - self._organization = Organization.objects.filter( - organization_users__user_id=user_id, - id=organization_id, - ).first() - - if not self._organization: - # Couldn't find organization, proceed as normal - return - - if settings.STRIPE_ENABLED: - # if the user is in an organization and has an enterprise plan, get all org users - # we evaluate this queryset instead of using it as a subquery because it's referencing - # fields from the auth_user tables on kpi *and* kobocat, making getting results in a - # single query not feasible until those tables are combined - user_ids = list( - User.objects.filter( - organizations_organization__id=organization_id, - organizations_organization__djstripe_customers__subscriptions__status__in=ACTIVE_STRIPE_STATUSES, - organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__has_key='plan_type', - organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__plan_type='enterprise', - ).values_list('pk', flat=True)[:settings.ORGANIZATION_USER_LIMIT] - ) - if user_ids: - self._user_id_query = self._filter_by_user(user_ids) - - def _get_per_asset_usage(self, user): - self._user_id = user.pk - self._user_id_query = self._filter_by_user([self._user_id]) - # get the billing data and list of organization users (if applicable) - self._get_organization_details(self._user_id) - - self._get_storage_usage() - - self._current_month_start, self._current_month_end = get_monthly_billing_dates(self._organization) - self._current_year_start, self._current_year_end = get_yearly_billing_dates(self._organization) - - current_month_filter = Q( - date__range=[self._current_month_start, self._now] - ) - current_year_filter = Q( - date__range=[self._current_year_start, self._now] - ) - - self._get_submission_counters(current_month_filter, current_year_filter) - self._get_nlp_user_counters(current_month_filter, current_year_filter) - - def _get_storage_usage(self): - """ - Get the storage used by non-(soft-)deleted projects for all users - - Users are represented by their ids with `self._user_ids` - """ - xforms = KobocatXForm.objects.only('attachment_storage_bytes', 'id').exclude( - pending_delete=True - ).filter(self._user_id_query) - - total_storage_bytes = xforms.aggregate( - bytes_sum=Coalesce(Sum('attachment_storage_bytes'), 0), - ) - - self._total_storage_bytes = 
total_storage_bytes['bytes_sum'] or 0 - - def _get_submission_counters(self, month_filter, year_filter): - """ - Calculate submissions for all users' projects even their deleted ones - - Users are represented by their ids with `self._user_ids` - """ - submission_count = KobocatDailyXFormSubmissionCounter.objects.only( - 'counter', 'user_id' - ).filter(self._user_id_query).aggregate( - all_time=Coalesce(Sum('counter'), 0), - current_year=Coalesce( - Sum('counter', filter=year_filter), 0 - ), - current_month=Coalesce( - Sum('counter', filter=month_filter), 0 - ), - ) - - for submission_key, count in submission_count.items(): - self._total_submission_count[submission_key] = ( - count if count is not None else 0 - ) + return self.calculator.current_year_end.isoformat() diff --git a/kpi/tests/api/v2/test_api_service_usage.py b/kpi/tests/api/v2/test_api_service_usage.py index 63826cd516..af5deb90f8 100644 --- a/kpi/tests/api/v2/test_api_service_usage.py +++ b/kpi/tests/api/v2/test_api_service_usage.py @@ -15,199 +15,10 @@ ) from kobo.apps.trackers.models import NLPUsageCounter from kpi.models import Asset -from kpi.tests.base_test_case import BaseAssetTestCase -from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE +from kpi.tests.test_usage_calculator import BaseUsageCalculatorTestCase -class ServiceUsageAPIBase(BaseAssetTestCase): - """ - This class contains setup logic and utility functions to test submissions/usage - """ - fixtures = ['test_data'] - - URL_NAMESPACE = ROUTER_URL_NAMESPACE - - xform = None - counter = None - attachment_id = 0 - - @classmethod - def setUpTestData(cls): - super().setUpTestData() - cls.anotheruser = User.objects.get(username='anotheruser') - cls.someuser = User.objects.get(username='someuser') - - def setUp(self): - super().setUp() - self.client.login(username='anotheruser', password='anotheruser') - - def _create_asset(self, user=None): - owner = user or self.anotheruser - content_source_asset = { - 'survey': [ - { - 'type': 'audio', - 'label': 'q1', - 'required': 'false', - '$kuid': 'abcd', - }, - { - 'type': 'file', - 'label': 'q2', - 'required': 'false', - '$kuid': 'efgh', - }, - ] - } - self.asset = Asset.objects.create( - content=content_source_asset, - owner=owner, - asset_type='survey', - ) - - self.asset.deploy(backend='mock', active=True) - self.asset.save() - - self.asset.deployment.set_namespace(self.URL_NAMESPACE) - self.submission_list_url = self.asset.deployment.submission_list_url - self._deployment = self.asset.deployment - - def add_nlp_trackers(self): - """ - Add nlp data to an asset - """ - # this month - today = timezone.now().date() - counter_1 = { - 'google_asr_seconds': 4586, - 'google_mt_characters': 5473, - } - NLPUsageCounter.objects.create( - user_id=self.anotheruser.id, - asset_id=self.asset.id, - date=today, - counters=counter_1, - total_asr_seconds=counter_1['google_asr_seconds'], - total_mt_characters=counter_1['google_mt_characters'], - ) - - # last month - last_month = today - relativedelta(months=1) - counter_2 = { - 'google_asr_seconds': 142, - 'google_mt_characters': 1253, - } - NLPUsageCounter.objects.create( - user_id=self.anotheruser.id, - asset_id=self.asset.id, - date=last_month, - counters=counter_2, - total_asr_seconds=counter_2['google_asr_seconds'], - total_mt_characters=counter_2['google_mt_characters'], - ) - - def add_submissions(self, count=2): - """ - Add one or more submissions to an asset (TWO by default) - """ - submissions = [] - v_uid = self.asset.latest_deployed_version.uid - - for x in 
range(count): - submission = { - '__version__': v_uid, - 'q1': 'audio_conversion_test_clip.3gp', - 'q2': 'audio_conversion_test_image.jpg', - '_uuid': str(uuid.uuid4()), - '_attachments': [ - { - 'id': self.attachment_id, - 'download_url': 'http://testserver/anotheruser/audio_conversion_test_clip.3gp', - 'filename': 'anotheruser/audio_conversion_test_clip.3gp', - 'mimetype': 'video/3gpp', - }, - { - 'id': self.attachment_id + 1, - 'download_url': 'http://testserver/anotheruser/audio_conversion_test_image.jpg', - 'filename': 'anotheruser/audio_conversion_test_image.jpg', - 'mimetype': 'image/jpeg', - }, - ], - '_submitted_by': 'anotheruser', - } - # increment the attachment ID for each attachment created - self.attachment_id = self.attachment_id + 2 - submissions.append(submission) - - self.asset.deployment.mock_submissions(submissions, flush_db=False) - self.update_xform_counters(self.asset, submissions=count) - - def update_xform_counters(self, asset: Asset, submissions: int = 0): - """ - Create/update the daily submission counter and the shadow xform we use to query it - """ - today = timezone.now() - if self.xform: - self.xform.attachment_storage_bytes += ( - self.expected_file_size() * submissions - ) - self.xform.save() - else: - xform_xml = ( - f'' - f'' - f'' - f' XForm test' - f' ' - f' ' - f' <{asset.uid} id="{asset.uid}" />' - f' ' - f' ' - f'' - f'' - f'' - f'' - ) - - self.xform = XForm.objects.create( - attachment_storage_bytes=( - self.expected_file_size() * submissions - ), - kpi_asset_uid=asset.uid, - date_created=today, - date_modified=today, - user_id=asset.owner_id, - xml=xform_xml, - json={} - ) - self.xform.save() - - if self.counter: - self.counter.counter += submissions - self.counter.save() - else: - self.counter = ( - DailyXFormSubmissionCounter.objects.create( - date=today.date(), - counter=submissions, - xform=self.xform, - user_id=asset.owner_id, - ) - ) - self.counter.save() - - def expected_file_size(self): - """ - Calculate the expected combined file size for the test audio clip and image - """ - return os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_clip.3gp' - ) + os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg' - ) - - -class ServiceUsageAPITestCase(ServiceUsageAPIBase): +class ServiceUsageAPITestCase(BaseUsageCalculatorTestCase): def test_anonymous_user(self): """ Test that the endpoint is forbidden to anonymous user diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py new file mode 100644 index 0000000000..0cc6236e8e --- /dev/null +++ b/kpi/tests/test_usage_calculator.py @@ -0,0 +1,267 @@ +import os.path +import uuid + +from dateutil.relativedelta import relativedelta +from django.conf import settings +from django.test import override_settings +from django.utils import timezone +from model_bakery import baker + +from kobo.apps.kobo_auth.shortcuts import User +from kobo.apps.openrosa.apps.logger.models import ( + XForm, + DailyXFormSubmissionCounter, +) +from kobo.apps.organizations.models import Organization +from kobo.apps.stripe.tests.utils import generate_enterprise_subscription +from kobo.apps.trackers.models import NLPUsageCounter +from kpi.models import Asset +from kpi.tests.base_test_case import BaseAssetTestCase +from kpi.utils.usage_calculator import UsageCalculator +from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE + + +class BaseUsageCalculatorTestCase(BaseAssetTestCase): + """ + This class contains setup logic and utility 
functions to test usage + calculations + """ + + fixtures = ['test_data'] + attachment_id = 0 + xform = None + counter = None + + URL_NAMESPACE = ROUTER_URL_NAMESPACE + + def setUp(self): + super().setUp() + self.client.login(username='anotheruser', password='anotheruser') + + @classmethod + def setUpTestData(cls): + super().setUpTestData() + cls.anotheruser = User.objects.get(username='anotheruser') + cls.someuser = User.objects.get(username='someuser') + + def _create_asset(self, user=None): + owner = user or self.anotheruser + content_source_asset = { + 'survey': [ + { + 'type': 'audio', + 'label': 'q1', + 'required': 'false', + '$kuid': 'abcd', + }, + { + 'type': 'file', + 'label': 'q2', + 'required': 'false', + '$kuid': 'efgh', + }, + ] + } + self.asset = Asset.objects.create( + content=content_source_asset, + owner=owner, + asset_type='survey', + ) + + self.asset.deploy(backend='mock', active=True) + self.asset.save() + + self.asset.deployment.set_namespace(self.URL_NAMESPACE) + self.submission_list_url = self.asset.deployment.submission_list_url + self._deployment = self.asset.deployment + + def add_nlp_trackers(self): + """ + Add nlp data to an asset + """ + # this month + today = timezone.now().date() + counter_1 = { + 'google_asr_seconds': 4586, + 'google_mt_characters': 5473, + } + NLPUsageCounter.objects.create( + user_id=self.anotheruser.id, + asset_id=self.asset.id, + date=today, + counters=counter_1, + total_asr_seconds=counter_1['google_asr_seconds'], + total_mt_characters=counter_1['google_mt_characters'], + ) + + # last month + last_month = today - relativedelta(months=1) + counter_2 = { + 'google_asr_seconds': 142, + 'google_mt_characters': 1253, + } + NLPUsageCounter.objects.create( + user_id=self.anotheruser.id, + asset_id=self.asset.id, + date=last_month, + counters=counter_2, + total_asr_seconds=counter_2['google_asr_seconds'], + total_mt_characters=counter_2['google_mt_characters'], + ) + + def add_submissions(self, count=2): + """ + Add one or more submissions to an asset (TWO by default) + """ + submissions = [] + v_uid = self.asset.latest_deployed_version.uid + + for x in range(count): + submission = { + '__version__': v_uid, + 'q1': 'audio_conversion_test_clip.3gp', + 'q2': 'audio_conversion_test_image.jpg', + '_uuid': str(uuid.uuid4()), + '_attachments': [ + { + 'id': self.attachment_id, + 'download_url': 'http://testserver/anotheruser/audio_conversion_test_clip.3gp', + 'filename': 'anotheruser/audio_conversion_test_clip.3gp', + 'mimetype': 'video/3gpp', + }, + { + 'id': self.attachment_id + 1, + 'download_url': 'http://testserver/anotheruser/audio_conversion_test_image.jpg', + 'filename': 'anotheruser/audio_conversion_test_image.jpg', + 'mimetype': 'image/jpeg', + }, + ], + '_submitted_by': 'anotheruser', + } + # increment the attachment ID for each attachment created + self.attachment_id = self.attachment_id + 2 + submissions.append(submission) + + self.asset.deployment.mock_submissions(submissions, flush_db=False) + self.update_xform_counters(self.asset, submissions=count) + + def expected_file_size(self): + """ + Calculate the expected combined file size for the test audio clip and image + """ + return os.path.getsize( + settings.BASE_DIR + '/kpi/tests/audio_conversion_test_clip.3gp' + ) + os.path.getsize( + settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg' + ) + + def update_xform_counters(self, asset: Asset, submissions: int = 0): + """ + Create/update the daily submission counter and the shadow xform we use to query it + """ + today = 
timezone.now() + if self.xform: + self.xform.attachment_storage_bytes += ( + self.expected_file_size() * submissions + ) + self.xform.save() + else: + xform_xml = ( + f'' + f'' + f'' + f' XForm test' + f' ' + f' ' + f' <{asset.uid} id="{asset.uid}" />' + f' ' + f' ' + f'' + f'' + f'' + f'' + ) + + self.xform = XForm.objects.create( + attachment_storage_bytes=( + self.expected_file_size() * submissions + ), + kpi_asset_uid=asset.uid, + date_created=today, + date_modified=today, + user_id=asset.owner_id, + xml=xform_xml, + json={}, + ) + self.xform.save() + + if self.counter: + self.counter.counter += submissions + self.counter.save() + else: + self.counter = DailyXFormSubmissionCounter.objects.create( + date=today.date(), + counter=submissions, + xform=self.xform, + user_id=asset.owner_id, + ) + self.counter.save() + + +class UsageCalculatorTestCase(BaseUsageCalculatorTestCase): + def setUp(self): + super().setUp() + self._create_asset() + self.add_nlp_trackers() + self.add_submissions(count=5) + + def test_nlp_usage_counters(self): + calculator = UsageCalculator(self.anotheruser, None) + nlp_usage = calculator.get_nlp_usage_counters() + assert nlp_usage['asr_seconds_current_month'] == 4586 + assert nlp_usage['asr_seconds_all_time'] == 4728 + assert nlp_usage['mt_characters_current_month'] == 5473 + assert nlp_usage['mt_characters_all_time'] == 6726 + + def test_storage_usage(self): + calculator = UsageCalculator(self.anotheruser, None) + assert calculator.get_storage_usage() == 5 * self.expected_file_size() + + def test_submission_counters(self): + calculator = UsageCalculator(self.anotheruser, None) + submission_counters = calculator.get_submission_counters() + assert submission_counters['current_month'] == 5 + assert submission_counters['all_time'] == 5 + + def test_no_data(self): + calculator = UsageCalculator(self.someuser, None) + nlp_usage = calculator.get_nlp_usage_counters() + submission_counters = calculator.get_submission_counters() + + assert nlp_usage['asr_seconds_current_month'] == 0 + assert nlp_usage['asr_seconds_all_time'] == 0 + assert nlp_usage['mt_characters_current_month'] == 0 + assert nlp_usage['mt_characters_all_time'] == 0 + assert calculator.get_storage_usage() == 0 + assert submission_counters['current_month'] == 0 + assert submission_counters['all_time'] == 0 + + @override_settings(STRIPE_ENABLED=True) + def test_organization_setup(self): + organization = baker.make(Organization, id='org_abcd1234') + organization.add_user(user=self.anotheruser, is_admin=True) + organization.add_user(user=self.someuser, is_admin=True) + generate_enterprise_subscription(organization) + + calculator = UsageCalculator(self.someuser, organization) + submission_counters = calculator.get_submission_counters() + assert submission_counters['current_month'] == 5 + assert submission_counters['all_time'] == 5 + + nlp_usage = calculator.get_nlp_usage_counters() + assert nlp_usage['asr_seconds_current_month'] == 4586 + assert nlp_usage['asr_seconds_all_time'] == 4728 + assert nlp_usage['mt_characters_current_month'] == 5473 + assert nlp_usage['mt_characters_all_time'] == 6726 + + assert calculator.get_storage_usage() == 5 * self.expected_file_size() diff --git a/kpi/utils/usage_calculator.py b/kpi/utils/usage_calculator.py new file mode 100644 index 0000000000..1119babbe3 --- /dev/null +++ b/kpi/utils/usage_calculator.py @@ -0,0 +1,146 @@ +from typing import Optional + +from django.conf import settings +from django.db.models import Sum, Q +from django.db.models.functions import Coalesce +from 
django.utils import timezone + +from kobo.apps.kobo_auth.shortcuts import User +from kobo.apps.organizations.models import Organization +from kobo.apps.organizations.utils import ( + get_monthly_billing_dates, + get_yearly_billing_dates, +) +from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES +from kobo.apps.trackers.models import NLPUsageCounter +from kpi.deployment_backends.kc_access.shadow_models import ( + KobocatXForm, + KobocatDailyXFormSubmissionCounter, +) + + +class UsageCalculator: + def __init__(self, user: User, organization: Optional[Organization]): + self.user = user + self.organization = organization + + self._user_ids = [user.pk] + self._user_id_query = self._filter_by_user([user.pk]) + if organization and settings.STRIPE_ENABLED: + # if the user is in an organization and has an enterprise plan, get all org users + # we evaluate this queryset instead of using it as a subquery because it's referencing + # fields from the auth_user tables on kpi *and* kobocat, making getting results in a + # single query not feasible until those tables are combined + user_ids = list( + User.objects.filter( + organizations_organization__id=organization.id, + organizations_organization__djstripe_customers__subscriptions__status__in=ACTIVE_STRIPE_STATUSES, + organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__has_key='plan_type', + organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__plan_type='enterprise', + ).values_list('pk', flat=True)[ + : settings.ORGANIZATION_USER_LIMIT + ] + ) + if user_ids: + self._user_ids = user_ids + self._user_id_query = self._filter_by_user(user_ids) + + now = timezone.now() + self.current_month_start, self.current_month_end = ( + get_monthly_billing_dates(organization) + ) + self.current_year_start, self.current_year_end = ( + get_yearly_billing_dates(organization) + ) + self.current_month_filter = Q( + date__range=[self.current_month_start, now] + ) + self.current_year_filter = Q(date__range=[self.current_year_start, now]) + + def _filter_by_user(self, user_ids: list) -> Q: + """ + Turns a list of user ids into a query object to filter by + """ + return Q(user_id__in=user_ids) + + def get_nlp_usage_counters(self): + nlp_tracking = ( + NLPUsageCounter.objects.only( + 'date', 'total_asr_seconds', 'total_mt_characters' + ) + .filter(self._user_id_query) + .aggregate( + asr_seconds_current_year=Coalesce( + Sum('total_asr_seconds', filter=self.current_year_filter), 0 + ), + mt_characters_current_year=Coalesce( + Sum('total_mt_characters', filter=self.current_year_filter), + 0, + ), + asr_seconds_current_month=Coalesce( + Sum('total_asr_seconds', filter=self.current_month_filter), + 0, + ), + mt_characters_current_month=Coalesce( + Sum( + 'total_mt_characters', filter=self.current_month_filter + ), + 0, + ), + asr_seconds_all_time=Coalesce(Sum('total_asr_seconds'), 0), + mt_characters_all_time=Coalesce(Sum('total_mt_characters'), 0), + ) + ) + + total_nlp_usage = {} + for nlp_key, count in nlp_tracking.items(): + total_nlp_usage[nlp_key] = count if count is not None else 0 + + return total_nlp_usage + + def get_storage_usage(self): + """ + Get the storage used by non-(soft-)deleted projects for all users + + Users are represented by their ids with `self._user_ids` + """ + xforms = ( + KobocatXForm.objects.only('attachment_storage_bytes', 'id') + .exclude(pending_delete=True) + .filter(self._user_id_query) + ) + + total_storage_bytes = xforms.aggregate( + 
bytes_sum=Coalesce(Sum('attachment_storage_bytes'), 0), + ) + + return total_storage_bytes['bytes_sum'] or 0 + + def get_submission_counters(self): + """ + Calculate submissions for all users' projects even their deleted ones + + Users are represented by their ids with `self._user_ids` + """ + submission_count = ( + KobocatDailyXFormSubmissionCounter.objects.only( + 'counter', 'user_id' + ) + .filter(self._user_id_query) + .aggregate( + all_time=Coalesce(Sum('counter'), 0), + current_year=Coalesce( + Sum('counter', filter=self.current_year_filter), 0 + ), + current_month=Coalesce( + Sum('counter', filter=self.current_month_filter), 0 + ), + ) + ) + total_submission_count = {} + for submission_key, count in submission_count.items(): + total_submission_count[submission_key] = ( + count if count is not None else 0 + ) + + return total_submission_count From 6317be6bcb65172fdc10040ca646bce20884a5a1 Mon Sep 17 00:00:00 2001 From: Guillermo Date: Wed, 21 Aug 2024 21:36:36 -0600 Subject: [PATCH 032/119] Remove unused imports due to refactor --- kpi/serializers/v2/service_usage.py | 35 +++++--------- kpi/tests/api/v2/test_api_service_usage.py | 53 ++++++++-------------- kpi/utils/usage_calculator.py | 2 +- 3 files changed, 33 insertions(+), 57 deletions(-) diff --git a/kpi/serializers/v2/service_usage.py b/kpi/serializers/v2/service_usage.py index 822f8ffe51..93bc1236f8 100644 --- a/kpi/serializers/v2/service_usage.py +++ b/kpi/serializers/v2/service_usage.py @@ -1,22 +1,11 @@ -from django.conf import settings -from django.db.models import Sum, Q -from django.db.models.functions import Coalesce -from django.utils import timezone from rest_framework import serializers from rest_framework.fields import empty -from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization from kobo.apps.organizations.utils import ( get_monthly_billing_dates, get_yearly_billing_dates, ) -from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES -from kobo.apps.trackers.models import NLPUsageCounter -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatXForm, - KobocatDailyXFormSubmissionCounter, -) from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend from kpi.models.asset import Asset from kpi.utils.usage_calculator import UsageCalculator @@ -130,23 +119,23 @@ def __init__(self, instance=None, data=empty, **kwargs): ).first() self.calculator = UsageCalculator(instance, organization) - def get_total_nlp_usage(self, user): - return self.calculator.get_nlp_usage_counters() - - def get_total_submission_count(self, user): - return self.calculator.get_submission_counters() - - def get_total_storage_bytes(self, user): - return self.calculator.get_storage_usage() + def get_current_month_end(self, user): + return self.calculator.current_month_end.isoformat() def get_current_month_start(self, user): return self.calculator.current_month_start.isoformat() - def get_current_month_end(self, user): - return self.calculator.current_month_end.isoformat() + def get_current_year_end(self, user): + return self.calculator.current_year_end.isoformat() def get_current_year_start(self, user): return self.calculator.current_year_start.isoformat() - def get_current_year_end(self, user): - return self.calculator.current_year_end.isoformat() + def get_total_nlp_usage(self, user): + return self.calculator.get_nlp_usage_counters() + + def get_total_submission_count(self, user): + return self.calculator.get_submission_counters() + + def 
get_total_storage_bytes(self, user): + return self.calculator.get_storage_usage() diff --git a/kpi/tests/api/v2/test_api_service_usage.py b/kpi/tests/api/v2/test_api_service_usage.py index af5deb90f8..c00f2adb92 100644 --- a/kpi/tests/api/v2/test_api_service_usage.py +++ b/kpi/tests/api/v2/test_api_service_usage.py @@ -1,19 +1,6 @@ -# coding: utf-8 -import os.path -import uuid - -from dateutil.relativedelta import relativedelta -from django.conf import settings from django.urls import reverse -from django.utils import timezone from rest_framework import status -from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.models import ( - XForm, - DailyXFormSubmissionCounter, -) -from kobo.apps.trackers.models import NLPUsageCounter from kpi.models import Asset from kpi.tests.test_usage_calculator import BaseUsageCalculatorTestCase @@ -78,26 +65,6 @@ def test_multiple_forms(self): self.expected_file_size() * 3 ) - def test_service_usages_with_projects_in_trash_bin(self): - self.test_multiple_forms() - # Simulate trash bin - self.asset.pending_delete = True - self.asset.save( - update_fields=['pending_delete'], - create_version=False, - adjust_content=False, - ) - self.xform.pending_delete = True - self.xform.save(update_fields=['pending_delete']) - - # Retry endpoint - url = reverse(self._get_endpoint('service-usage-list')) - response = self.client.get(url) - - assert response.data['total_submission_count']['current_month'] == 3 - assert response.data['total_submission_count']['all_time'] == 3 - assert response.data['total_storage_bytes'] == 0 - def test_no_data(self): """ Test the endpoint functions when assets have no data @@ -143,3 +110,23 @@ def test_no_deployment(self): assert response.data['total_submission_count']['all_time'] == 0 assert response.data['total_nlp_usage']['asr_seconds_all_time'] == 0 assert response.data['total_storage_bytes'] == 0 + + def test_service_usages_with_projects_in_trash_bin(self): + self.test_multiple_forms() + # Simulate trash bin + self.asset.pending_delete = True + self.asset.save( + update_fields=['pending_delete'], + create_version=False, + adjust_content=False, + ) + self.xform.pending_delete = True + self.xform.save(update_fields=['pending_delete']) + + # Retry endpoint + url = reverse(self._get_endpoint('service-usage-list')) + response = self.client.get(url) + + assert response.data['total_submission_count']['current_month'] == 3 + assert response.data['total_submission_count']['all_time'] == 3 + assert response.data['total_storage_bytes'] == 0 diff --git a/kpi/utils/usage_calculator.py b/kpi/utils/usage_calculator.py index 1119babbe3..e9d8c845b1 100644 --- a/kpi/utils/usage_calculator.py +++ b/kpi/utils/usage_calculator.py @@ -14,8 +14,8 @@ from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES from kobo.apps.trackers.models import NLPUsageCounter from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatXForm, KobocatDailyXFormSubmissionCounter, + KobocatXForm, ) From 32bbcc5b9696e5dacbd9bfd10f791b7b791f0f54 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 09:57:45 -0400 Subject: [PATCH 033/119] Apply requested changes --- kpi/deployment_backends/openrosa_backend.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 4bf50c1909..372e40308e 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -3,7 
+3,7 @@ from collections import defaultdict from contextlib import contextmanager from datetime import date, datetime -from typing import Generator, Optional, Union +from typing import Generator, Optional, Union, Literal from urllib.parse import urlparse try: from zoneinfo import ZoneInfo @@ -38,7 +38,10 @@ remove_validation_status_from_instance, set_instance_validation_statuses, ) -from kobo.apps.openrosa.libs.utils.logger_tools import safe_create_instance, publish_xls_form +from kobo.apps.openrosa.libs.utils.logger_tools import ( + safe_create_instance, + publish_xls_form, +) from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter from kpi.constants import ( @@ -208,7 +211,7 @@ def delete_submission( self, submission_id: int, user: settings.AUTH_USER_MODEL ) -> dict: """ - Delete a submission through KoBoCAT proxy + Delete a submission It returns a dictionary which can used as Response object arguments """ @@ -319,7 +322,8 @@ def duplicate_submission( uuid_formatted ) - safe_create_instance( + # TODO Handle errors returned by safe_create_instance + error, instance = safe_create_instance( username=user.username, xml_file=ContentFile(xml_tostring(xml_parsed)), media_files=attachments, @@ -327,7 +331,7 @@ def duplicate_submission( request=request, ) return self._rewrite_json_attachment_urls( - next(self.get_submissions(user, query={'_uuid': _uuid})), request + next(self.get_submissions(user, submission_id=instance.pk)), request ) def edit_submission( @@ -395,6 +399,7 @@ def edit_submission( media_file for media_file in attachments.values() ) + # TODO Handle errors returned by safe_create_instance safe_create_instance( username=user.username, xml_file=xml_submission_file, @@ -845,7 +850,7 @@ def redeploy(self, active=None): publish_xls_form(xlsx_file, self.asset.owner, self.xform.id_string) - # Do not call save it, asset (and its deployment) is saved right + # Do not call `save_to_db()`, asset (and its deployment) is saved right # after calling this method in `DeployableMixin.deploy()` self.store_data( { @@ -1044,7 +1049,7 @@ def set_validation_status( submission_id: int, user: settings.AUTH_USER_MODEL, data: dict, - method: str, + method: str = Literal['DELETE', 'PATCH'], ) -> dict: """ Update validation status. From b36a130a9396e42090288346b3358b127b23109e Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 11:12:14 -0400 Subject: [PATCH 034/119] Fix bad refactoring --- .../openrosa/apps/main/models/user_profile.py | 74 +++++++++---------- 1 file changed, 33 insertions(+), 41 deletions(-) diff --git a/kobo/apps/openrosa/apps/main/models/user_profile.py b/kobo/apps/openrosa/apps/main/models/user_profile.py index 7460ad11fb..fa0034a825 100644 --- a/kobo/apps/openrosa/apps/main/models/user_profile.py +++ b/kobo/apps/openrosa/apps/main/models/user_profile.py @@ -54,47 +54,39 @@ def to_dict(cls, user_id: int) -> dict: Retrieve all fields from the user's KC profile and return them in a dictionary """ - try: - profile_model = cls.objects.get(user_id=user_id) - # Use a dict instead of the object in case we enter the next exception. - # The response will return a json. - # We want the variable to have the same type in both cases. 
- except UserProfile.DoesNotExist: - token, _ = Token.objects.get_or_create(user_id=user_id) - profile_model, _ = cls.objects.get_or_create(user_id=user_id) - - profile = profile_model.__dict__ - - fields = [ - # Use a (kc_name, new_name) tuple to rename a field - 'name', - 'organization', - ('home_page', 'organization_website'), - ('description', 'bio'), - ('phonenumber', 'phone_number'), - 'address', - 'city', - 'country', - 'twitter', - 'metadata', - ] - - result = {} - - for field in fields: - - if isinstance(field, tuple): - kc_name, field = field - else: - kc_name = field - - value = profile.get(kc_name) - # When a field contains JSON (e.g. `metadata`), it gets loaded as a - # `dict`. Convert it back to a string representation - if isinstance(value, dict): - value = json.dumps(value) - result[field] = value - return result + profile_model, _ = cls.objects.get_or_create(user_id=user_id) + profile = profile_model.__dict__ + + fields = [ + # Use a (kc_name, new_name) tuple to rename a field + 'name', + 'organization', + ('home_page', 'organization_website'), + ('description', 'bio'), + ('phonenumber', 'phone_number'), + 'address', + 'city', + 'country', + 'twitter', + 'metadata', + ] + + result = {} + + for field in fields: + + if isinstance(field, tuple): + kc_name, field = field + else: + kc_name = field + + value = profile.get(kc_name) + # When a field contains JSON (e.g. `metadata`), it gets loaded as a + # `dict`. Convert it back to a string representation + if isinstance(value, dict): + value = json.dumps(value) + result[field] = value + return result @property def gravatar(self): From 0f7f8b71536d968a951f028c0215c2fe166194f8 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 12:53:59 -0400 Subject: [PATCH 035/119] Fix sync_kobocat_xforms --- .../apps/openrosa/apps/logger/models/xform.py | 13 ++++-- kpi/deployment_backends/openrosa_backend.py | 2 + .../commands/sync_kobocat_xforms.py | 42 +++++++------------ kpi/tasks.py | 1 - 4 files changed, 28 insertions(+), 30 deletions(-) diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index c7fd168a59..c37aae1666 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -116,9 +116,6 @@ class Meta: objects = XFormWithoutPendingDeletedManager() all_objects = XFormAllManager() - def file_name(self): - return self.id_string + '.xml' - @property def asset(self): """ @@ -143,6 +140,16 @@ def asset(self): return getattr(self, '_cache_asset') + def file_name(self): + return self.id_string + '.xml' + + @property + def prefixed_hash(self): + """ + Matches what's returned by the KC API + """ + return f'md5:{self.md5_hash}' + def url(self): return reverse( 'download_xform', diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 5ab5b92191..f4a631580a 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -162,6 +162,7 @@ def connect(self, active=False): 'uuid': self._xform.uuid, 'id_string': self._xform.id_string, 'kpi_asset_uid': self.asset.uid, + 'hash': self._xform.prefixed_hash, }, 'version': self.asset.version_id, } @@ -855,6 +856,7 @@ def redeploy(self, active=None): 'uuid': self.xform.uuid, 'id_string': self.xform.id_string, 'kpi_asset_uid': self.asset.uid, + 'hash': self._xform.prefixed_hash, }, 'version': self.asset.version_id, } diff --git a/kpi/management/commands/sync_kobocat_xforms.py 
b/kpi/management/commands/sync_kobocat_xforms.py index fae1d98374..32e5fd3214 100644 --- a/kpi/management/commands/sync_kobocat_xforms.py +++ b/kpi/management/commands/sync_kobocat_xforms.py @@ -37,7 +37,10 @@ ASSET_CT = ContentType.objects.get_for_model(Asset) FROM_KC_ONLY_PERMISSION = Permission.objects.get( content_type=ASSET_CT, codename=PERM_FROM_KC_ONLY) -XFORM_CT = XForm.get_content_type() +XFORM_CT = ContentType.objects.using(settings.OPENROSA_DB_ALIAS).get( + app_label=XForm._meta.app_label, model=XForm._meta.model_name +) + ANONYMOUS_USER = get_anonymous_user() # Replace codenames with Permission PKs, remembering the codenames permission_map_copy = dict(PERMISSIONS_MAP) @@ -176,25 +179,14 @@ def _xform_to_asset_content(xform): return asset_content -def _get_kc_backend_response(xform): - # FIXME wrong backend info - # Get the form data from KC - user = xform.user - response = _kc_forms_api_request(user.auth_token, xform.pk) - if response.status_code == 404: - raise SyncKCXFormsWarning([ - user.username, - xform.id_string, - 'unable to load form data ({})'.format(response.status_code) - ]) - elif response.status_code != 200: - raise SyncKCXFormsError([ - user.username, - xform.id_string, - 'unable to load form data ({})'.format(response.status_code) - ]) - backend_response = response.json() - return backend_response +def _get_backend_response(xform): + return { + 'formid': xform.pk, + 'uuid': xform.uuid, + 'id_string': xform.id_string, + 'kpi_asset_uid': xform.asset.uid, + 'hash': xform.prefixed_hash, + } def _sync_form_content(asset, xform, changes): @@ -244,7 +236,7 @@ def _sync_form_content(asset, xform, changes): # It's important to update `deployment_data` with the new hash from KC; # otherwise, we'll be re-syncing the same content forever (issue #1302) asset.deployment.store_data( - {'backend_response': _get_kc_backend_response(xform)} + {'backend_response': _get_backend_response(xform)} ) return modified @@ -264,7 +256,7 @@ def _sync_form_metadata(asset, xform, changes): backend_deployment.store_data({ 'backend': 'openrosa', 'active': xform.downloadable, - 'backend_response': _get_kc_backend_response(xform), + 'backend_response': _get_backend_response(xform), 'version': asset.version_id }) changes.append('CREATE METADATA') @@ -278,10 +270,8 @@ def _sync_form_metadata(asset, xform, changes): modified = False fetch_backend_response = False - backend_response = asset.deployment.backend_response - if (asset.deployment.active != xform.downloadable or - backend_response['downloadable'] != xform.downloadable): + if asset.deployment.active != xform.downloadable: asset.deployment.store_data({'active': xform.downloadable}) modified = True fetch_backend_response = True @@ -299,7 +289,7 @@ def _sync_form_metadata(asset, xform, changes): if fetch_backend_response: asset.deployment.store_data({ - 'backend_response': _get_kc_backend_response(xform) + 'backend_response': _get_backend_response(xform) }) modified = True diff --git a/kpi/tasks.py b/kpi/tasks.py index 28b5d19697..39fd6f53b3 100644 --- a/kpi/tasks.py +++ b/kpi/tasks.py @@ -105,7 +105,6 @@ def enketo_flush_cached_preview(server_url, form_id): response.raise_for_status() - @celery_app.task(time_limit=LIMIT_HOURS_23, soft_time_limit=LIMIT_HOURS_23) def perform_maintenance(): """ From a711471e557c0207a55f875a81ecae9fb0ce8358 Mon Sep 17 00:00:00 2001 From: Guillermo Date: Thu, 22 Aug 2024 11:57:42 -0600 Subject: [PATCH 036/119] Change name to specify service usage in the class name Signed-off-by: Guillermo Signed-off-by: Guillermo 
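A minimal usage sketch of the renamed class, assuming a Django shell with the kpi
settings loaded; the username below is hypothetical, and passing `organization=None`
exercises the single-user path:

    from kobo.apps.kobo_auth.shortcuts import User
    from kpi.utils.usage_calculator import ServiceUsageCalculator

    user = User.objects.get(username='anotheruser')  # hypothetical user
    calculator = ServiceUsageCalculator(user, organization=None)

    storage_bytes = calculator.get_storage_usage()
    submissions = calculator.get_submission_counters()  # keys: all_time, current_year, current_month
    nlp_usage = calculator.get_nlp_usage_counters()     # ASR seconds and MT characters per period
    billing_period = (calculator.current_month_start, calculator.current_month_end)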
--- .../apps/stripe/tests/test_organization_usage.py | 6 +++--- kpi/serializers/v2/service_usage.py | 4 ++-- kpi/tests/api/v2/test_api_service_usage.py | 4 ++-- kpi/tests/test_usage_calculator.py | 16 ++++++++-------- kpi/utils/usage_calculator.py | 2 +- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 9a5af8a1ea..89d8ff5ba6 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -15,13 +15,13 @@ from kobo.apps.organizations.models import Organization, OrganizationUser from kobo.apps.stripe.tests.utils import generate_enterprise_subscription, generate_plan_subscription from kobo.apps.trackers.tests.submission_utils import create_mock_assets, add_mock_submissions -from kpi.tests.test_usage_calculator import BaseUsageCalculatorTestCase +from kpi.tests.test_usage_calculator import ServiceUsageBaseTestCase from kpi.tests.api.v2.test_api_asset_usage import AssetUsageAPITestCase from rest_framework import status -class OrganizationServiceUsageAPIMultiUserTestCase(BaseUsageCalculatorTestCase): +class OrganizationServiceUsageAPIMultiUserTestCase(ServiceUsageBaseTestCase): """ Test organization service usage when Stripe is enabled. @@ -149,7 +149,7 @@ def test_endpoint_is_cached(self): self.expected_file_size() * self.expected_submissions_multi ) -class OrganizationServiceUsageAPITestCase(BaseUsageCalculatorTestCase): +class OrganizationServiceUsageAPITestCase(ServiceUsageBaseTestCase): org_id = 'orgAKWMFskafsngf' @classmethod diff --git a/kpi/serializers/v2/service_usage.py b/kpi/serializers/v2/service_usage.py index 93bc1236f8..99d022ac44 100644 --- a/kpi/serializers/v2/service_usage.py +++ b/kpi/serializers/v2/service_usage.py @@ -8,7 +8,7 @@ ) from kpi.deployment_backends.kobocat_backend import KobocatDeploymentBackend from kpi.models.asset import Asset -from kpi.utils.usage_calculator import UsageCalculator +from kpi.utils.usage_calculator import ServiceUsageCalculator class AssetUsageSerializer(serializers.HyperlinkedModelSerializer): @@ -117,7 +117,7 @@ def __init__(self, instance=None, data=empty, **kwargs): organization_users__user_id=instance.id, id=organization_id, ).first() - self.calculator = UsageCalculator(instance, organization) + self.calculator = ServiceUsageCalculator(instance, organization) def get_current_month_end(self, user): return self.calculator.current_month_end.isoformat() diff --git a/kpi/tests/api/v2/test_api_service_usage.py b/kpi/tests/api/v2/test_api_service_usage.py index c00f2adb92..37ac4c54a2 100644 --- a/kpi/tests/api/v2/test_api_service_usage.py +++ b/kpi/tests/api/v2/test_api_service_usage.py @@ -2,10 +2,10 @@ from rest_framework import status from kpi.models import Asset -from kpi.tests.test_usage_calculator import BaseUsageCalculatorTestCase +from kpi.tests.test_usage_calculator import BaseServiceUsageTestCase -class ServiceUsageAPITestCase(BaseUsageCalculatorTestCase): +class ServiceUsageAPITestCase(BaseServiceUsageTestCase): def test_anonymous_user(self): """ Test that the endpoint is forbidden to anonymous user diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py index 0cc6236e8e..49df95ec71 100644 --- a/kpi/tests/test_usage_calculator.py +++ b/kpi/tests/test_usage_calculator.py @@ -17,11 +17,11 @@ from kobo.apps.trackers.models import NLPUsageCounter from kpi.models import Asset from kpi.tests.base_test_case import BaseAssetTestCase 
-from kpi.utils.usage_calculator import UsageCalculator +from kpi.utils.usage_calculator import ServiceUsageCalculator from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE -class BaseUsageCalculatorTestCase(BaseAssetTestCase): +class BaseServiceUsageTestCase(BaseAssetTestCase): """ This class contains setup logic and utility functions to test usage calculations @@ -208,7 +208,7 @@ def update_xform_counters(self, asset: Asset, submissions: int = 0): self.counter.save() -class UsageCalculatorTestCase(BaseUsageCalculatorTestCase): +class ServiceUsageCalculatorTestCase(BaseServiceUsageTestCase): def setUp(self): super().setUp() self._create_asset() @@ -216,7 +216,7 @@ def setUp(self): self.add_submissions(count=5) def test_nlp_usage_counters(self): - calculator = UsageCalculator(self.anotheruser, None) + calculator = ServiceUsageCalculator(self.anotheruser, None) nlp_usage = calculator.get_nlp_usage_counters() assert nlp_usage['asr_seconds_current_month'] == 4586 assert nlp_usage['asr_seconds_all_time'] == 4728 @@ -224,17 +224,17 @@ def test_nlp_usage_counters(self): assert nlp_usage['mt_characters_all_time'] == 6726 def test_storage_usage(self): - calculator = UsageCalculator(self.anotheruser, None) + calculator = ServiceUsageCalculator(self.anotheruser, None) assert calculator.get_storage_usage() == 5 * self.expected_file_size() def test_submission_counters(self): - calculator = UsageCalculator(self.anotheruser, None) + calculator = ServiceUsageCalculator(self.anotheruser, None) submission_counters = calculator.get_submission_counters() assert submission_counters['current_month'] == 5 assert submission_counters['all_time'] == 5 def test_no_data(self): - calculator = UsageCalculator(self.someuser, None) + calculator = ServiceUsageCalculator(self.someuser, None) nlp_usage = calculator.get_nlp_usage_counters() submission_counters = calculator.get_submission_counters() @@ -253,7 +253,7 @@ def test_organization_setup(self): organization.add_user(user=self.someuser, is_admin=True) generate_enterprise_subscription(organization) - calculator = UsageCalculator(self.someuser, organization) + calculator = ServiceUsageCalculator(self.someuser, organization) submission_counters = calculator.get_submission_counters() assert submission_counters['current_month'] == 5 assert submission_counters['all_time'] == 5 diff --git a/kpi/utils/usage_calculator.py b/kpi/utils/usage_calculator.py index e9d8c845b1..481168ede9 100644 --- a/kpi/utils/usage_calculator.py +++ b/kpi/utils/usage_calculator.py @@ -19,7 +19,7 @@ ) -class UsageCalculator: +class ServiceUsageCalculator: def __init__(self, user: User, organization: Optional[Organization]): self.user = user self.organization = organization From 9e7a4c9a47d94d7c199082f52b5a8c3a24a80d1a Mon Sep 17 00:00:00 2001 From: Guillermo Date: Thu, 22 Aug 2024 11:57:57 -0600 Subject: [PATCH 037/119] Fix broken test related to service usage calculations --- .../tests/api/v2/test_api.py | 10 ++++----- kobo/apps/project_ownership/tests/utils.py | 21 +++++++++++-------- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 9b600815f7..526270aa3b 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -16,7 +16,7 @@ InviteStatusChoices, Transfer, ) -from kobo.apps.project_ownership.tests.utils import MockServiceUsageSerializer +from kobo.apps.project_ownership.tests.utils 
import MockServiceUsageCalculator from kobo.apps.trackers.utils import update_nlp_counter from kpi.constants import PERM_VIEW_ASSET @@ -357,12 +357,12 @@ def __add_submissions(self): self.submissions = submissions @patch( - 'kpi.serializers.v2.service_usage.ServiceUsageSerializer._get_storage_usage', - new=MockServiceUsageSerializer._get_storage_usage + 'kpi.utils.usage_calculator.ServiceUsageCalculator.get_storage_usage', + new=MockServiceUsageCalculator.get_storage_usage ) @patch( - 'kpi.serializers.v2.service_usage.ServiceUsageSerializer._get_submission_counters', - new=MockServiceUsageSerializer._get_submission_counters + 'kpi.utils.usage_calculator.ServiceUsageCalculator.get_submission_counters', + new=MockServiceUsageCalculator.get_submission_counters ) @patch( 'kobo.apps.project_ownership.models.transfer.reset_kc_permissions', diff --git a/kobo/apps/project_ownership/tests/utils.py b/kobo/apps/project_ownership/tests/utils.py index fe46f81c9e..602f7e0775 100644 --- a/kobo/apps/project_ownership/tests/utils.py +++ b/kobo/apps/project_ownership/tests/utils.py @@ -2,24 +2,25 @@ from kpi.models.asset import Asset -class MockServiceUsageSerializer: +class MockServiceUsageCalculator: - def _get_storage_usage(self): + def get_storage_usage(self): assets = Asset.objects.annotate(user_id=F('owner_id')).filter( self._user_id_query ) - self._total_storage_bytes = 0 + total_storage_bytes = 0 for asset in assets: if asset.has_deployment: for submission in asset.deployment.get_submissions(asset.owner): - self._total_storage_bytes += sum( + total_storage_bytes += sum( [att['bytes'] for att in submission['_attachments']] ) + return total_storage_bytes - def _get_submission_counters(self, month_filter, year_filter): - self._total_submission_count = { + def get_submission_counters(self): + total_submission_count = { 'all_time': 0, 'current_year': 0, 'current_month': 0, @@ -30,6 +31,8 @@ def _get_submission_counters(self, month_filter, year_filter): for asset in assets: if asset.has_deployment: submissions = asset.deployment.get_submissions(asset.owner) - self._total_submission_count['all_time'] += len(submissions) - self._total_submission_count['current_year'] += len(submissions) - self._total_submission_count['current_month'] += len(submissions) + total_submission_count['all_time'] += len(submissions) + total_submission_count['current_year'] += len(submissions) + total_submission_count['current_month'] += len(submissions) + + return total_submission_count From 22ff2b70a82faba2d1fd741ffd4870d11a1f678b Mon Sep 17 00:00:00 2001 From: Guillermo Date: Thu, 22 Aug 2024 12:11:42 -0600 Subject: [PATCH 038/119] Fix broken test due to change in class name --- kobo/apps/stripe/tests/test_organization_usage.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 89d8ff5ba6..16da3d55f5 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -15,13 +15,13 @@ from kobo.apps.organizations.models import Organization, OrganizationUser from kobo.apps.stripe.tests.utils import generate_enterprise_subscription, generate_plan_subscription from kobo.apps.trackers.tests.submission_utils import create_mock_assets, add_mock_submissions -from kpi.tests.test_usage_calculator import ServiceUsageBaseTestCase +from kpi.tests.test_usage_calculator import BaseServiceUsageTestCase from kpi.tests.api.v2.test_api_asset_usage import 
AssetUsageAPITestCase from rest_framework import status -class OrganizationServiceUsageAPIMultiUserTestCase(ServiceUsageBaseTestCase): +class OrganizationServiceUsageAPIMultiUserTestCase(BaseServiceUsageTestCase): """ Test organization service usage when Stripe is enabled. @@ -149,7 +149,7 @@ def test_endpoint_is_cached(self): self.expected_file_size() * self.expected_submissions_multi ) -class OrganizationServiceUsageAPITestCase(ServiceUsageBaseTestCase): +class OrganizationServiceUsageAPITestCase(BaseServiceUsageTestCase): org_id = 'orgAKWMFskafsngf' @classmethod From 735817db43f77e4287eef90b6465711052b6bea7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 14:27:31 -0400 Subject: [PATCH 039/119] Improve error message when XForm does not exist --- kpi/deployment_backends/openrosa_backend.py | 32 +++++++++++++-------- kpi/exceptions.py | 9 +++++- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 53579d5a4b..9a5cb29721 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -58,6 +58,7 @@ AttachmentNotFoundException, InvalidXFormException, InvalidXPathException, + MissingXFormException, SubmissionIntegrityError, SubmissionNotFoundException, XPathNotFoundException, @@ -100,7 +101,7 @@ def __init__(self, asset): def attachment_storage_bytes(self): try: return self.xform.attachment_storage_bytes - except InvalidXFormException: + except (InvalidXFormException, MissingXFormException): return 0 def bulk_assign_mapped_perms(self): @@ -200,8 +201,8 @@ def delete(self): WARNING! Deletes all submitted data! """ try: - self._xform.delete() - except XForm.DoesNotExist: + self.xform.delete() + except (MissingXFormException, InvalidXFormException): pass super().delete() @@ -263,7 +264,14 @@ def delete_submissions( data['submission_ids'] = submission_ids # TODO handle errors - deleted_count = delete_instances(self.xform, data) + try: + deleted_count = delete_instances(self.xform, data) + except (MissingXFormException, InvalidXFormException): + return { + 'data': {'detail': f'Could not delete submissions'}, + 'content_type': 'application/json', + 'status': status.HTTP_400_BAD_REQUEST, + } return { 'data': {'detail': f'{deleted_count} submissions have been deleted'}, @@ -710,7 +718,7 @@ def get_orphan_postgres_submissions(self) -> Optional[QuerySet, bool]: try: return Instance.objects.filter(xform_id=self.xform_id) - except InvalidXFormException: + except (InvalidXFormException, MissingXFormException): return None def get_submissions( @@ -1148,13 +1156,13 @@ def store_submission( def submission_count(self): try: return self.xform.num_of_submissions - except InvalidXFormException: + except (InvalidXFormException, MissingXFormException): return 0 def submission_count_since_date(self, start_date=None): try: xform_id = self.xform_id - except InvalidXFormException: + except (InvalidXFormException, MissingXFormException): return 0 today = timezone.now().date() @@ -1288,14 +1296,14 @@ def xform(self): .first() ) + if not xform: + raise MissingXFormException + if not ( - xform - and xform.user.username == self.asset.owner.username + xform.user.username == self.asset.owner.username and xform.id_string == self.xform_id_string ): - raise InvalidXFormException( - 'Deployment links to an unexpected KoboCAT XForm' - ) + raise InvalidXFormException self._xform = xform return self._xform diff --git a/kpi/exceptions.py b/kpi/exceptions.py index 
924e8fe733..fa17212107 100644 --- a/kpi/exceptions.py +++ b/kpi/exceptions.py @@ -91,7 +91,10 @@ class InvalidSearchException(exceptions.APIException): class InvalidXFormException(Exception): - pass + def __init__( + self, message=t('Deployment links to an unexpected KoboCAT XForm') + ): + super().__init__(message) class InvalidXPathException(Exception): @@ -134,6 +137,10 @@ class KobocatProfileException(Exception): pass +class MissingXFormException(Exception): + pass + + class NotSupportedFormatException(Exception): pass From 425d6247b303ba7f50e337af04d60e91e66618e8 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 15:27:49 -0400 Subject: [PATCH 040/119] Refactor delete action and bulk update --- kobo/apps/trash_bin/utils.py | 31 ++-- kpi/deployment_backends/openrosa_backend.py | 28 +--- kpi/views/v2/data.py | 157 +++++++++++++------- 3 files changed, 116 insertions(+), 100 deletions(-) diff --git a/kobo/apps/trash_bin/utils.py b/kobo/apps/trash_bin/utils.py index c5b7d22487..fdd21111a4 100644 --- a/kobo/apps/trash_bin/utils.py +++ b/kobo/apps/trash_bin/utils.py @@ -15,10 +15,12 @@ PeriodicTask, PeriodicTasks, ) -from rest_framework import status from kobo.apps.audit_log.models import AuditLog, AuditAction -from kpi.exceptions import KobocatCommunicationError +from kpi.exceptions import ( + InvalidXFormException, + MissingXFormException, +) from kpi.models import Asset, ExportTask, ImportTask from kpi.utils.mongo_helper import MongoHelper from kpi.utils.storage import rmdir @@ -28,7 +30,6 @@ TrashNotImplementedError, TrashMongoDeleteOrphansError, TrashTaskInProgressError, - TrashUnknownKobocatError, ) from .models import TrashStatus from .models.account import AccountTrash @@ -323,24 +324,16 @@ def _delete_submissions(request_author: settings.AUTH_USER_MODEL, asset: 'kpi.As )) submission_ids.append(submission['_id']) - json_response = asset.deployment.delete_submissions( - {'submission_ids': submission_ids, 'query': ''}, request_author - ) - - if json_response['status'] in [ - status.HTTP_502_BAD_GATEWAY, - status.HTTP_504_GATEWAY_TIMEOUT, - ]: - raise KobocatCommunicationError - - if json_response['status'] not in [ - status.HTTP_404_NOT_FOUND, - status.HTTP_200_OK, - ]: - raise TrashUnknownKobocatError(response=json_response) + try: + deleted = asset.deployment.delete_submissions( + {'submission_ids': submission_ids, 'query': ''}, request_author + ) + except (MissingXFormException, InvalidXFormException): + # XForm is invalid or gone + deleted = 0 if audit_logs: - if json_response['status'] == status.HTTP_404_NOT_FOUND: + if not deleted: # Submissions are lingering in MongoDB but XForm has been # already deleted if not MongoHelper.delete( diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 9a5cb29721..38241fb83c 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -223,17 +223,7 @@ def delete_submission( ) count, _ = Instance.objects.filter(pk=submission_id).delete() - if not count: - return { - 'data': {'detail': 'Submission not found'}, - 'content_type': 'application/json', - 'status': status.HTTP_404_NOT_FOUND, - } - - return { - 'content_type': 'application/json', - 'status': status.HTTP_204_NO_CONTENT, - } + return count def delete_submissions( self, data: dict, user: settings.AUTH_USER_MODEL, **kwargs @@ -263,21 +253,7 @@ def delete_submissions( data.pop('query', None) data['submission_ids'] = submission_ids - # TODO handle errors - try: - 
deleted_count = delete_instances(self.xform, data) - except (MissingXFormException, InvalidXFormException): - return { - 'data': {'detail': f'Could not delete submissions'}, - 'content_type': 'application/json', - 'status': status.HTTP_400_BAD_REQUEST, - } - - return { - 'data': {'detail': f'{deleted_count} submissions have been deleted'}, - 'content_type': 'application/json', - 'status': status.HTTP_200_OK, - } + return delete_instances(self.xform, data) def duplicate_submission( self, submission_id: int, request: 'rest_framework.request.Request', diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 6ab9300769..c6339194df 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -31,7 +31,11 @@ PERM_VALIDATE_SUBMISSIONS, PERM_VIEW_SUBMISSIONS, ) -from kpi.exceptions import ObjectDeploymentDoesNotExist +from kpi.exceptions import ( + InvalidXFormException, + MissingXFormException, + ObjectDeploymentDoesNotExist, +) from kpi.models import Asset from kpi.paginators import DataPagination from kpi.permissions import ( @@ -328,58 +332,12 @@ def _get_deployment(self): @action(detail=False, methods=['PATCH', 'DELETE'], renderer_classes=[renderers.JSONRenderer]) def bulk(self, request, *args, **kwargs): - deployment = self._get_deployment() - kwargs = { - 'data': request.data, - 'context': self.get_serializer_context(), - } if request.method == 'DELETE': - action_ = deployment.delete_submissions - kwargs['perm'] = PERM_DELETE_SUBMISSIONS + response = self._bulk_delete(request) elif request.method == 'PATCH': - action_ = deployment.bulk_update_submissions - kwargs['perm'] = PERM_CHANGE_SUBMISSIONS - - bulk_actions_validator = DataBulkActionsValidator(**kwargs) - bulk_actions_validator.is_valid(raise_exception=True) - audit_logs = [] - if request.method == 'DELETE': - # Prepare audit logs - data = copy.deepcopy(bulk_actions_validator.data) - # Retrieve all submissions matching `submission_ids` or `query`. - # If user is not allowed to see some of the submissions (i.e.: user - # with partial permissions), the request will be rejected - # (aka `PermissionDenied`) before AuditLog objects are saved in DB. 
- submissions = deployment.get_submissions( - user=request.user, - submission_ids=data['submission_ids'], - query=data['query'], - fields=['_id', '_uuid'] - ) - for submission in submissions: - audit_logs.append(AuditLog( - app_label='logger', - model_name='instance', - object_id=submission['_id'], - user=request.user, - user_uid=request.user.extra_details.uid, - metadata={ - 'asset_uid': self.asset.uid, - 'uuid': submission['_uuid'], - }, - action=AuditAction.DELETE, - )) - - # Send request to KC - json_response = action_( - bulk_actions_validator.data, request.user, request=request - ) - - # If requests has succeeded, let's log deletions (if any) - if json_response['status'] == status.HTTP_200_OK and audit_logs: - AuditLog.objects.bulk_create(audit_logs) + response = self._bulk_update(request) - return Response(**json_response) + return Response(**response) def destroy(self, request, pk, *args, **kwargs): deployment = self._get_deployment() @@ -393,10 +351,9 @@ def destroy(self, request, pk, *args, **kwargs): fields=['_id', '_uuid'] ) - json_response = deployment.delete_submission( + if deployment.delete_submission( submission_id, user=request.user - ) - if json_response['status'] == status.HTTP_204_NO_CONTENT: + ): AuditLog.objects.create( app_label='logger', model_name='instance', @@ -408,8 +365,17 @@ def destroy(self, request, pk, *args, **kwargs): }, action=AuditAction.DELETE, ) - - return Response(**json_response) + response = { + 'content_type': 'application/json', + 'status': status.HTTP_204_NO_CONTENT, + } + else: + response = { + 'data': {'detail': 'Submission not found'}, + 'content_type': 'application/json', + 'status': status.HTTP_404_NOT_FOUND, + } + return Response(**response) @action( detail=True, @@ -590,6 +556,87 @@ def validation_statuses(self, request, *args, **kwargs): return Response(**json_response) + def _bulk_delete(self, request: Request) -> dict: + deployment = self._get_deployment() + serializer_params = { + 'data': request.data, + 'context': self.get_serializer_context(), + 'perm': PERM_DELETE_SUBMISSIONS + } + bulk_actions_validator = DataBulkActionsValidator(**serializer_params) + bulk_actions_validator.is_valid(raise_exception=True) + + # Prepare audit logs + data = copy.deepcopy(bulk_actions_validator.data) + # Retrieve all submissions matching `submission_ids` or `query`. + # If user is not allowed to see some of the submissions (i.e.: user + # with partial permissions), the request will be rejected + # (aka `PermissionDenied`) before AuditLog objects are saved in DB. + submissions = deployment.get_submissions( + user=request.user, + submission_ids=data['submission_ids'], + query=data['query'], + fields=['_id', '_uuid'] + ) + + # Prepare logs before deleting all submissions. 
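+        # The entries are built in memory and bulk-created only after the
+        # backend delete succeeds, so each removed submission's `_id` and
+        # `_uuid` stay traceable in the audit trail.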
+ audit_logs = [] + for submission in submissions: + audit_logs.append(AuditLog( + app_label='logger', + model_name='instance', + object_id=submission['_id'], + user=request.user, + user_uid=request.user.extra_details.uid, + metadata={ + 'asset_uid': self.asset.uid, + 'uuid': submission['_uuid'], + }, + action=AuditAction.DELETE, + )) + + try: + deleted = deployment.delete_submissions( + bulk_actions_validator.data, request.user, request=request + ) + except (MissingXFormException, InvalidXFormException): + return { + 'data': {'detail': 'Could not delete submissions'}, + 'content_type': 'application/json', + 'status': status.HTTP_400_BAD_REQUEST, + } + + # If requests has succeeded, let's log deletions (if any) + if audit_logs and deleted: + AuditLog.objects.bulk_create(audit_logs) + + return { + 'data': {'detail': f'{deleted} submissions have been deleted'}, + 'content_type': 'application/json', + 'status': status.HTTP_200_OK, + } + + def _bulk_update(self, request: Request) -> dict: + deployment = self._get_deployment() + serializer_params = { + 'data': request.data, + 'context': self.get_serializer_context(), + 'perm': PERM_CHANGE_SUBMISSIONS, + } + bulk_actions_validator = DataBulkActionsValidator(**serializer_params) + bulk_actions_validator.is_valid(raise_exception=True) + + try: + return deployment.bulk_update_submissions( + bulk_actions_validator.data, request.user, request=request + ) + except (MissingXFormException, InvalidXFormException): + return { + 'data': {'detail': f'Could not updated submissions'}, + 'content_type': 'application/json', + 'status': status.HTTP_400_BAD_REQUEST, + } + def _filter_mongo_query(self, request): """ Build filters to pass to Mongo query. From 821686f5fe9195cae437b5fffc7346f084b28629 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 18:03:50 -0400 Subject: [PATCH 041/119] Use contextmanager to handle create_instance errors --- kobo/apps/openrosa/libs/utils/logger_tools.py | 98 ++++++++++++------- kpi/deployment_backends/openrosa_backend.py | 24 ++--- kpi/views/v2/asset_snapshot.py | 38 +++---- kpi/views/v2/data.py | 30 ++++-- 4 files changed, 113 insertions(+), 77 deletions(-) diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index 81decfc462..55270b47df 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -6,6 +6,7 @@ import re import sys import traceback +from contextlib import contextmanager from datetime import date, datetime, timezone from typing import Generator, Optional, Union from xml.etree import ElementTree as ET @@ -286,6 +287,61 @@ def get_xform_from_submission(xml, username, uuid=None): ) +@contextmanager +def http_open_rosa_error_handler(func, request): + class _ContextResult: + def __init__(self): + self.func_return = None + self.error = None + self.http_error_response = None + + @property + def status_code(self): + if self.http_error_response: + return self.http_error_response.status_code + return 200 + + result = _ContextResult() + try: + result.func_return = func() + except InstanceInvalidUserError: + result.error = t('Username or ID required.') + result.http_error_response = OpenRosaResponseBadRequest(result.error) + except InstanceEmptyError: + result.error = t('Received empty submission. 
No instance was created') + result.http_error_response = OpenRosaResponseBadRequest(result.error) + except FormInactiveError: + result.error = t('Form is not active') + result.http_error_response = OpenRosaResponseNotAllowed(result.error) + except TemporarilyUnavailableError: + result.error = t('Temporarily unavailable') + result.http_error_response = OpenRosaTemporarilyUnavailable(result.error) + except XForm.DoesNotExist: + result.error = t('Form does not exist on this account') + result.http_error_response = OpenRosaResponseNotFound(result.error) + except ExpatError: + result.error = t('Improperly formatted XML.') + result.http_error_response = OpenRosaResponseBadRequest(result.error) + except DuplicateInstance: + result.error = t('Duplicate submission') + response = OpenRosaResponse(result.error) + response.status_code = 202 + response['Location'] = request.build_absolute_uri(request.path) + result.http_error_response = response + except PermissionDenied: + result.error = t('Access denied') + result.http_error_response = OpenRosaResponseForbidden(result.error) + except InstanceMultipleNodeError as e: + result.error = str(e) + result.http_error_response = OpenRosaResponseBadRequest(e) + except DjangoUnicodeDecodeError: + result.error = t( + 'File likely corrupted during ' 'transmission, please try later.' + ) + result.http_error_response = OpenRosaResponseBadRequest(result.error) + yield result + + def inject_instanceid(xml_str, uuid): if get_uuid_from_xml(xml_str) is None: xml = clean_and_parse_xml(xml_str) @@ -550,48 +606,18 @@ def safe_create_instance( :returns: A list [error, instance] where error is None if there was no error. """ - error = instance = None - - try: - instance = create_instance( + with http_open_rosa_error_handler( + lambda: create_instance( username, xml_file, media_files, uuid=uuid, date_created_override=date_created_override, request=request, - ) - except InstanceInvalidUserError: - error = OpenRosaResponseBadRequest(t("Username or ID required.")) - except InstanceEmptyError: - error = OpenRosaResponseBadRequest( - t("Received empty submission. No instance was created") - ) - except FormInactiveError: - error = OpenRosaResponseNotAllowed(t("Form is not active")) - except TemporarilyUnavailableError: - error = OpenRosaTemporarilyUnavailable(t("Temporarily unavailable")) - except XForm.DoesNotExist: - error = OpenRosaResponseNotFound( - t("Form does not exist on this account") - ) - except ExpatError as e: - error = OpenRosaResponseBadRequest(t("Improperly formatted XML.")) - except DuplicateInstance: - response = OpenRosaResponse(t("Duplicate submission")) - response.status_code = 202 - response['Location'] = request.build_absolute_uri(request.path) - error = response - except PermissionDenied as e: - error = OpenRosaResponseForbidden(e) - except InstanceMultipleNodeError as e: - error = OpenRosaResponseBadRequest(e) - except DjangoUnicodeDecodeError: - error = OpenRosaResponseBadRequest(t("File likely corrupted during " - "transmission, please try later." 
- )) - - return [error, instance] + ), + request, + ) as handler: + return [handler.http_error_response, handler.func_return] def save_attachments( diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 38241fb83c..4abed7039b 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -39,8 +39,9 @@ set_instance_validation_statuses, ) from kobo.apps.openrosa.libs.utils.logger_tools import ( - safe_create_instance, + create_instance, publish_xls_form, + safe_create_instance, ) from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter @@ -311,7 +312,7 @@ def duplicate_submission( uuid_formatted ) - safe_create_instance( + instance = create_instance( username=self.asset.owner.username, xml_file=ContentFile(xml_tostring(xml_parsed)), media_files=attachments, @@ -321,7 +322,7 @@ def duplicate_submission( # Cast to list to help unit tests to pass. return self._rewrite_json_attachment_urls( - list(self.get_submissions(user, query={'_uuid': _uuid}))[0], request + self.get_submission(submission_id=instance.pk, user=user), request ) def edit_submission( @@ -382,26 +383,13 @@ def edit_submission( # Request. xml_submission_file.seek(0) - # Retrieve only File objects to pass to `safe_create_instance` - # TODO remove those files as soon as the view sends request.FILES directly - # See TODO in kpi/views/v2/asset_snapshot.py::submission - media_files = ( - media_file for media_file in attachments.values() - ) - - safe_create_instance( + return create_instance( username=user.username, xml_file=xml_submission_file, - media_files=media_files, + media_files=attachments, request=request, ) - return { - 'headers': {}, - 'content_type': 'text/xml; charset=utf-8', - 'status': status.HTTP_201_CREATED, - } - @property def enketo_id(self): if not (enketo_id := self.get_data('enketo_id')): diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py index c55d4c8855..9a964b6ea4 100644 --- a/kpi/views/v2/asset_snapshot.py +++ b/kpi/views/v2/asset_snapshot.py @@ -4,11 +4,14 @@ import requests from django.conf import settings from django.http import HttpResponseRedirect, Http404 -from rest_framework import renderers, serializers +from rest_framework import renderers, serializers, status from rest_framework.decorators import action from rest_framework.response import Response from rest_framework.reverse import reverse +from kobo.apps.openrosa.libs.utils.logger_tools import ( + http_open_rosa_error_handler, +) from kpi.authentication import DigestAuthentication, EnketoSessionAuthentication from kpi.constants import PERM_VIEW_ASSET from kpi.exceptions import SubmissionIntegrityError @@ -224,29 +227,30 @@ def submission(self, request, *args, **kwargs): xml_submission_file = request.data['xml_submission_file'] - # Prepare attachments even if all files are present in `request.FILES` - # (i.e.: submission XML and attachments) - attachments = {} # Remove 'xml_submission_file' since it is already handled request.FILES.pop('xml_submission_file') - # TODO pass request.FILES to `edit_submission()` directly when - # KobocatBackendDeployment is gone - if len(request.FILES): - for name, attachment in request.FILES.items(): - attachments[name] = attachment - try: - xml_response = asset_snapshot.asset.deployment.edit_submission( - xml_submission_file, request, attachments - ) + with http_open_rosa_error_handler( + lambda: 
asset_snapshot.asset.deployment.edit_submission( + xml_submission_file, request, request.FILES.values() + ), + request, + ) as handler: + if handler.http_error_response: + return handler.http_error_response + else: + instance = handler.func_return + response = { + 'headers': self.get_headers(), + 'data': instance.xml, + 'content_type': 'text/xml; charset=utf-8', + 'status': status.HTTP_201_CREATED, + } + return Response(**response) except SubmissionIntegrityError as e: raise serializers.ValidationError(str(e)) - # Add OpenRosa headers to response - xml_response['headers'].update(self.get_headers()) - return Response(**xml_response) - @action(detail=True, renderer_classes=[renderers.TemplateHTMLRenderer]) def xform(self, request, *args, **kwargs): """ diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index c6339194df..0881942ad0 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -5,7 +5,7 @@ import requests from django.conf import settings -from django.http import Http404 +from django.http import Http404, HttpResponseForbidden from django.utils.translation import gettext_lazy as t from pymongo.errors import OperationFailure from rest_framework import ( @@ -22,6 +22,9 @@ from rest_framework_extensions.mixins import NestedViewSetMixin from kobo.apps.audit_log.models import AuditAction, AuditLog +from kobo.apps.openrosa.libs.utils.logger_tools import ( + http_open_rosa_error_handler, +) from kpi.authentication import EnketoSessionAuthentication from kpi.constants import ( SUBMISSION_FORMAT_TYPE_JSON, @@ -52,7 +55,6 @@ from kpi.utils.log import logging from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin from kpi.utils.xml import ( - edit_submission_xml, fromstring_preserve_root_xmlns, get_or_create_element, xml_tostring, @@ -513,10 +515,26 @@ def duplicate(self, request, pk, *args, **kwargs): deployment = self._get_deployment() # Coerce to int because back end only finds matches with same type submission_id = positive_int(pk) - duplicate_response = deployment.duplicate_submission( - submission_id=submission_id, request=request - ) - return Response(duplicate_response, status=status.HTTP_201_CREATED) + + with http_open_rosa_error_handler( + lambda: deployment.duplicate_submission( + submission_id=submission_id, request=request + ), + request, + ) as handler: + if handler.http_error_response: + response = { + 'data': handler.error, + 'content_type': 'application/json', + 'status': handler.status_code, + } + else: + response = { + 'data': handler.func_return, + 'content_type': 'application/json', + 'status': status.HTTP_201_CREATED, + } + return Response(**response) @action(detail=True, methods=['GET', 'PATCH', 'DELETE'], renderer_classes=[renderers.JSONRenderer], From f5e1f923f6f6ba2529b38e4184ed6ddf33da8ea6 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 22 Aug 2024 18:12:43 -0400 Subject: [PATCH 042/119] Add TODO comment --- kpi/tasks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/kpi/tasks.py b/kpi/tasks.py index 28b5d19697..2ac7c12540 100644 --- a/kpi/tasks.py +++ b/kpi/tasks.py @@ -81,6 +81,7 @@ def sync_media_files(asset_uid): # 🙈 Race condition: Celery task starts too fast and does not see # the deployment data, even if asset has been saved prior to call this # task + # TODO Find why the race condition happens and remove `time.sleep(1)` time.sleep(1) asset.refresh_from_db(fields=['_deployment_data']) From 7697d683893f60053d1479530b843e47aeb5ec5d Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Fri, 23 Aug 2024 06:56:18 -0400 Subject: 
[PATCH 043/119] Use MagicMock instead of MockSSRFProtect --- kobo/apps/hook/tests/hook_test_case.py | 7 ----- kobo/apps/hook/tests/test_api_hook.py | 36 ++++++++++++++++---------- kobo/apps/hook/tests/test_email.py | 11 +++++--- kobo/apps/hook/tests/test_ssrf.py | 14 ++++++---- kobo/apps/hook/tests/test_utils.py | 7 ++--- 5 files changed, 42 insertions(+), 33 deletions(-) diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py index c5c31d420a..bbf8799806 100644 --- a/kobo/apps/hook/tests/hook_test_case.py +++ b/kobo/apps/hook/tests/hook_test_case.py @@ -16,13 +16,6 @@ from ..models import HookLog, Hook -class MockSSRFProtect: - - @staticmethod - def _get_ip_address(url): - return ip_address('1.2.3.4') - - class HookTestCase(KpiTestCase): def setUp(self): diff --git a/kobo/apps/hook/tests/test_api_hook.py b/kobo/apps/hook/tests/test_api_hook.py index b5a62c1623..c6368f568d 100644 --- a/kobo/apps/hook/tests/test_api_hook.py +++ b/kobo/apps/hook/tests/test_api_hook.py @@ -5,9 +5,11 @@ import responses from constance.test import override_config from django.urls import reverse -from mock import patch +from ipaddress import ip_address +from mock import patch, MagicMock from rest_framework import status + from kobo.apps.hook.constants import ( HOOK_LOG_FAILED, HOOK_LOG_PENDING, @@ -22,7 +24,7 @@ PERM_CHANGE_ASSET ) from kpi.utils.datetime import several_minutes_from_now -from .hook_test_case import HookTestCase, MockSSRFProtect +from .hook_test_case import HookTestCase from ..exceptions import HookRemoteServerDownError @@ -173,7 +175,7 @@ def test_partial_update_hook(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address + new=MagicMock(return_value=ip_address('1.2.3.4')) ) @responses.activate def test_send_and_retry(self): @@ -203,7 +205,7 @@ def test_send_and_retry(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address + new=MagicMock(return_value=ip_address('1.2.3.4')) ) @responses.activate def test_send_and_cannot_retry(self): @@ -233,7 +235,7 @@ def test_send_and_cannot_retry(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address + new=MagicMock(return_value=ip_address('1.2.3.4')) ) @responses.activate def test_payload_template(self): @@ -317,8 +319,10 @@ def test_payload_template_validation(self): } self.assertEqual(response.data, expected_response) - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MagicMock(return_value=ip_address('1.2.3.4')) + ) @responses.activate def test_hook_log_filter_success(self): # Create success hook @@ -352,8 +356,10 @@ def test_hook_log_filter_success(self): response = self.client.get(f'{hook_log_url}?status={HOOK_LOG_FAILED}', format='json') self.assertEqual(response.data.get('count'), 0) - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MagicMock(return_value=ip_address('1.2.3.4')) + ) @responses.activate def test_hook_log_filter_failure(self): # Create failing hook @@ -414,14 +420,16 @@ def test_hook_log_filter_validation(self): response = self.client.get(f'{hook_log_url}?status=abc', format='json') self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - 
@patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MagicMock(return_value=ip_address('1.2.3.4')) + ) @responses.activate def test_hook_log_filter_date(self): # Create success hook - hook = self._create_hook(name="date hook", - endpoint="http://date.service.local/", - settings={}) + hook = self._create_hook( + name="date hook", endpoint="http://date.service.local/", settings={} + ) responses.add(responses.POST, hook.endpoint, status=status.HTTP_200_OK, content_type="application/json") diff --git a/kobo/apps/hook/tests/test_email.py b/kobo/apps/hook/tests/test_email.py index 2b1f7d0fdf..2a587340cb 100644 --- a/kobo/apps/hook/tests/test_email.py +++ b/kobo/apps/hook/tests/test_email.py @@ -5,9 +5,10 @@ from django.template.loader import get_template from django.utils import translation, dateparse from django_celery_beat.models import PeriodicTask, CrontabSchedule -from mock import patch +from ipaddress import ip_address +from mock import patch, MagicMock -from .hook_test_case import HookTestCase, MockSSRFProtect +from .hook_test_case import HookTestCase from ..tasks import failures_reports @@ -28,8 +29,10 @@ def _create_periodic_task(self): return periodic_task - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MagicMock(return_value=ip_address('1.2.3.4')) + ) @responses.activate def test_notifications(self): self._create_periodic_task() diff --git a/kobo/apps/hook/tests/test_ssrf.py b/kobo/apps/hook/tests/test_ssrf.py index 3d28e4a766..df3ebd193f 100644 --- a/kobo/apps/hook/tests/test_ssrf.py +++ b/kobo/apps/hook/tests/test_ssrf.py @@ -1,8 +1,8 @@ - import pytest import responses from constance.test import override_config -from mock import patch +from ipaddress import ip_address +from mock import patch, MagicMock from rest_framework import status from ssrf_protect.exceptions import SSRFProtectException @@ -10,13 +10,15 @@ HOOK_LOG_FAILED, KOBO_INTERNAL_ERROR_STATUS_CODE ) -from .hook_test_case import HookTestCase, MockSSRFProtect +from .hook_test_case import HookTestCase class SSRFHookTestCase(HookTestCase): - @patch('ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address) + @patch( + 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', + new=MagicMock(return_value=ip_address('1.2.3.4')) + ) @override_config(SSRF_DENIED_IP_ADDRESS='1.2.3.4') @responses.activate def test_send_with_ssrf_options(self): @@ -35,6 +37,8 @@ def test_send_with_ssrf_options(self): content_type='application/json') # Try to send data to external endpoint + # Note: it should failed because we explicitly deny 1.2.3.4 and + # SSRFProtect._get_ip_address is mocked to return 1.2.3.4 with pytest.raises(SSRFProtectException): service_definition.send() diff --git a/kobo/apps/hook/tests/test_utils.py b/kobo/apps/hook/tests/test_utils.py index 94167f1886..931f66ce1e 100644 --- a/kobo/apps/hook/tests/test_utils.py +++ b/kobo/apps/hook/tests/test_utils.py @@ -1,8 +1,9 @@ import responses -from mock import patch +from ipaddress import ip_address +from mock import patch, MagicMock from rest_framework import status -from .hook_test_case import HookTestCase, MockSSRFProtect +from .hook_test_case import HookTestCase from ..utils.services import call_services @@ -10,7 +11,7 @@ class HookUtilsTestCase(HookTestCase): @patch( 
'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MockSSRFProtect._get_ip_address + new=MagicMock(return_value=ip_address('1.2.3.4')) ) @responses.activate def test_data_submission(self): From 3698143eb92d10eaca7b40a55529e1941b9a9ed7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Fri, 23 Aug 2024 07:00:41 -0400 Subject: [PATCH 044/119] Remove unused import --- kpi/views/v2/data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 0881942ad0..46da8ea63a 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -5,7 +5,7 @@ import requests from django.conf import settings -from django.http import Http404, HttpResponseForbidden +from django.http import Http404 from django.utils.translation import gettext_lazy as t from pymongo.errors import OperationFailure from rest_framework import ( From a056a74d74999c0b4648d8be02fdb712338a049b Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Fri, 23 Aug 2024 09:37:28 -0400 Subject: [PATCH 045/119] Fix unit tests --- .../apps/api/tests/viewsets/test_xform_submission_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py index ea6ab542d8..989bdc3967 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py @@ -327,7 +327,7 @@ def test_post_submission_require_auth_other_user(self): auth = DigestAuth('alice', 'alicealice') request.META.update(auth(request.META, response)) response = self.view(request, username=self.user.username) - self.assertContains(response, 'Forbidden', status_code=403) + self.assertContains(response, 'Access denied', status_code=403) def test_post_submission_require_auth_data_entry_role(self): From 415b51fa3cf8c46ef600b698d8dd0b5e8a04ed69 Mon Sep 17 00:00:00 2001 From: Guillermo Date: Fri, 23 Aug 2024 12:34:51 -0600 Subject: [PATCH 046/119] Fix broken things after merge --- kpi/tests/api/v2/test_api_service_usage.py | 20 -------------------- kpi/tests/test_usage_calculator.py | 9 ++++++--- kpi/utils/usage_calculator.py | 6 +----- 3 files changed, 7 insertions(+), 28 deletions(-) diff --git a/kpi/tests/api/v2/test_api_service_usage.py b/kpi/tests/api/v2/test_api_service_usage.py index 5a0de08de0..68af6b2bbb 100644 --- a/kpi/tests/api/v2/test_api_service_usage.py +++ b/kpi/tests/api/v2/test_api_service_usage.py @@ -132,23 +132,3 @@ def test_no_deployment(self): assert response.data['total_submission_count']['all_time'] == 0 assert response.data['total_nlp_usage']['asr_seconds_all_time'] == 0 assert response.data['total_storage_bytes'] == 0 - - def test_service_usages_with_projects_in_trash_bin(self): - self.test_multiple_forms() - # Simulate trash bin - self.asset.pending_delete = True - self.asset.save( - update_fields=['pending_delete'], - create_version=False, - adjust_content=False, - ) - self.xform.pending_delete = True - self.xform.save(update_fields=['pending_delete']) - - # Retry endpoint - url = reverse(self._get_endpoint('service-usage-list')) - response = self.client.get(url) - - assert response.data['total_submission_count']['current_month'] == 3 - assert response.data['total_submission_count']['all_time'] == 3 - assert response.data['total_storage_bytes'] == 0 diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py index 5b53f6f944..be4255a235 
100644 --- a/kpi/tests/test_usage_calculator.py +++ b/kpi/tests/test_usage_calculator.py @@ -4,6 +4,7 @@ from dateutil.relativedelta import relativedelta from django.conf import settings from django.test import override_settings +from django.urls import reverse from django.utils import timezone from model_bakery import baker @@ -145,16 +146,18 @@ def add_submissions(self, count=2): self.attachment_id = self.attachment_id + 2 submissions.append(submission) - self.asset.deployment.mock_submissions(submissions, flush_db=False) + self.asset.deployment.mock_submissions(submissions) def expected_file_size(self): """ Calculate the expected combined file size for the test audio clip and image """ return os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_clip.3gp' + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_clip.3gp' ) + os.path.getsize( - settings.BASE_DIR + '/kpi/tests/audio_conversion_test_image.jpg' + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg' ) diff --git a/kpi/utils/usage_calculator.py b/kpi/utils/usage_calculator.py index 1344d61c94..71d3d24175 100644 --- a/kpi/utils/usage_calculator.py +++ b/kpi/utils/usage_calculator.py @@ -17,10 +17,6 @@ ) from kobo.apps.stripe.constants import ACTIVE_STRIPE_STATUSES from kobo.apps.trackers.models import NLPUsageCounter -from kpi.deployment_backends.kc_access.shadow_models import ( - KobocatDailyXFormSubmissionCounter, - KobocatXForm, -) class ServiceUsageCalculator: @@ -138,7 +134,7 @@ def get_submission_counters(self): total_submission_count = {} for submission_key, count in submission_count.items(): - self.total_submission_count[submission_key] = ( + total_submission_count[submission_key] = ( count if count is not None else 0 ) From 79ea9b0004a72530b3c7de9bd474c99a2ee64bd2 Mon Sep 17 00:00:00 2001 From: Guillermo Date: Fri, 23 Aug 2024 19:37:34 -0600 Subject: [PATCH 047/119] Fix test and remove obsolete class --- .../tests/api/v2/test_api.py | 9 ----- kobo/apps/project_ownership/tests/utils.py | 38 ------------------- 2 files changed, 47 deletions(-) delete mode 100644 kobo/apps/project_ownership/tests/utils.py diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 9bd1e00a4d..59ec50bced 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -13,7 +13,6 @@ InviteStatusChoices, Transfer, ) -from kobo.apps.project_ownership.tests.utils import MockServiceUsageCalculator from kobo.apps.trackers.utils import update_nlp_counter from kpi.constants import PERM_VIEW_ASSET @@ -351,14 +350,6 @@ def __add_submissions(self): self.asset.deployment.mock_submissions(submissions) self.submissions = submissions - @patch( - 'kpi.utils.usage_calculator.ServiceUsageCalculator.get_storage_usage', - new=MockServiceUsageCalculator.get_storage_usage - ) - @patch( - 'kpi.utils.usage_calculator.ServiceUsageCalculator.get_submission_counters', - new=MockServiceUsageCalculator.get_submission_counters - ) @patch( 'kobo.apps.project_ownership.models.transfer.reset_kc_permissions', MagicMock() diff --git a/kobo/apps/project_ownership/tests/utils.py b/kobo/apps/project_ownership/tests/utils.py deleted file mode 100644 index 602f7e0775..0000000000 --- a/kobo/apps/project_ownership/tests/utils.py +++ /dev/null @@ -1,38 +0,0 @@ -from django.db.models import F -from kpi.models.asset import Asset - - -class MockServiceUsageCalculator: - - def 
get_storage_usage(self): - - assets = Asset.objects.annotate(user_id=F('owner_id')).filter( - self._user_id_query - ) - - total_storage_bytes = 0 - for asset in assets: - if asset.has_deployment: - for submission in asset.deployment.get_submissions(asset.owner): - total_storage_bytes += sum( - [att['bytes'] for att in submission['_attachments']] - ) - return total_storage_bytes - - def get_submission_counters(self): - total_submission_count = { - 'all_time': 0, - 'current_year': 0, - 'current_month': 0, - } - assets = Asset.objects.annotate(user_id=F('owner_id')).filter( - self._user_id_query - ) - for asset in assets: - if asset.has_deployment: - submissions = asset.deployment.get_submissions(asset.owner) - total_submission_count['all_time'] += len(submissions) - total_submission_count['current_year'] += len(submissions) - total_submission_count['current_month'] += len(submissions) - - return total_submission_count From 9e4bb41748d9970904939216fda751e62535943f Mon Sep 17 00:00:00 2001 From: Guillermo Date: Tue, 27 Aug 2024 08:52:01 -0600 Subject: [PATCH 048/119] Remove unused class properties --- kpi/tests/test_usage_calculator.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py index be4255a235..921ab99c96 100644 --- a/kpi/tests/test_usage_calculator.py +++ b/kpi/tests/test_usage_calculator.py @@ -31,9 +31,6 @@ class BaseServiceUsageTestCase(BaseAssetTestCase): URL_NAMESPACE = ROUTER_URL_NAMESPACE - attachment_id = 0 - counter = None - def setUp(self): super().setUp() self.client.login(username='anotheruser', password='anotheruser') @@ -128,13 +125,11 @@ def add_submissions(self, count=2): '_uuid': str(uuid.uuid4()), '_attachments': [ { - 'id': self.attachment_id, 'download_url': 'http://testserver/anotheruser/audio_conversion_test_clip.3gp', 'filename': 'anotheruser/audio_conversion_test_clip.3gp', 'mimetype': 'video/3gpp', }, { - 'id': self.attachment_id + 1, 'download_url': 'http://testserver/anotheruser/audio_conversion_test_image.jpg', 'filename': 'anotheruser/audio_conversion_test_image.jpg', 'mimetype': 'image/jpeg', @@ -143,7 +138,6 @@ def add_submissions(self, count=2): '_submitted_by': 'anotheruser', } # increment the attachment ID for each attachment created - self.attachment_id = self.attachment_id + 2 submissions.append(submission) self.asset.deployment.mock_submissions(submissions) From f9fd86edba9cc4833057d963edfef972c5b1c476 Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 27 Aug 2024 11:16:48 -0400 Subject: [PATCH 049/119] feat: log authorized application requests to authenticate user --- kobo/apps/audit_log/models.py | 13 ++++++++++- kobo/apps/audit_log/tests/test_models.py | 23 +++++++++++++++++++ .../audit_log/tests/test_one_time_auth.py | 21 +++++++++++++++++ kpi/constants.py | 1 + kpi/models/authorized_application.py | 5 +++- kpi/urls/__init__.py | 3 ++- kpi/views/__init__.py | 10 ++++++++ 7 files changed, 73 insertions(+), 3 deletions(-) diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index 226962c1d2..a78e63f470 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -96,8 +96,16 @@ def save( @staticmethod def create_access_log_for_request( - request, user=None, authentication_type: str = None + request, + user=None, + authentication_type: str = None, + extra_metadata: dict = None, ): + """ + Create an access log for a request, assigned to either the given user or request.user if not supplied + + Note: Data passed in extra_metadata 
will override default values for the same key + """ logged_in_user = user or request.user # django-loginas will keep the superuser as the _cached_user while request.user is set to the new one @@ -151,6 +159,9 @@ def create_access_log_for_request( if is_loginas: metadata['initial_user_uid'] = initial_user.extra_details.uid metadata['initial_user_username'] = initial_user.username + # add any other metadata the caller may want + if extra_metadata is not None: + metadata.update(extra_metadata) audit_log = AuditLog( user=logged_in_user, app_label=ACCESS_LOG_KOBO_AUTH_APP_LABEL, diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index cb5f35c18c..70e5bce893 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -138,3 +138,26 @@ def test_create_auth_log_unknown_authenticator( 'auth_type': ACCESS_LOG_UNKNOWN_AUTH_TYPE, }, ) + + def test_create_auth_log_with_extra_metadata( + self, patched_ip, patched_source + ): + request = self._create_request( + reverse('api_v2:asset-list'), + AnonymousUser(), + AuditLogModelTestCase.super_user, + ) + extra_metadata = {'foo': 'bar'} + log: AuditLog = AuditLog.create_access_log_for_request( + request, authentication_type='Token', extra_metadata=extra_metadata + ) + self._check_common_fields(log, AuditLogModelTestCase.super_user) + self.assertDictEqual( + log.metadata, + { + 'ip_address': '127.0.0.1', + 'source': 'source', + 'auth_type': 'Token', + 'foo': 'bar', + }, + ) diff --git a/kobo/apps/audit_log/tests/test_one_time_auth.py b/kobo/apps/audit_log/tests/test_one_time_auth.py index d3db488dea..205709c2e3 100644 --- a/kobo/apps/audit_log/tests/test_one_time_auth.py +++ b/kobo/apps/audit_log/tests/test_one_time_auth.py @@ -10,6 +10,7 @@ from kobo.apps.audit_log.models import AuditAction, AuditLog from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.main.models import UserProfile +from kpi.models import AuthorizedApplication from kpi.tests.base_test_case import BaseTestCase @@ -154,6 +155,26 @@ def side_effect(request): self.assertTrue(log_exists) self.assertEqual(AuditLog.objects.count(), 1) + def test_authorized_application_auth_creates_log(self): + app: AuthorizedApplication = AuthorizedApplication(name='Auth app') + app.save() + header = {'HTTP_AUTHORIZATION': f'Token {app.key}'} + response = self.client.post( + reverse('authenticate_user'), + **header, + data={'username': 'test', 'password': 'test'}, + ) + # this log should belong to the user, not the app, and have a bit of extra metadata + access_log_qs = AuditLog.objects.filter( + user_uid=TestOneTimeAuthentication.user.extra_details.uid, + action=AuditAction.AUTH, + metadata__auth_type='authorized-application', + ) + self.assertTrue(access_log_qs.exists()) + self.assertEqual(AuditLog.objects.count(), 1) + access_log = access_log_qs.first() + self.assertEqual(access_log.metadata['authorized_app_name'], 'Auth app') + def test_failed_request_does_not_create_log(self): self.client.get(reverse('data-list')) self.assertEqual(AuditLog.objects.count(), 0) diff --git a/kpi/constants.py b/kpi/constants.py index 215f1aaa21..2bcbaedb57 100644 --- a/kpi/constants.py +++ b/kpi/constants.py @@ -143,3 +143,4 @@ ACCESS_LOG_LOGINAS_AUTH_TYPE = 'django-loginas' ACCESS_LOG_UNKNOWN_AUTH_TYPE = 'unknown' ACCESS_LOG_SUBMISSION_AUTH_TYPE = 'submission' +ACCESS_LOG_AUTHORIZED_APP_TYPE = 'authorized-application' diff --git a/kpi/models/authorized_application.py b/kpi/models/authorized_application.py index 
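A minimal illustration of the override behaviour documented above: the defaults are computed first and `extra_metadata` is merged last, so caller-supplied keys win (all values below are made up):

    metadata = {'ip_address': '127.0.0.1', 'source': 'source', 'auth_type': 'token'}
    extra_metadata = {'auth_type': 'authorized-application', 'authorized_app_name': 'Auth app'}
    metadata.update(extra_metadata)
    assert metadata['auth_type'] == 'authorized-application'  # overridden by the caller
    assert metadata['ip_address'] == '127.0.0.1'              # default kept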
2f14417f6f..675966a81e 100644 --- a/kpi/models/authorized_application.py +++ b/kpi/models/authorized_application.py @@ -33,7 +33,10 @@ class ApplicationTokenAuthentication(TokenAuthentication): def authenticate_credentials(self, key): """ Mostly duplicated from TokenAuthentication, except that we return - an AnonymousUser """ + an AnonymousUser + + We also do not create an AuditLog here because we only want to do so for certain endpoints, + and only after we get the user being accessed""" try: token = self.model.objects.get(key=key) except self.model.DoesNotExist: diff --git a/kpi/urls/__init__.py b/kpi/urls/__init__.py index 5f45bc5811..786df65e38 100644 --- a/kpi/urls/__init__.py +++ b/kpi/urls/__init__.py @@ -34,7 +34,8 @@ re_path(r'^o/', include('oauth2_provider.urls', namespace='oauth2_provider')), re_path( r'^authorized_application/authenticate_user/$', - authorized_application_authenticate_user + authorized_application_authenticate_user, + name='authenticate_user' ), path('browser_tests/', browser_tests), path('modern_browsers/', modern_browsers), diff --git a/kpi/views/__init__.py b/kpi/views/__init__.py index 3fb2be0860..d30884957b 100644 --- a/kpi/views/__init__.py +++ b/kpi/views/__init__.py @@ -5,7 +5,9 @@ from rest_framework.decorators import api_view, authentication_classes from rest_framework.response import Response +from kobo.apps.audit_log.models import AuditLog from kobo.apps.kobo_auth.shortcuts import User +from kpi.constants import ACCESS_LOG_AUTHORIZED_APP_TYPE from kpi.models import AuthorizedApplication from kpi.models.authorized_application import ApplicationTokenAuthentication from kpi.serializers import AuthorizedApplicationUserSerializer @@ -58,6 +60,14 @@ def authorized_application_authenticate_user(request): ) for attribute in user_attributes_to_return: response_data[attribute] = getattr(user, attribute) + # usually we would do this at the authentication level but because this is + # authenticated as the application and not the user, we do it here so + # we can have the user information + extra_metadata_for_log = {'authorized_app_name': request.auth.name} + log = AuditLog.create_access_log_for_request( + request, user, ACCESS_LOG_AUTHORIZED_APP_TYPE, extra_metadata_for_log + ) + log.save() return Response(response_data) From 78eb2040fd4d18a823fdd327206d0ae07efbc6e9 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 28 Aug 2024 12:04:10 -0400 Subject: [PATCH 050/119] Remove deprecated documentation --- kobo/apps/openrosa/apps/api/viewsets/data_viewset.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index be8402cc8f..82c47e77f5 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -324,7 +324,6 @@ class DataViewSet(AnonymousUserPublicFormsMixin, OpenRosaModelViewSet): > "timestamp": 1513299978, > "by_whom ": "John Doe", > "uid": "validation_status_approved", - > "color": "#00ff00", > "label: "Approved" > } @@ -351,7 +350,6 @@ class DataViewSet(AnonymousUserPublicFormsMixin, OpenRosaModelViewSet): > "timestamp": 1513299978, > "by_whom ": "John Doe", > "uid": "validation_status_not_approved", - > "color": "#ff0000", > "label": "Not Approved" > } From 54d3e00280b5cbc64616080b0faecc80529e42fa Mon Sep 17 00:00:00 2001 From: rgraber Date: Thu, 29 Aug 2024 16:44:25 -0400 Subject: [PATCH 051/119] fixup!: someone moved my delete --- kpi/views/v2/user.py | 4 ---- 1 file changed, 4 
deletions(-) diff --git a/kpi/views/v2/user.py b/kpi/views/v2/user.py index 4d7e0ff627..b791b41909 100644 --- a/kpi/views/v2/user.py +++ b/kpi/views/v2/user.py @@ -29,10 +29,6 @@ class UserViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin): 'username__icontains', ] - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.authentication_classes += [ApplicationTokenAuthentication] - def get_serializer_class(self): if self.action == 'list': return UserListSerializer From e3b42e11128f5ee5d11399e6736ff728e222630f Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 29 Aug 2024 18:40:57 -0400 Subject: [PATCH 052/119] Remove QPath from back-end code --- .../actions/automatic_transcription.py | 9 +- kobo/apps/subsequences/actions/base.py | 4 +- .../subsequences/actions/keyword_search.py | 19 ++- kobo/apps/subsequences/actions/qual.py | 47 +++--- kobo/apps/subsequences/actions/translation.py | 9 +- .../advanced_features_params_schema.py | 21 ++- kobo/apps/subsequences/api_view.py | 36 ++++- .../subsequences/integrations/google/base.py | 12 +- .../integrations/google/google_transcribe.py | 5 +- .../integrations/google/google_translate.py | 5 +- kobo/apps/subsequences/models.py | 6 +- ...add_qual_to_last_question_of_last_asset.py | 139 +++++++++++------- .../tests/test_known_cols_utils.py | 91 ++++++------ .../tests/test_submission_stream.py | 6 +- kobo/apps/subsequences/utils/__init__.py | 7 +- kobo/apps/subsequences/utils/deprecation.py | 19 +++ .../determine_export_cols_with_values.py | 21 +-- .../subsequences/utils/parse_known_cols.py | 79 +++++----- kpi/mixins/formpack_xlsform_utils.py | 2 +- kpi/models/asset.py | 63 +++++--- kpi/tests/test_asset_content.py | 9 +- kpi/tests/test_asset_versions.py | 6 +- kpi/utils/absolute_paths.py | 7 - kpi/views/environment.py | 4 +- 24 files changed, 353 insertions(+), 273 deletions(-) create mode 100644 kobo/apps/subsequences/utils/deprecation.py diff --git a/kobo/apps/subsequences/actions/automatic_transcription.py b/kobo/apps/subsequences/actions/automatic_transcription.py index 3e16ed91ca..960288f31b 100644 --- a/kobo/apps/subsequences/actions/automatic_transcription.py +++ b/kobo/apps/subsequences/actions/automatic_transcription.py @@ -9,25 +9,26 @@ DT_MOD = BaseAction.DATE_MODIFIED_FIELD DT_CREATED = BaseAction.DATE_CREATED_FIELD + class AutomaticTranscriptionAction(BaseAction): ID = 'transcript' MANUAL = 'user_transcribed' @classmethod - def build_params(kls, params, content): + def build_params(cls, params, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) params = {'values': possible_transcribed_fields, 'services': []} return params @classmethod - def get_values_for_content(kls, content): + def get_values_for_content(cls, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) return possible_transcribed_fields def load_params(self, params): diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index fdae53cc79..c7993c4756 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -106,9 +106,9 @@ def has_change(self, original, edit): def build_params(kls, *args, 
**kwargs): raise NotImplementedError(f'{kls.__name__} has not implemented a build_params method') - def get_qpath(self, row): + def get_xpath(self, row): # return the full path... - for name_field in ['qpath', 'name', '$autoname']: + for name_field in ['xpath', 'name', '$autoname']: if name_field in row: return row[name_field] return None diff --git a/kobo/apps/subsequences/actions/keyword_search.py b/kobo/apps/subsequences/actions/keyword_search.py index 2f3afdd869..312162eb4e 100644 --- a/kobo/apps/subsequences/actions/keyword_search.py +++ b/kobo/apps/subsequences/actions/keyword_search.py @@ -1,26 +1,27 @@ import copy from ..actions.base import BaseAction, ACTION_NEEDED, PASSES + class KeywordSearchAction(BaseAction): ID = 'keyword_search' ''' @classmethod - def build_params(kls, params, content): + def build_params(cls, params, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) params = {'values': possible_transcribed_fields} return params ''' @classmethod - def get_values_for_content(kls, content): + def get_values_for_content(cls, content): possible_transcribed_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - possible_transcribed_fields.append(kls.get_qpath(kls, row)) + possible_transcribed_fields.append(cls.get_xpath(cls, row)) return possible_transcribed_fields def load_params(self, params): @@ -68,8 +69,9 @@ def check_submission_status(self, submission): response = self._traverse_object(submission, source) except KeyError: continue - qpath = source.split('/')[0] - all_output = submission[qpath].setdefault(self.ID, []) + # FIXME QPATH + xpath = source.split('/')[0] + all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) if not this_output: return ACTION_NEEDED @@ -90,8 +92,9 @@ def run_change(self, submission): matches = 0 for keyword in query['keywords']: matches += response['value'].count(keyword) - qpath = source.split('/')[0] - all_output = submission[qpath].setdefault(self.ID, []) + # FIXME QPATH + xpath = source.split('/')[0] + all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) if not this_output: this_output = copy.deepcopy(query) diff --git a/kobo/apps/subsequences/actions/qual.py b/kobo/apps/subsequences/actions/qual.py index 1092e94901..e1a6b53902 100644 --- a/kobo/apps/subsequences/actions/qual.py +++ b/kobo/apps/subsequences/actions/qual.py @@ -6,7 +6,7 @@ class QualAction(BaseAction): ID = 'qual' @classmethod - def build_params(kls, survey_content): + def build_params(cls, survey_content): _fields = [] for row in survey_content.get('survey', []): if row['type'] in ['audio', 'video']: @@ -16,22 +16,22 @@ def build_params(kls, survey_content): def load_params(self, params): ''' Action.load_params is called when the instance is initialized - for each Asset. It will + for each Asset. It will ''' self.fields = params.get('values', []) self.qual_survey = params.get('qual_survey', []) self.everything_else = params @classmethod - def get_values_for_content(kls, content): - ''' + def get_values_for_content(cls, content): + """ If no "values" are defined for a given asset, then this method will generate a set of defaults. 
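The renamed `get_xpath()` helper in `base.py` above keeps the same lookup order, only preferring `xpath` over the removed `qpath`; a standalone sketch of that fallback, using plain dicts and made-up names:

    def get_xpath(row):  # same lookup order as BaseAction.get_xpath() above
        for name_field in ['xpath', 'name', '$autoname']:
            if name_field in row:
                return row[name_field]
        return None

    assert get_xpath({'xpath': 'group/audio_q', 'name': 'audio_q'}) == 'group/audio_q'
    assert get_xpath({'name': 'text_q'}) == 'text_q'
    assert get_xpath({'$autoname': 'auto_q'}) == 'auto_q'
    assert get_xpath({}) is None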
- ''' + """ values = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video']: - values.append(kls.get_qpath(kls, row)) + values.append(cls.get_xpath(cls, row)) return values def modify_jsonschema(self, schema): @@ -40,29 +40,32 @@ def modify_jsonschema(self, schema): for qual_item in self.qual_survey: if qual_item.get('scope') != 'by_question#survey': - raise NotImplementedError('by_question#survey is ' - 'the only implementation') - item_qpath = qual_item.get('qpath') - field_def = schema['properties'].setdefault( - item_qpath, - {'type': 'object', - 'additionalProperties': False, - 'properties': { - self.ID: { - 'type': 'array', - 'items': { - '$ref': '#/definitions/qual_item', + raise NotImplementedError( + 'by_question#survey is the only implementation' + ) + item_xpath = qual_item.get('xpath') + schema['properties'].setdefault( + item_xpath, + { + 'type': 'object', + 'additionalProperties': False, + 'properties': { + self.ID: { + 'type': 'array', + 'items': { + '$ref': '#/definitions/qual_item', + }, } - } - }}, + }, + }, ) return schema def compile_revised_record(self, content, edits): - ''' + """ a method that applies changes to a json structure but stores NO revision history - ''' + """ for field_name, vals in edits.items(): if field_name == 'submission': continue diff --git a/kobo/apps/subsequences/actions/translation.py b/kobo/apps/subsequences/actions/translation.py index 5c5801bd8d..644a6dd985 100644 --- a/kobo/apps/subsequences/actions/translation.py +++ b/kobo/apps/subsequences/actions/translation.py @@ -11,21 +11,20 @@ class TranslationAction(BaseAction): MANUAL = 'user_translated' @classmethod - def build_params(kls, survey_content): - audio_questions = [] + def build_params(cls, survey_content): translatable_fields = [] for row in survey_content.get('survey', []): if row['type'] in ['audio', 'video', 'text']: - translatable_fields.append(kls.get_qpath(kls, row)) + translatable_fields.append(cls.get_xpath(cls, row)) params = {'values': translatable_fields} return params @classmethod - def get_values_for_content(kls, content): + def get_values_for_content(cls, content): translatable_fields = [] for row in content.get('survey', []): if row['type'] in ['audio', 'video', 'text']: - name = kls.get_qpath(kls, row) + name = cls.get_xpath(cls, row) if name: translatable_fields.append(name) return translatable_fields diff --git a/kobo/apps/subsequences/advanced_features_params_schema.py b/kobo/apps/subsequences/advanced_features_params_schema.py index 90bb507110..846ce6f2ca 100644 --- a/kobo/apps/subsequences/advanced_features_params_schema.py +++ b/kobo/apps/subsequences/advanced_features_params_schema.py @@ -1,10 +1,10 @@ -''' +""" When setting "asset.advanced_features", the value is compared against this jsonschema. As "advanced_features" are added to the code, this schema will grow to describe what is needed. 
-''' +""" ADVANCED_FEATURES_PARAMS_SCHEMA = { 'type': 'object', @@ -25,23 +25,20 @@ 'type': 'array', 'items': {'type': 'string'}, }, - } + }, }, 'translation': { 'type': 'object', 'properties': { - 'languages': { - 'type': 'array', - 'items': {'type': 'string'} - }, + 'languages': {'type': 'array', 'items': {'type': 'string'}}, 'values': { 'type': 'array', 'items': {'type': 'string'}, }, }, - 'required': ['languages'] - } - } + 'required': ['languages'], + }, + }, } # User-defined qualitative analysis forms @@ -70,10 +67,10 @@ 'items': {'$ref': '#/$defs/qualChoice'}, }, 'scope': {'type': 'string'}, - 'qpath': {'type': 'string'}, + 'xpath': {'type': 'string'}, 'options': {'type': 'object'}, }, - 'required': ['uuid', 'type', 'labels', 'scope', 'qpath'], + 'required': ['uuid', 'type', 'labels', 'scope', 'xpath'], }, 'qualLabels': { 'type': 'object', diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index d53ebbfabe..77385b5681 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -1,23 +1,24 @@ -import json from copy import deepcopy from jsonschema import validate from jsonschema.exceptions import ValidationError as SchemaValidationError -from kobo.apps.subsequences.models import SubmissionExtras -from kpi.models import Asset -from kpi.permissions import SubmissionPermission -from kpi.views.environment import _check_asr_mt_access_for_user from rest_framework.exceptions import PermissionDenied from rest_framework.exceptions import ValidationError as APIValidationError from rest_framework.response import Response from rest_framework.views import APIView +from kobo.apps.subsequences.models import SubmissionExtras +from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath +from kpi.models import Asset +from kpi.permissions import SubmissionPermission +from kpi.views.environment import check_asr_mt_access_for_user + def _check_asr_mt_access_if_applicable(user, posted_data): # This is for proof-of-concept testing and will be replaced with proper # quotas and accounting MAGIC_STATUS_VALUE = 'requested' - user_has_access = _check_asr_mt_access_for_user(user) + user_has_access = check_asr_mt_access_for_user(user) if user_has_access: return True # Oops, no access. But did they request ASR/MT in the first place? @@ -97,8 +98,27 @@ def post(self, request, asset_uid, format=None): def get_submission_processing(asset, s_uuid): try: - submission = asset.submission_extras.get(submission_uuid=s_uuid) - return Response(submission.content) + submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) + + # TODO delete the loop when every asset is repopulated with `xpath` + # instead of `qpath`. + content = deepcopy(submission_extra.content) + changed = False + for old_xpath, values in submission_extra.content.items(): + if '-' in old_xpath and '/' not in old_xpath: + xpath = qpath_to_xpath(old_xpath, asset) + if xpath == old_xpath: + continue + + del content[old_xpath] + content[xpath] = values + changed = True + + if changed: + submission_extra.content = content + # TODO save submission_extra? 
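A standalone illustration of the key migration performed in `get_submission_processing()` above: legacy keys (old `qpath`) are dash-delimited, current keys (`xpath`) use slashes, and only dash-only keys are remapped. The question names are invented, and `old_key.replace('-', '/')` stands in for the real `qpath_to_xpath(old_key, asset)` lookup, which consults the asset content:

    content = {
        'group_media-What_do_you_see': {'transcript': {'value': '...'}},
        'Comments': {'translation': {'value': '...'}},  # no dash, left untouched
    }
    for old_key in list(content):
        if '-' in old_key and '/' not in old_key:
            new_key = old_key.replace('-', '/')  # stand-in for qpath_to_xpath()
            content[new_key] = content.pop(old_key)

    assert sorted(content) == ['Comments', 'group_media/What_do_you_see']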
+ + return Response(submission_extra.content) except SubmissionExtras.DoesNotExist: # submission might exist but no SubmissionExtras object has been created return Response({'info': f'nothing found for submission: {s_uuid}'}) diff --git a/kobo/apps/subsequences/integrations/google/base.py b/kobo/apps/subsequences/integrations/google/base.py index 4c8b0e8662..123e6e6a25 100644 --- a/kobo/apps/subsequences/integrations/google/base.py +++ b/kobo/apps/subsequences/integrations/google/base.py @@ -108,19 +108,9 @@ def handle_google_operation( return response @abstractmethod - def process_data(self, qpath: str, options: dict) -> dict: + def process_data(self, xpath: str, options: dict) -> dict: pass - def qpath_to_xpath(self, qpath: str) -> str: - xpath = None - for row in self.asset.content['survey']: - if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: - xpath = row['$xpath'] - break - if xpath is None: - raise KeyError(f'xpath for {qpath=} not found') - return xpath - def update_counters(self, amount) -> None: update_nlp_counter( self.counter_name, diff --git a/kobo/apps/subsequences/integrations/google/google_transcribe.py b/kobo/apps/subsequences/integrations/google/google_transcribe.py index 392a9c5a1a..53b3f804cd 100644 --- a/kobo/apps/subsequences/integrations/google/google_transcribe.py +++ b/kobo/apps/subsequences/integrations/google/google_transcribe.py @@ -121,7 +121,7 @@ def get_converted_audio( ) return attachment.get_transcoded_audio('flac', include_duration=True) - def process_data(self, qpath: str, vals: dict) -> dict: + def process_data(self, xpath: str, vals: dict) -> dict: autoparams = vals[GOOGLETS] language_code = autoparams.get('languageCode') region_code = autoparams.get('regionCode') @@ -130,10 +130,7 @@ def process_data(self, qpath: str, vals: dict) -> dict: 'languageCode': language_code, 'regionCode': region_code, } - xpath = self.qpath_to_xpath(qpath) region_or_language_code = region_code or language_code - result_string = '' - results = [] try: flac_content, duration = self.get_converted_audio( xpath, diff --git a/kobo/apps/subsequences/integrations/google/google_translate.py b/kobo/apps/subsequences/integrations/google/google_translate.py index 6604418660..abbff2a2d1 100644 --- a/kobo/apps/subsequences/integrations/google/google_translate.py +++ b/kobo/apps/subsequences/integrations/google/google_translate.py @@ -173,12 +173,11 @@ def get_unique_paths( ) return source_path, output_path - def process_data(self, qpath: str, vals: dict) -> dict: + def process_data(self, xpath: str, vals: dict) -> dict: """ - Translates the value for a given qpath and it's json values. + Translates the value for a given xpath and its json values. 
""" autoparams = vals[GOOGLETX] - xpath = self.qpath_to_xpath(qpath) try: content = vals['transcript']['value'] source_lang = vals['transcript']['languageCode'] diff --git a/kobo/apps/subsequences/models.py b/kobo/apps/subsequences/models.py index 769f774aae..e5a85a27b1 100644 --- a/kobo/apps/subsequences/models.py +++ b/kobo/apps/subsequences/models.py @@ -33,17 +33,17 @@ def save(self, *args, **kwargs): from .integrations.google.google_translate import GoogleTranslationService features = self.asset.advanced_features - for qpath, vals in self.content.items(): + for xpath, vals in self.content.items(): if 'transcript' in features: options = vals.get(GOOGLETS, {}) if options.get('status') == 'requested': service = GoogleTranscriptionService(self) - vals[GOOGLETS] = service.process_data(qpath, vals) + vals[GOOGLETS] = service.process_data(xpath, vals) if 'translation' in features: options = vals.get(GOOGLETX, {}) if options.get('status') == 'requested': service = GoogleTranslationService(self) - vals[GOOGLETX] = service.process_data(qpath, vals) + vals[GOOGLETX] = service.process_data(xpath, vals) asset_changes = False asset_known_cols = self.asset.known_cols diff --git a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py index fefcf7dc78..3cd6128590 100644 --- a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py +++ b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py @@ -1,77 +1,112 @@ import json from kpi.models import Asset from jsonschema import validate -from pprint import pprint -EXAMPLES = [{'labels': {'_default': 'Any descriptors?'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_tags', - 'uuid': '00000000-0000-0000-0000-000000000000'}, - {'labels': {'_default': 'Short summary (one sentence)'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_text', - 'uuid': '11111111-1111-1111-1111-111111111111'}, - {'labels': {'_default': 'How many people are heard speaking in this ' - 'response?'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_integer', - 'uuid': '22222222-2222-2222-2222-222222222222'}, - {'choices': [{'labels': {'_default': 'Yes'}, - 'uuid': '44444444-4444-4444-4444-444444444444'}, - {'labels': {'_default': 'No'}, - 'uuid': '55555555-5555-5555-5555-555555555555'}], - 'labels': {'_default': 'Do they describe the facility as being well ' - 'maintained?'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_select_one', - 'uuid': '33333333-3333-3333-3333-333333333333'}, - {'choices': [{'labels': {'_default': 'Lighting'}, - 'uuid': '77777777-7777-7777-7777-777777777777'}, - {'labels': {'_default': 'Ventilation'}, - 'uuid': '88888888-8888-8888-8888-888888888888'}, - {'labels': {'_default': 'Security'}, - 'uuid': '99999999-9999-9999-9999-999999999999'}], - 'labels': {'_default': 'Select any mentioned areas of concern'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_select_multiple', - 'uuid': '66666666-6666-6666-6666-666666666666'}, - {'labels': {'_default': 'Please respect the confidentiality of our ' - 'respondents.'}, - 'qpath': '', - 'scope': 'by_question#survey', - 'type': 'qual_note', - 'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}] +EXAMPLES = [ + { + 'labels': {'_default': 'Any descriptors?'}, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_tags', + 'uuid': '00000000-0000-0000-0000-000000000000', + }, + { + 'labels': {'_default': 'Short 
summary (one sentence)'}, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_text', + 'uuid': '11111111-1111-1111-1111-111111111111', + }, + { + 'labels': { + '_default': 'How many people are heard speaking in this ' + 'response?' + }, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_integer', + 'uuid': '22222222-2222-2222-2222-222222222222', + }, + { + 'choices': [ + { + 'labels': {'_default': 'Yes'}, + 'uuid': '44444444-4444-4444-4444-444444444444', + }, + { + 'labels': {'_default': 'No'}, + 'uuid': '55555555-5555-5555-5555-555555555555', + }, + ], + 'labels': { + '_default': 'Do they describe the facility as being well ' + 'maintained?' + }, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_select_one', + 'uuid': '33333333-3333-3333-3333-333333333333', + }, + { + 'choices': [ + { + 'labels': {'_default': 'Lighting'}, + 'uuid': '77777777-7777-7777-7777-777777777777', + }, + { + 'labels': {'_default': 'Ventilation'}, + 'uuid': '88888888-8888-8888-8888-888888888888', + }, + { + 'labels': {'_default': 'Security'}, + 'uuid': '99999999-9999-9999-9999-999999999999', + }, + ], + 'labels': {'_default': 'Select any mentioned areas of concern'}, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_select_multiple', + 'uuid': '66666666-6666-6666-6666-666666666666', + }, + { + 'labels': { + '_default': 'Please respect the confidentiality of our ' + 'respondents.' + }, + 'xpath': '', + 'scope': 'by_question#survey', + 'type': 'qual_note', + 'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', + }, +] EXAMPLE_QUAL_SURVEY_JSON = json.dumps({'qual_survey': EXAMPLES}) + def run(): asset = Asset.objects.order_by('-date_created')[0] - final_question_qpath = None + final_question_xpath = None for row in reversed(asset.content['survey']): if row['type'] in ('audio', 'video'): - final_question_qpath = row['$qpath'] - if not final_question_qpath: + final_question_xpath = row['$xpath'] + if not final_question_xpath: raise RuntimeError( 'Survey does not contain any audio or video question' ) asset.advanced_features['qual'] = json.loads( - EXAMPLE_QUAL_SURVEY_JSON.replace('', final_question_qpath) + EXAMPLE_QUAL_SURVEY_JSON.replace('', final_question_xpath) ) asset.save() if asset.submission_extras.count() == 0: - print('If a submission_extras model exists, this script will populate ' - 'it with sample data') - # asset.submission_extras.create(submission_uuid='...') + print( + 'If a submission_extras model exists, this script will populate ' + 'it with sample data' + ) else: subex = asset.submission_extras.last() subex_content_schema = asset.get_advanced_submission_schema() - subex.content[final_question_qpath] = { + subex.content[final_question_xpath] = { 'qual': [ { 'uuid': '00000000-0000-0000-0000-000000000000', diff --git a/kobo/apps/subsequences/tests/test_known_cols_utils.py b/kobo/apps/subsequences/tests/test_known_cols_utils.py index da550dbe59..291ac4283c 100644 --- a/kobo/apps/subsequences/tests/test_known_cols_utils.py +++ b/kobo/apps/subsequences/tests/test_known_cols_utils.py @@ -1,15 +1,10 @@ -import pytest - - -from kobo.apps.subsequences.utils.parse_known_cols import ( - parse_known_cols, -) +from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols def test_known_cols_transc_duplicates(): results = parse_known_cols([ - 'col-qpath:transc_a:en', - 'col-qpath:transc_a:en', + 'col/xpath:transc_a:en', + 'col/xpath:transc_a:en', ]) assert len(results) == 1 assert results[0]['language'] == 'en' @@ -17,63 +12,61 @@ def 
test_known_cols_transc_duplicates(): def test_known_cols_transl_duplicates(): results = parse_known_cols([ - 'col-qpath:transl_a:fr', - 'col-qpath:transl_a:fr', + 'col/xpath:transl_a:fr', + 'col/xpath:transl_a:fr', ]) assert len(results) == 1 def test_known_cols_transc_uniqs(): results = parse_known_cols([ - 'col-qpath1:transc_a:en', - 'col-qpath1:transc_b:fr', - 'col-qpath2:transc_a:en', - 'col-qpath2:transc_b:fr', + 'col/xpath1:transc_a:en', + 'col/xpath1:transc_b:fr', + 'col/xpath2:transc_a:en', + 'col/xpath2:transc_b:fr', ]) assert len(results) == 4 rs = {} - for prop in ['language', 'label', 'qpath']: + for prop in ['language', 'label', 'xpath']: rs[prop] = [rr[prop] for rr in results] assert rs['language'] == ['en', 'fr', 'en', 'fr'] assert rs['label'] == [ - 'qpath1 - transcript', - 'qpath1 - transcript', - 'qpath2 - transcript', - 'qpath2 - transcript', + 'xpath1 - transcript', + 'xpath1 - transcript', + 'xpath2 - transcript', + 'xpath2 - transcript', ] - assert rs['qpath'] == [ - 'col-qpath1-transcript-en', - 'col-qpath1-transcript-fr', - 'col-qpath2-transcript-en', - 'col-qpath2-transcript-fr', + assert rs['xpath'] == [ + 'col/xpath1/transcript/en', + 'col/xpath1/transcript/fr', + 'col/xpath2/transcript/en', + 'col/xpath2/transcript/fr', ] def test_known_cols_transl_uniqs(): results = parse_known_cols([ - 'col-qpath1:transl_a:en', - 'col-qpath1:transl_b:fr', - 'col-qpath2:transl_a:en', - 'col-qpath2:transl_b:fr', + 'col/xpath1:transl_a:en', + 'col/xpath1:transl_b:fr', + 'col/xpath2:transl_a:en', + 'col/xpath2:transl_b:fr', ]) assert len(results) == 4 - langs = [r['language'] for r in results] - labls = [r['label'] for r in results] - qpths = [r['qpath'] for r in results] - assert qpths == [ - 'col-qpath1-translation-en', - 'col-qpath1-translation-fr', - 'col-qpath2-translation-en', - 'col-qpath2-translation-fr', + xpaths = [r['xpath'] for r in results] + assert xpaths == [ + 'col/xpath1/translation/en', + 'col/xpath1/translation/fr', + 'col/xpath2/translation/en', + 'col/xpath2/translation/fr', ] def test_known_cols_combos(): results = parse_known_cols([ - 'col-qpath1:transl_a:en', - 'col-qpath1:transl_b:fr', - 'col-qpath2:transl_a:en', - 'col-qpath2:transl_b:fr', + 'col/xpath1:transl_a:en', + 'col/xpath1:transl_b:fr', + 'col/xpath2:transl_a:en', + 'col/xpath2:transl_b:fr', ]) langs = [r['language'] for r in results] assert langs == ['en', 'fr', 'en', 'fr'] @@ -86,20 +79,20 @@ def test_known_cols_grouped_source(): results = parse_known_cols([ # `group` is the group name # `question` is the (source) question name - 'group-question:transcript:en', - 'group-question:translation:es', + 'group/question:transcript:en', + 'group/question:translation:es', ]) sources = [r['source'] for r in results] - qpaths = [r['qpath'] for r in results] + xpaths = [r['xpath'] for r in results] names = [r['name'] for r in results] - assert set(sources) == set(('group-question',)) - assert qpaths == [ - 'group-question-transcript-en', - 'group-question-translation-es', + assert set(sources) == set(('group/question',)) + assert xpaths == [ + 'group/question/transcript/en', + 'group/question/translation/es', ] assert names == [ # This can't be right (why a mixture of dash and slash delimiters?) 
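Putting the updated expectations together, a single known_cols entry now expands like this (worked example consistent with the tests above and with the `extend_col_deets()` changes later in this patch):

    from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols

    [field] = parse_known_cols(['group/question:transcript:en'])
    assert field['label'] == 'question - transcript'
    assert field['language'] == 'en'
    assert field['source'] == 'group/question'
    assert field['name'] == 'group/question/transcript_en'
    assert field['xpath'] == 'group/question/transcript/en'
    assert field['path'] == ['group/question', 'transcript']
    assert field['settings'] == {'mode': 'manual', 'engine': 'engines/transcript_manual'}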
but # it is at least what the front end expects - 'group-question/transcript_en', - 'group-question/translation_es', + 'group/question/transcript_en', + 'group/question/translation_es', ] diff --git a/kobo/apps/subsequences/tests/test_submission_stream.py b/kobo/apps/subsequences/tests/test_submission_stream.py index a8e19b19a2..eaafb11984 100644 --- a/kobo/apps/subsequences/tests/test_submission_stream.py +++ b/kobo/apps/subsequences/tests/test_submission_stream.py @@ -26,7 +26,6 @@ def _create_asset(self): 'type': 'text', '$kuid': 'rc9ak31', 'label': ["What's your name?"], - '$qpath': 'What_s_your_name', '$xpath': 'What_s_your_name', 'required': False, '$autoname': 'What_s_your_name', @@ -35,7 +34,6 @@ def _create_asset(self): 'type': 'audio', '$kuid': 'ff6ek09', 'label': ['Tell me a story!'], - '$qpath': 'Tell_me_a_story', '$xpath': 'Tell_me_a_story', 'required': False, '$autoname': 'Tell_me_a_story', @@ -49,14 +47,14 @@ def _create_asset(self): { 'type': 'qual_integer', 'uuid': '1a2c8eb0-e2ec-4b3c-942a-c1a5410c081a', - 'qpath': 'Tell_me_a_story', + 'xpath': 'Tell_me_a_story', 'scope': 'by_question#survey', 'labels': {'_default': 'When was this recorded?'}, }, { 'type': 'qual_select_one', 'uuid': '1a8b748b-f470-4c40-bc09-ce2b1197f503', - 'qpath': 'Tell_me_a_story', + 'xpath': 'Tell_me_a_story', 'scope': 'by_question#survey', 'labels': { '_default': "What's the source of this story?" diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index d9ce7bceac..2f633da542 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -43,7 +43,6 @@ def advanced_feature_instances(content, actions): - action_instances = [] for action_id, action_params in actions.items(): action_kls = ACTIONS_BY_ID[action_id] if action_params is True: @@ -74,7 +73,7 @@ def populate_paths(_content): logging.error('missing row name', extra={'path': group_stack}) # /HOTFIX 2022-12-06 - row['qpath'] = '-'.join([*group_stack, rowname]) + row['xpath'] = '/'.join([*group_stack, rowname]) return content @@ -160,9 +159,9 @@ def stream_with_extras(submission_stream, asset): else: uuid = submission['_uuid'] - all_supplemental_details = deepcopy(extras.get(uuid, {})) - for qpath, supplemental_details in all_supplemental_details.items(): + # FIXME QPATH + for supplemental_details in all_supplemental_details.values(): try: all_qual_responses = supplemental_details['qual'] except KeyError: diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py new file mode 100644 index 0000000000..cd94a2d5cd --- /dev/null +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -0,0 +1,19 @@ + +def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: + """ + We have abandoned `qpath` attribute in favor of `xpath`. + Existing projects may still use it though. 
+ We need to find the equivalent `xpath` + """ + for row in asset.content['survey']: + if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: + return row['$xpath'] + + # Could not find it from the survey, let's try to detect it automatically + xpaths = asset.get_attachment_xpaths(deployed=True) + for xpath in xpaths: + dashed_xpath = xpath.replace('/', '-') + if dashed_xpath == qpath: + return xpath + + raise KeyError(f'xpath for {qpath} not found') diff --git a/kobo/apps/subsequences/utils/determine_export_cols_with_values.py b/kobo/apps/subsequences/utils/determine_export_cols_with_values.py index e945e007b3..220e4f12ed 100644 --- a/kobo/apps/subsequences/utils/determine_export_cols_with_values.py +++ b/kobo/apps/subsequences/utils/determine_export_cols_with_values.py @@ -34,32 +34,33 @@ def get_lang_code(key, tvals): def determine_export_cols_indiv(sub_ex_content): - ''' + """ used primarily when a SubmissionExtras object is saved. iterates through content to see which questions have transcripts/translations that need to end up in the export yields strings in this format- - ":transcript:" - ":translation:" - ''' - for qpath in sub_ex_content.keys(): - for key in sub_ex_content[qpath].keys(): - tvals = sub_ex_content[qpath][key] + ":transcript:" + ":translation:" + """ + + for xpath in sub_ex_content.keys(): + for key in sub_ex_content[xpath].keys(): + tvals = sub_ex_content[xpath][key] # if not is_non_null_submext_data(key, tvals): # continue dtype = KEY_TYPE_DICTS.get(key, key) - col_string = f'{qpath}:{dtype}' + col_string = f'{xpath}:{dtype}' for lang_code in get_lang_code(key, tvals): yield f'{col_string}:{lang_code}' def determine_export_cols_with_values(asset_submission_extras_all): - ''' + """ used in management command to rebuild asset.known_cols - ''' + """ col_strings = tuple() for sub_ex in asset_submission_extras_all: for col_string in determine_export_cols_indiv(sub_ex.content): diff --git a/kobo/apps/subsequences/utils/parse_known_cols.py b/kobo/apps/subsequences/utils/parse_known_cols.py index a0afdf19b7..4c4bb5c32b 100644 --- a/kobo/apps/subsequences/utils/parse_known_cols.py +++ b/kobo/apps/subsequences/utils/parse_known_cols.py @@ -1,5 +1,4 @@ -# coding: utf-8 -''' +""" this util parses the string of known_cols saved in the db and builds the structure that formpack expects to see in the asset.analysis_form_json() @@ -10,28 +9,31 @@ - q1:translt:de output is a more descriptive structure. 
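A quick usage sketch for the new helper: the `FakeAsset` below is a minimal stand-in exposing only what `qpath_to_xpath()` touches, i.e. `content['survey']` and `get_attachment_xpaths()`, with rows that no longer carry `$qpath`:

    from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath

    class FakeAsset:
        content = {'survey': [{'type': 'audio', '$xpath': 'g1/g2/r2'}]}

        def get_attachment_xpaths(self, deployed=True):
            return ['g1/r1', 'g1/g2/r2']

    # No row matches on $qpath, so the dash/slash fallback resolves the key.
    assert qpath_to_xpath('g1-g2-r2', FakeAsset()) == 'g1/g2/r2'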
(See test_parse_known_cols) -''' +""" from collections import defaultdict -def extend_col_deets(lang, coltype, label, q_path): +def extend_col_deets(lang: str, coltype: str, label: str, xpath: str) -> dict: # NB: refer to commit d013bfe0f5 when trying to figure out the original # intent here - name = q_path.split('-')[-1] - out = {'label': name} - out['dtpath'] = f'{q_path}/{coltype}_{lang}' - out['type'] = coltype - out['language'] = lang - out['label'] = f'{label} - {coltype}' - out['name'] = f'{q_path}/{coltype}_{lang}' - out['source'] = q_path - out['qpath'] = f'{q_path}-{coltype}-{lang}' - out['settings'] = {'mode': 'manual', 'engine': f'engines/{coltype}_manual'} - out['path'] = [q_path, coltype] + name = xpath.split('/')[-1] + out = { + 'label': name, + 'dtpath': f'{xpath}/{coltype}_{lang}', + 'type': coltype, + 'language': lang, + 'label': f'{label} - {coltype}', + 'name': f'{xpath}/{coltype}_{lang}', + 'source': xpath, + # FIXME QPATH + 'xpath': f'{xpath}/{coltype}/{lang}', + 'settings': {'mode': 'manual', 'engine': f'engines/{coltype}_manual'}, + 'path': [xpath, coltype], + } return out -def parse_field_cols(qpath, fieldcols): +def parse_field_cols(xpath, fieldcols): fcx = {'tsc': [], 'tsl': []} for fc in fieldcols: if 'tx' not in fc: @@ -52,30 +54,39 @@ def parse_field_cols(qpath, fieldcols): if len(langs['tsc']) > 0: for lang in langs['tsc']: - out.append(extend_col_deets(lang=lang, label=qpath.split('-')[-1], q_path=qpath, - coltype='transcript', - )) + out.append( + extend_col_deets( + lang=lang, + label=xpath.split('/')[-1], + xpath=xpath, + coltype='transcript', + ) + ) if len(langs['tsl']) > 0: for lang in langs['tsl']: - out.append(extend_col_deets(lang=lang, label=qpath.split('-')[-1], q_path=qpath, - coltype='translation', - )) + out.append( + extend_col_deets( + lang=lang, + label=xpath.split('/')[-1], + xpath=xpath, + coltype='translation', + ) + ) return out -def parse_known_cols(knownc): - by_qpath = defaultdict(list) - out = [] - if isinstance(knownc, dict): - knownc = knownc.get('known') - for fieldstr in knownc: +def parse_known_cols(known_columns): + by_xpath = defaultdict(list) + if isinstance(known_columns, dict): + known_columns = known_columns.get('known') + for fieldstr in known_columns: sects = fieldstr.split(':') - [qpath, field, *rest] = sects + [xpath, field, *_] = sects item = {'field': field} if len(sects) == 3: item['tx'] = sects[2] - by_qpath[qpath].append(item) - by_qpath_list = [] - for qpath, cols in by_qpath.items(): - by_qpath_list = [*by_qpath_list, *parse_field_cols(qpath, cols)] - return by_qpath_list + by_xpath[xpath].append(item) + by_xpath_list = [] + for xpath, cols in by_xpath.items(): + by_xpath_list = [*by_xpath_list, *parse_field_cols(xpath, cols)] + return by_xpath_list diff --git a/kpi/mixins/formpack_xlsform_utils.py b/kpi/mixins/formpack_xlsform_utils.py index c84ef7f4b6..3a9d3e5b60 100644 --- a/kpi/mixins/formpack_xlsform_utils.py +++ b/kpi/mixins/formpack_xlsform_utils.py @@ -68,7 +68,7 @@ def _autoname(self, content): autoname_fields_in_place(content, '$autoname') autovalue_choices_in_place(content, '$autovalue') - def _insert_qpath(self, content): + def _insert_xpath(self, content): insert_full_paths_in_place(content) def _populate_fields_with_autofields(self, content): diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 5cdee0d240..8f542e884e 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -32,6 +32,7 @@ advanced_feature_instances, advanced_submission_jsonschema, ) +from 
kobo.apps.subsequences.utils.deprecation import qpath_to_xpath from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( ASSET_TYPES, @@ -83,7 +84,6 @@ from kpi.utils.object_permission import get_cached_code_names from kpi.utils.sluggify import sluggify_label - class AssetDeploymentStatus(models.TextChoices): ARCHIVED = 'archived', 'Archived' @@ -448,7 +448,7 @@ def adjust_content_on_save(self): self._strip_empty_rows(self.content) self._assign_kuids(self.content) self._autoname(self.content) - self._insert_qpath(self.content) + self._insert_xpath(self.content) self._unlink_list_items(self.content) self._remove_empty_expressions(self.content) self._remove_version(self.content) @@ -492,21 +492,25 @@ def analysis_form_json(self, omit_question_types=None): # # See also injectSupplementalRowsIntoListOfRows() in # assetUtils.ts - qpath = qual_question['qpath'] + try: + xpath = qual_question['xpath'] + except KeyError: + xpath = qpath_to_xpath(qual_question['qpath'], self) + field = dict( label=qual_question['labels']['_default'], - name=f"{qpath}/{qual_question['uuid']}", - dtpath=f"{qpath}/{qual_question['uuid']}", + name=f"{xpath}/{qual_question['uuid']}", + dtpath=f"{xpath}/{qual_question['uuid']}", type=qual_question['type'], # could say '_default' or the language of the transcript, # but really that would be meaningless and misleading language='??', - source=qpath, - qpath=f"{qpath}-{qual_question['uuid']}", + source=xpath, + xpath=f"{xpath}/{qual_question['uuid']}", # seems not applicable given the transx questions describe # manual vs. auto here and which engine was used settings='??', - path=[qpath, qual_question['uuid']], + path=[xpath, qual_question['uuid']], ) if field['type'] in omit_question_types: continue @@ -515,6 +519,7 @@ def analysis_form_json(self, omit_question_types=None): except KeyError: pass additional_fields.append(field) + return output def clone(self, version_uid=None): @@ -618,7 +623,7 @@ def _get_xpaths(survey_: dict) -> Optional[list]: if xpaths := _get_xpaths(survey): return xpaths - self._insert_qpath(content) + self._insert_xpath(content) return _get_xpaths(survey) def get_filters_for_partial_perm( @@ -1119,16 +1124,19 @@ def update_languages(self, children=None): if children: languages = set(obj_languages) - children_languages = [child.summary.get('languages') - for child in children - if child.summary.get('languages')] + children_languages = [ + child.summary.get('languages') + for child in children + if child.summary.get('languages') + ] else: - children_languages = list(self.children - .values_list('summary__languages', - flat=True) - .exclude(Q(summary__languages=[]) | - Q(summary__languages=[None])) - .order_by()) + children_languages = list( + self.children.values_list('summary__languages', flat=True) + .exclude( + Q(summary__languages=[]) | Q(summary__languages=[None]) + ) + .order_by() + ) if children_languages: # Flatten `children_languages` to 1-dimension list. @@ -1148,6 +1156,7 @@ def update_languages(self, children=None): def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} + jsonschema_validate( instance=self.advanced_features, schema=ADVANCED_FEATURES_PARAMS_SCHEMA, @@ -1182,12 +1191,26 @@ def version_number_and_date(self) -> str: return f'{count} {self.date_modified:(%Y-%m-%d %H:%M:%S)}' def _get_additional_fields(self): + + # TODO delete the loop when every asset is repopulated with `xpath` + # instead of `qpath`. 
+ for idx, known_column in enumerate(self.known_cols): + xpath, *rest = known_column.split(':') + # Old `qpath` should not contain "/", but could contain "-". + # If the question does not belong to a group but does contain "-", + # it will enter this condition - which is not a problem except extra + # CPU usage for nothing. + if '-' in xpath and '/' not in xpath: + xpath = qpath_to_xpath(xpath, self) + rest.insert(0, xpath) + self.known_cols[idx] = ':'.join(rest) + return parse_known_cols(self.known_cols) def _get_engines(self): - ''' + """ engines are individual NLP services that can be used - ''' + """ for instance in self.get_advanced_feature_instances(): if hasattr(instance, 'engines'): for key, val in instance.engines(): diff --git a/kpi/tests/test_asset_content.py b/kpi/tests/test_asset_content.py index d2e9f59ef9..5f8bedb7cc 100644 --- a/kpi/tests/test_asset_content.py +++ b/kpi/tests/test_asset_content.py @@ -845,7 +845,7 @@ def test_kuid_persists(): assert content['survey'][1].get('$kuid') == initial_kuid_2 -def test_populates_qpath_xpath_correctly(): +def test_populates_xpath_correctly(): asset = Asset(content={ 'survey': [ {'type': 'begin_group', 'name': 'g1'}, @@ -858,12 +858,11 @@ def test_populates_qpath_xpath_correctly(): }) asset.adjust_content_on_save() rs = asset.content['survey'][0:4] - assert [rr['$qpath'] for rr in rs] == ['g1', 'g1-r1', 'g1-g2', 'g1-g2-r2'] assert [rr['$xpath'] for rr in rs] == ['g1', 'g1/r1', 'g1/g2', 'g1/g2/r2'] @pytest.mark.django_db() -def test_return_xpaths_and_qpath_even_if_missing(): +def test_return_xpaths_even_if_missing(): user = baker.make( settings.AUTH_USER_MODEL, username='johndoe' ) @@ -879,8 +878,8 @@ def test_return_xpaths_and_qpath_even_if_missing(): }) expected = ['g1/r1', 'g1/g2/r2'] - # 'qpath' and 'xpath' are not injected until an Asset object is saved with `adjust_content=True` - # or `adjust_content_on_save()` is called directly. + # 'xpath' is not injected until an Asset object is saved with + # `adjust_content=True` or `adjust_content_on_save()` is called directly. # No matter what, `get_attachment_xpaths()` should be able to return # attachment xpaths. 
assert asset.get_attachment_xpaths(deployed=False) == expected diff --git a/kpi/tests/test_asset_versions.py b/kpi/tests/test_asset_versions.py index a1ddb8d26a..1e0a524c01 100644 --- a/kpi/tests/test_asset_versions.py +++ b/kpi/tests/test_asset_versions.py @@ -30,12 +30,12 @@ def test_init_asset_version(self): } new_asset = Asset.objects.create(asset_type='survey', content=_content) _vc = deepcopy(new_asset.latest_version.version_content) - pop_atts = ['$kuid', + pop_atts = [ + '$kuid', '$autoname', '$prev', - '$qpath', '$xpath', - ] + ] for row in _vc['survey']: for att in pop_atts: row.pop(att, None) diff --git a/kpi/utils/absolute_paths.py b/kpi/utils/absolute_paths.py index 62d37fc6d9..79053e8da6 100644 --- a/kpi/utils/absolute_paths.py +++ b/kpi/utils/absolute_paths.py @@ -7,12 +7,6 @@ ENDERS = ENDERS + (f'end_{hierarchy_keyword}',) -def concat_paths(name, parent_names): - return DELIMITER.join( - [*parent_names, name or ''] - ) - - def concat_xpath(name, parent_names): return '/'.join( [*parent_names, name or ''] @@ -39,7 +33,6 @@ def insert_full_paths_in_place(content): else: rowname = get_name(row) if rowname is not None: - row['$qpath'] = concat_paths(rowname, hierarchy) row['$xpath'] = concat_xpath(rowname, hierarchy) if row.get('type') in BEGINNERS: hierarchy.append(rowname) diff --git a/kpi/views/environment.py b/kpi/views/environment.py index 310349c143..0ca636ab1a 100644 --- a/kpi/views/environment.py +++ b/kpi/views/environment.py @@ -23,7 +23,7 @@ from kpi.utils.object_permission import get_database_user -def _check_asr_mt_access_for_user(user): +def check_asr_mt_access_for_user(user): # This is for proof-of-concept testing and will be replaced with proper # quotas and accounting if user.is_anonymous: @@ -165,7 +165,7 @@ def process_other_configs(request): ) ) - data['asr_mt_features_enabled'] = _check_asr_mt_access_for_user( + data['asr_mt_features_enabled'] = check_asr_mt_access_for_user( request.user ) data['submission_placeholder'] = SUBMISSION_PLACEHOLDER From ed6401cc9d2e562f5de0fc906926081fb81cdbd7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 29 Aug 2024 18:42:05 -0400 Subject: [PATCH 053/119] Remove QPath from front-end code --- jsapp/js/assetUtils.ts | 17 ++-- .../analysis/analysisContent.component.tsx | 2 +- .../analysis/analysisHeader.component.tsx | 2 +- .../analysis/analysisQuestions.actions.ts | 4 +- .../analysis/analysisQuestions.reducer.ts | 4 +- .../analysis/analysisTab.component.tsx | 2 +- .../processing/analysis/constants.ts | 6 +- .../list/analysisQuestionsList.component.tsx | 2 +- .../integerResponseForm.component.tsx | 2 +- .../selectMultipleResponseForm.component.tsx | 2 +- .../selectOneResponseForm.component.tsx | 2 +- .../tagsResponseForm.component.tsx | 2 +- .../textResponseForm.component.tsx | 2 +- .../components/processing/analysis/utils.ts | 18 ++--- .../processing/processingActions.ts | 70 ++++++++-------- .../processing/routes.utils.tests.ts | 14 ++-- .../js/components/processing/routes.utils.ts | 39 ++++++--- .../processing/singleProcessingContent.tsx | 1 - .../processing/singleProcessingHeader.tsx | 26 +++--- .../processing/singleProcessingRoute.tsx | 6 +- .../processing/singleProcessingStore.ts | 81 ++++++++++--------- .../transcript/transcriptTab.component.tsx | 8 +- jsapp/js/components/submissions/audioCell.tsx | 4 +- jsapp/js/components/submissions/mediaCell.tsx | 2 +- .../submissions/submissionDataTable.tsx | 2 +- .../submissions/submissionUtils.mocks.es6 | 12 +-- .../components/submissions/submissionUtils.ts | 4 +- 
jsapp/js/components/submissions/table.tsx | 4 +- jsapp/js/dataInterface.ts | 3 +- jsapp/js/router/permProtectedRoute.tsx | 25 +++++- jsapp/js/router/routerConstants.ts | 4 +- 31 files changed, 207 insertions(+), 165 deletions(-) diff --git a/jsapp/js/assetUtils.ts b/jsapp/js/assetUtils.ts index 18da8ebf7c..6f025576cd 100644 --- a/jsapp/js/assetUtils.ts +++ b/jsapp/js/assetUtils.ts @@ -451,8 +451,8 @@ export function findRow(assetContent: AssetContent, rowName: string) { return assetContent?.survey?.find((row) => getRowName(row) === rowName); } -export function findRowByQpath(assetContent: AssetContent, qpath: string) { - return assetContent?.survey?.find((row) => row.$qpath === qpath); +export function findRowByXpath(assetContent: AssetContent, xpath: string) { + return assetContent?.survey?.find((row) => row.$xpath === xpath); } export function getRowType(assetContent: AssetContent, rowName: string) { @@ -460,8 +460,8 @@ export function getRowType(assetContent: AssetContent, rowName: string) { return foundRow?.type; } -export function getRowNameByQpath(assetContent: AssetContent, qpath: string) { - const foundRow = findRowByQpath(assetContent, qpath); +export function getRowNameByXpath(assetContent: AssetContent, xpath: string) { + const foundRow = findRowByXpath(assetContent, xpath); if (foundRow) { return getRowName(foundRow); } @@ -552,9 +552,8 @@ export function injectSupplementalRowsIntoListOfRows( // Step 4: Inject all the extra columns immediately after source question const outputWithCols: string[] = []; output.forEach((col: string) => { - const qpath = col.replace(/\//g, '-'); outputWithCols.push(col); - (extraColsBySource[qpath] || []).forEach((extraCol) => { + (extraColsBySource[col] || []).forEach((extraCol) => { outputWithCols.push(`_supplementalDetails/${extraCol.dtpath}`); }); }); @@ -702,7 +701,7 @@ export function getAssetSubmissionProcessingUrl( return undefined; } -/** Returns a list of all rows (their `qpath`s) activated for advanced features. */ +/** Returns a list of all rows (their `xpath`s) activated for advanced features. 
*/ export function getAssetProcessingRows(assetUid: string) { const foundAsset = assetStore.getAsset(assetUid); if (foundAsset?.advanced_submission_schema?.properties) { @@ -723,9 +722,9 @@ export function getAssetProcessingRows(assetUid: string) { return undefined; } -export function isRowProcessingEnabled(assetUid: string, qpath: string) { +export function isRowProcessingEnabled(assetUid: string, xpath: string) { const processingRows = getAssetProcessingRows(assetUid); - return Array.isArray(processingRows) && processingRows.includes(qpath); + return Array.isArray(processingRows) && processingRows.includes(xpath); } export function isAssetProcessingActivated(assetUid: string) { diff --git a/jsapp/js/components/processing/analysis/analysisContent.component.tsx b/jsapp/js/components/processing/analysis/analysisContent.component.tsx index 581f80cbcb..255f38cd16 100644 --- a/jsapp/js/components/processing/analysis/analysisContent.component.tsx +++ b/jsapp/js/components/processing/analysis/analysisContent.component.tsx @@ -14,7 +14,7 @@ export default function AnalysisContent() { // We only want to display analysis questions for this survey question const filteredQuestions = analysisQuestions.state.questions.filter( - (question) => question.qpath === singleProcessingStore.currentQuestionQpath + (question) => question.xpath === singleProcessingStore.currentQuestionXpath ); return ( diff --git a/jsapp/js/components/processing/analysis/analysisHeader.component.tsx b/jsapp/js/components/processing/analysis/analysisHeader.component.tsx index d5848ad68c..274f3b30c4 100644 --- a/jsapp/js/components/processing/analysis/analysisHeader.component.tsx +++ b/jsapp/js/components/processing/analysis/analysisHeader.component.tsx @@ -46,7 +46,7 @@ export default function AnalysisHeader() { analysisQuestions?.dispatch({ type: 'addQuestion', payload: { - qpath: singleProcessingStore.currentQuestionQpath, + xpath: singleProcessingStore.currentQuestionXpath, type: definition.type, }, }); diff --git a/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts b/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts index e2b4e8bf7d..108b07fe01 100644 --- a/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts +++ b/jsapp/js/components/processing/analysis/analysisQuestions.actions.ts @@ -8,7 +8,7 @@ export type AnalysisQuestionsAction = // Sets all the quetsion with new ones (useful for initialising) | {type: 'setQuestions'; payload: {questions: AnalysisQuestionInternal[]}} // Creates a draft question of given type with new uid assigned - | {type: 'addQuestion'; payload: {qpath: string; type: AnalysisQuestionType}} + | {type: 'addQuestion'; payload: {xpath: string; type: AnalysisQuestionType}} // Opens question for editing, i.e. 
causes the editor to be opened for given // question | {type: 'startEditingQuestion'; payload: {uuid: string}} @@ -43,7 +43,7 @@ export type AnalysisQuestionsAction = | { type: 'updateResponseCompleted'; payload: { - qpath: string; + xpath: string; apiResponse: SubmissionProcessingDataResponse; }; } diff --git a/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts b/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts index f7571cc3eb..f6703ef408 100644 --- a/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts +++ b/jsapp/js/components/processing/analysis/analysisQuestions.reducer.ts @@ -73,7 +73,7 @@ export const analysisQuestionsReducer: AnalysisQuestionReducerType = ( } const newQuestion: AnalysisQuestionInternal = { - qpath: action.payload.qpath, + xpath: action.payload.xpath, type: action.payload.type, labels: {_default: ''}, uuid: newUuid, @@ -186,7 +186,7 @@ export const analysisQuestionsReducer: AnalysisQuestionReducerType = ( } case 'updateResponseCompleted': { const newQuestions = applyUpdateResponseToInternalQuestions( - action.payload.qpath, + action.payload.xpath, action.payload.apiResponse, state.questions ); diff --git a/jsapp/js/components/processing/analysis/analysisTab.component.tsx b/jsapp/js/components/processing/analysis/analysisTab.component.tsx index da1f3e0f79..2db65e0c82 100644 --- a/jsapp/js/components/processing/analysis/analysisTab.component.tsx +++ b/jsapp/js/components/processing/analysis/analysisTab.component.tsx @@ -70,7 +70,7 @@ export default function AnalysisTab() { await fetchGetUrl(processingUrl); questions = applyUpdateResponseToInternalQuestions( - singleProcessingStore.currentQuestionQpath, + singleProcessingStore.currentQuestionXpath, apiResponse, questions ); diff --git a/jsapp/js/components/processing/analysis/constants.ts b/jsapp/js/components/processing/analysis/constants.ts index 86876350d4..5208951fc9 100644 --- a/jsapp/js/components/processing/analysis/constants.ts +++ b/jsapp/js/components/processing/analysis/constants.ts @@ -72,7 +72,7 @@ export interface AnalysisQuestionBase { uuid: string; options?: AnalysisQuestionOptions; /** The survey question that this analysis questions is for. */ - qpath: string; + xpath: string; } /** Analysis question definition from the asset's schema (i.e. from Back end) */ @@ -160,7 +160,7 @@ export interface SubmissionAnalysisResponse extends AnalysisQuestionBase { * This is the payload of a request made to update a question response. */ export interface AnalysisResponseUpdateRequest { - [qpath: string]: + [xpath: string]: | { qual: AnalysisRequest[]; } @@ -173,7 +173,7 @@ export interface AnalysisResponseUpdateRequest { * response. */ export interface SubmissionProcessingDataResponse { - [qpath: string]: { + [xpath: string]: { qual: AnalysisResponse[]; }; } diff --git a/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx b/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx index 88a117e83d..9236695474 100644 --- a/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx +++ b/jsapp/js/components/processing/analysis/list/analysisQuestionsList.component.tsx @@ -35,7 +35,7 @@ export default function AnalysisQuestionsList() { // hide them at this point (not filtering the whole list beforehand), // because we need the indexes to match the whole list. 
And FYI all // analysis questions live on a single list :) - if (question.qpath !== singleProcessingStore.currentQuestionQpath) { + if (question.xpath !== singleProcessingStore.currentQuestionXpath) { return null; } diff --git a/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx index 195ecd2215..91e6f23111 100644 --- a/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/integerResponseForm.component.tsx @@ -52,7 +52,7 @@ export default function IntegerResponseForm(props: IntegerResponseFormProps) { updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, response diff --git a/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx index 874d6a4ed0..a6b39b2274 100644 --- a/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/selectMultipleResponseForm.component.tsx @@ -63,7 +63,7 @@ export default function SelectMultipleResponseForm( // Update endpoint and reducer updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, newResponse diff --git a/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx index f7e200ef4e..b814f26ad0 100644 --- a/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/selectOneResponseForm.component.tsx @@ -57,7 +57,7 @@ export default function SelectOneResponseForm( // Update endpoint and reducer updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, newResponse diff --git a/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx index 444d08a67b..e25fe1ded4 100644 --- a/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/tagsResponseForm.component.tsx @@ -55,7 +55,7 @@ export default function TagsResponseForm(props: TagsResponseFormProps) { // Update endpoint and reducer updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, newTags diff --git a/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx b/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx index 55336726fb..bd8427d0c5 100644 --- a/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx +++ b/jsapp/js/components/processing/analysis/responseForms/textResponseForm.component.tsx @@ -52,7 +52,7 @@ export default function TextResponseForm(props: TextResponseFormProps) { updateResponseAndReducer( analysisQuestions.dispatch, - question.qpath, + question.xpath, props.uuid, question.type, response diff --git a/jsapp/js/components/processing/analysis/utils.ts 
b/jsapp/js/components/processing/analysis/utils.ts index 41e7b537c2..f19812e495 100644 --- a/jsapp/js/components/processing/analysis/utils.ts +++ b/jsapp/js/components/processing/analysis/utils.ts @@ -49,7 +49,7 @@ export function convertQuestionsFromInternalToSchema( options: question.options, choices: question.additionalFields?.choices, scope: 'by_question#survey', - qpath: question.qpath, + xpath: question.xpath, }; }); } @@ -64,7 +64,7 @@ export function convertQuestionsFromSchemaToInternal( ): AnalysisQuestionInternal[] { return questions.map((question) => { const output: AnalysisQuestionInternal = { - qpath: question.qpath, + xpath: question.xpath, uuid: question.uuid, type: question.type, labels: question.labels, @@ -85,12 +85,12 @@ export function convertQuestionsFromSchemaToInternal( * internal questions list using the API endpoint response. */ export function applyUpdateResponseToInternalQuestions( - qpath: string, + xpath: string, updateResp: SubmissionProcessingDataResponse, questions: AnalysisQuestionInternal[] ): AnalysisQuestionInternal[] { const newQuestions = clonedeep(questions); - const analysisResponses = updateResp[qpath]?.qual || []; + const analysisResponses = updateResp[xpath]?.qual || []; newQuestions.forEach((question) => { const foundResponse = analysisResponses.find( (analResp) => question.uuid === analResp.uuid @@ -191,7 +191,7 @@ export async function updateSurveyQuestions( async function updateResponse( processingUrl: string, submissionUid: string, - qpath: string, + xpath: string, analysisQuestionUuid: string, analysisQuestionType: AnalysisQuestionType, newResponse: string | string[] | number | null @@ -199,7 +199,7 @@ async function updateResponse( try { const payload: AnalysisResponseUpdateRequest = { submission: submissionUid, - [qpath]: { + [xpath]: { qual: [ { uuid: analysisQuestionUuid, @@ -219,7 +219,7 @@ async function updateResponse( return { apiResponse: apiResponse, - qpath: qpath, + xpath: xpath, }; } catch (err) { return Promise.reject(err); @@ -243,7 +243,7 @@ async function updateResponse( */ export async function updateResponseAndReducer( dispatch: React.Dispatch, - surveyQuestionQpath: string, + surveyQuestionXpath: string, analysisQuestionUuid: string, analysisQuestionType: AnalysisQuestionType, response: string | string[] @@ -284,7 +284,7 @@ export async function updateResponseAndReducer( const result = await updateResponse( processingUrl, singleProcessingStore.currentSubmissionEditId, - surveyQuestionQpath, + surveyQuestionXpath, analysisQuestionUuid, analysisQuestionType, actualResponse diff --git a/jsapp/js/components/processing/processingActions.ts b/jsapp/js/components/processing/processingActions.ts index b473a3f002..3460c4952e 100644 --- a/jsapp/js/components/processing/processingActions.ts +++ b/jsapp/js/components/processing/processingActions.ts @@ -80,13 +80,13 @@ export interface TransxObject extends TransxRequestObject { /** Object we send to Back end when updating transcript text manually. */ interface TranscriptRequest { - [qpath: string]: string | undefined | {transcript: TransxRequestObject}; + [xpath: string]: string | undefined | {transcript: TransxRequestObject}; submission?: string; } /** Object we send to Back end when requesting an automatic transcription. 
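+ * Keyed by the question's xpath, e.g. (names and language are illustrative):
+ * `{submission: '<editId>', 'group_a/audio_q': {googlets: {status: 'requested', languageCode: 'fr'}}}`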
*/ interface AutoTranscriptRequest { - [qpath: string]: + [xpath: string]: | string | undefined | {googlets: AutoTranscriptRequestEngineParams}; @@ -100,7 +100,7 @@ interface AutoTranscriptRequestEngineParams { /** Object we send to Back end when updating translation text manually. */ interface TranslationRequest { - [qpath: string]: + [xpath: string]: | string | undefined | {translation: TranslationsRequestObject}; @@ -112,7 +112,7 @@ interface TranslationsRequestObject { /** Object we send to Back end when requesting an automatic translation. */ interface AutoTranslationRequest { - [qpath: string]: + [xpath: string]: | string | undefined | {googletx: AutoTranslationRequestEngineParams}; @@ -261,7 +261,7 @@ processingActions.getProcessingData.failed.listen(() => { */ function setTranscriptInnerMethod( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -273,7 +273,7 @@ function setTranscriptInnerMethod( const data: TranscriptRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { transcript: { value: value, languageCode: languageCode, @@ -304,7 +304,7 @@ function setTranscriptInnerMethod( interface SetTranscriptFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -316,7 +316,7 @@ interface SetTranscriptDefinition extends SetTranscriptFn { failed: ListenableCallback; } processingActions.setTranscript.listen( - (assetUid, qpath, submissionEditId, languageCode, value) => { + (assetUid, xpath, submissionEditId, languageCode, value) => { // This first block of code is about getting currently enabled languages. const currentFeatures = getAssetAdvancedFeatures(assetUid); if (currentFeatures?.transcript === undefined) { @@ -332,7 +332,7 @@ processingActions.setTranscript.listen( ) { setTranscriptInnerMethod( assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -362,7 +362,7 @@ processingActions.setTranscript.listen( onComplete: setTranscriptInnerMethod.bind( this, assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -386,7 +386,7 @@ processingActions.setTranscript.failed.listen(() => { * `advanced_feature` (i.e. makes it "not enabled"). 
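+ * (Deletion is expressed by sending the `DELETE_CHAR` placeholder as the
+ * transcript value in the request body.)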
*/ interface DeleteTranscriptFn { - (assetUid: string, qpath: string, submissionEditId: string): void; + (assetUid: string, xpath: string, submissionEditId: string): void; } interface DeleteTranscriptDefinition extends DeleteTranscriptFn { listen: (fn: DeleteTranscriptFn) => void; @@ -394,7 +394,7 @@ interface DeleteTranscriptDefinition extends DeleteTranscriptFn { failed: ListenableCallback; } processingActions.deleteTranscript.listen( - (assetUid, qpath, submissionEditId) => { + (assetUid, xpath, submissionEditId) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.deleteTranscript.failed(NO_FEATURE_ERROR); @@ -402,7 +402,7 @@ processingActions.deleteTranscript.listen( const data: TranscriptRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { transcript: { value: DELETE_CHAR, languageCode: '', @@ -438,7 +438,7 @@ processingActions.deleteTranscript.failed.listen(() => { interface RequestAutoTranscriptionFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode?: string, regionCode?: string | null @@ -458,7 +458,7 @@ interface RequestAutoTranscriptionDefinition failed: ListenableCallback; } processingActions.requestAutoTranscription.listen( - (assetUid, qpath, submissionEditId, languageCode, regionCode) => { + (assetUid, xpath, submissionEditId, languageCode, regionCode) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.requestAutoTranscription.failed(NO_FEATURE_ERROR); @@ -475,7 +475,7 @@ processingActions.requestAutoTranscription.listen( if (regionCode) { autoparams.regionCode = regionCode; } - data[qpath] = { + data[xpath] = { googlets: autoparams, }; @@ -487,7 +487,7 @@ processingActions.requestAutoTranscription.listen( data: JSON.stringify(data), }) .done((response: ProcessingDataResponse) => { - const responseStatus = response[qpath]?.googlets?.status; + const responseStatus = response[xpath]?.googlets?.status; if (responseStatus === 'requested' || responseStatus === 'in_progress') { processingActions.requestAutoTranscription.in_progress({ @@ -511,10 +511,10 @@ processingActions.requestAutoTranscription.listen( /** A small utility function for getting easier to use data. */ function pickTranslationsFromProcessingDataResponse( response: ProcessingDataResponse, - qpath: string + xpath: string ): TransxObject[] { const translations: TransxObject[] = []; - Object.values(response[qpath]?.translation).forEach((translation) => { + Object.values(response[xpath]?.translation).forEach((translation) => { translations.push(translation); }); return translations; @@ -522,7 +522,7 @@ function pickTranslationsFromProcessingDataResponse( /** A function that builds translation data object for processing endpoint. 
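+ * Rough shape of the result (values are illustrative):
+ * `{submission: '<editId>', 'group_a/audio_q': {translation: {fr: {value: '...', languageCode: 'fr'}}}}`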
*/ function getTranslationDataObject( - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -537,7 +537,7 @@ function getTranslationDataObject( const data: TranslationRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { translation: translationsObj, }; return data; @@ -549,7 +549,7 @@ function getTranslationDataObject( */ function setTranslationInnerMethod( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -559,7 +559,7 @@ function setTranslationInnerMethod( processingActions.setTranslation.failed(NO_FEATURE_ERROR); } else { const data = getTranslationDataObject( - qpath, + xpath, submissionEditId, languageCode, value @@ -573,7 +573,7 @@ function setTranslationInnerMethod( }) .done((response: ProcessingDataResponse) => { processingActions.setTranslation.completed( - pickTranslationsFromProcessingDataResponse(response, qpath) + pickTranslationsFromProcessingDataResponse(response, xpath) ); }) .fail(processingActions.setTranslation.failed); @@ -590,7 +590,7 @@ function setTranslationInnerMethod( interface SetTranslationFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode, value: string @@ -602,7 +602,7 @@ interface SetTranslationDefinition extends SetTranslationFn { failed: ListenableCallback; } processingActions.setTranslation.listen( - (assetUid, qpath, submissionEditId, languageCode, value) => { + (assetUid, xpath, submissionEditId, languageCode, value) => { // This first block of code is about getting currently enabled languages. const currentFeatures = getAssetAdvancedFeatures(assetUid); if (currentFeatures?.translation === undefined) { @@ -618,7 +618,7 @@ processingActions.setTranslation.listen( ) { setTranslationInnerMethod( assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -648,7 +648,7 @@ processingActions.setTranslation.listen( onComplete: setTranslationInnerMethod.bind( this, assetUid, - qpath, + xpath, submissionEditId, languageCode, value @@ -674,7 +674,7 @@ processingActions.setTranslation.failed.listen(() => { interface DeleteTranslationFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: LanguageCode ): void; @@ -685,13 +685,13 @@ interface DeleteTranslationDefinition extends DeleteTranslationFn { failed: ListenableCallback; } processingActions.deleteTranslation.listen( - (assetUid, qpath, submissionEditId, languageCode) => { + (assetUid, xpath, submissionEditId, languageCode) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.deleteTranslation.failed(NO_FEATURE_ERROR); } else { const data = getTranslationDataObject( - qpath, + xpath, submissionEditId, languageCode, DELETE_CHAR @@ -722,7 +722,7 @@ processingActions.deleteTranslation.failed.listen(() => { interface RequestAutoTranslationFn { ( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, languageCode: string ): void; @@ -740,7 +740,7 @@ interface RequestAutoTranslationDefinition extends RequestAutoTranslationFn { failed: ListenableCallback; } processingActions.requestAutoTranslation.listen( - (assetUid, qpath, submissionEditId, languageCode) => { + (assetUid, xpath, submissionEditId, languageCode) => { const processingUrl = getAssetProcessingUrl(assetUid); if (processingUrl === undefined) { processingActions.requestAutoTranslation.failed(NO_FEATURE_ERROR); @@ 
-748,7 +748,7 @@ processingActions.requestAutoTranslation.listen( const data: AutoTranslationRequest = { submission: submissionEditId, }; - data[qpath] = { + data[xpath] = { googletx: { status: 'requested', languageCode: languageCode, @@ -763,7 +763,7 @@ processingActions.requestAutoTranslation.listen( data: JSON.stringify(data), }) .done((response: ProcessingDataResponse) => { - const responseStatus = response[qpath]?.googletx?.status; + const responseStatus = response[xpath]?.googletx?.status; if (responseStatus === 'requested' || responseStatus === 'in_progress') { processingActions.requestAutoTranslation.in_progress({ diff --git a/jsapp/js/components/processing/routes.utils.tests.ts b/jsapp/js/components/processing/routes.utils.tests.ts index 7af424fcf6..4a3468c94d 100644 --- a/jsapp/js/components/processing/routes.utils.tests.ts +++ b/jsapp/js/components/processing/routes.utils.tests.ts @@ -12,7 +12,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', tabName: ProcessingTab.Analysis, }); @@ -23,7 +23,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', tabName: ProcessingTab.Transcript, }); @@ -34,7 +34,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', }); }); @@ -44,7 +44,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: 'abc123', - qpath: 'My_que', + xpath: 'My_que', submissionEditId: 'def-45gh-jklm', }); }); @@ -54,7 +54,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }); }); @@ -64,7 +64,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }); }); @@ -74,7 +74,7 @@ describe('processing routes.utils tests', () => { const test = getProcessingRouteParts(path); chai.expect(test).to.deep.equal({ assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }); }); diff --git a/jsapp/js/components/processing/routes.utils.ts b/jsapp/js/components/processing/routes.utils.ts index 7f9c7918ff..45ba23cd80 100644 --- a/jsapp/js/components/processing/routes.utils.ts +++ b/jsapp/js/components/processing/routes.utils.ts @@ -1,7 +1,7 @@ // This is a collection of various utility functions related to processing // routes and navigation. 
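+// Unlike the old dash-delimited qpath, an xpath (e.g. 'group_a/audio_q')
+// contains slashes, so it cannot sit in a ':xpath' route segment as-is;
+// the encode/decode helpers below swap '/' for '|' when building processing
+// URLs and restore it when a URL is parsed (question name is illustrative).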
-import {generatePath, matchPath} from 'react-router-dom'; +import {generatePath, matchPath, useNavigate} from 'react-router-dom'; import {router} from 'js/router/legacy'; import {ROUTES, PROCESSING_ROUTES, PROCESSING_ROUTE_GENERIC} from 'js/router/routerConstants'; import {getCurrentPath} from 'js/router/routerUtils'; @@ -24,7 +24,7 @@ const TabToRouteMap: Map = new Map([ interface ProcessingRouteParts { assetUid: string; - qpath: string; + xpath: string; submissionEditId: string; tabName?: ProcessingTab; } @@ -35,7 +35,7 @@ interface ProcessingRouteParts { export function getProcessingRouteParts(path: string): ProcessingRouteParts { const output: ProcessingRouteParts = { assetUid: '', - qpath: '', + xpath: '', submissionEditId: '', }; @@ -57,7 +57,7 @@ export function getProcessingRouteParts(path: string): ProcessingRouteParts { // Step 4. Assign all the found values to output output.assetUid = matchProfile.params.uid as string; - output.qpath = matchProfile.params.qpath as string; + output.xpath = decodeURLParamWithSlash(matchProfile.params.xpath || '') as string; output.submissionEditId = matchProfile.params.submissionEditId as string; if ( 'tabName' in matchProfile.params && @@ -65,10 +65,31 @@ export function getProcessingRouteParts(path: string): ProcessingRouteParts { ) { output.tabName = matchProfile.params.tabName as ProcessingTab; } - return output; }; +/** + * Restore previously encoded value with encodeURLParamWithSlash to its + * original value + * + * @param value + */ +export function decodeURLParamWithSlash(value: string) { + return value.replace('|', '/'); +} + +/** + * Replace slashes ("/") with pipe ("|") + * + * React router seems to decode `%2F` automatically, thus we cannot use + * `encodeComponentURI()` to pass params with encoded slashes (i.e.: %2F) + * to `router.navigate()` without navigating to url with decoded slashes ("/") + * @param value + */ +export function encodeURLParamWithSlash(value: string) { + return encodeURIComponent(value.replace(/\//g, '|')); +} + export function getCurrentProcessingRouteParts(): ProcessingRouteParts { return getProcessingRouteParts(getCurrentPath()); } @@ -82,7 +103,7 @@ function applyCurrentRouteParams(targetRoute: string) { return generatePath(targetRoute, { uid: routeParams.assetUid, - qpath: routeParams.qpath, + xpath: encodeURLParamWithSlash(routeParams.xpath), submissionEditId: routeParams.submissionEditId, }); } @@ -100,7 +121,7 @@ export function isAnyProcessingRoute(path?: string): boolean { return Boolean( processingRouteParts.assetUid && processingRouteParts.submissionEditId && - processingRouteParts.qpath + processingRouteParts.xpath ); } @@ -155,7 +176,7 @@ export function goToTabRoute(targetTabRoute: string) { */ export function goToProcessing( assetUid: string, - qpath: string, + xpath: string, submissionEditId: string, remainOnSameTab?: boolean ) { @@ -173,7 +194,7 @@ export function goToProcessing( const path = generatePath(targetRoute, { uid: assetUid, - qpath, + xpath: encodeURLParamWithSlash(xpath), submissionEditId, }); router!.navigate(path); diff --git a/jsapp/js/components/processing/singleProcessingContent.tsx b/jsapp/js/components/processing/singleProcessingContent.tsx index 855e0135c5..4788e0c973 100644 --- a/jsapp/js/components/processing/singleProcessingContent.tsx +++ b/jsapp/js/components/processing/singleProcessingContent.tsx @@ -58,7 +58,6 @@ export default class SingleProcessingContent extends React.Component<{}> { if (isProcessingRouteActive(PROCESSING_ROUTES.ANALYSIS)) { return ; } - return 
null; } diff --git a/jsapp/js/components/processing/singleProcessingHeader.tsx b/jsapp/js/components/processing/singleProcessingHeader.tsx index 80dbbc0962..09b18c0776 100644 --- a/jsapp/js/components/processing/singleProcessingHeader.tsx +++ b/jsapp/js/components/processing/singleProcessingHeader.tsx @@ -2,7 +2,7 @@ import React from 'react'; import {QUESTION_TYPES, META_QUESTION_TYPES} from 'js/constants'; import type {AssetContent, AssetResponse} from 'js/dataInterface'; import { - findRowByQpath, + findRowByXpath, getRowTypeIcon, getTranslatedRowLabel, getRowName, @@ -62,9 +62,9 @@ class SingleProcessingHeader extends React.Component< this.forceUpdate(); } - onQuestionSelectChange(newQpath: string | null) { - if (newQpath !== null) { - this.goToSubmission(newQpath, this.props.submissionEditId); + onQuestionSelectChange(newXpath: string | null) { + if (newXpath !== null) { + this.goToSubmission(newXpath, this.props.submissionEditId); } } @@ -98,13 +98,13 @@ class SingleProcessingHeader extends React.Component< } if (editIds) { - Object.keys(editIds).forEach((qpath) => { - const questionData = findRowByQpath(assetContent, qpath); + Object.keys(editIds).forEach((xpath) => { + const questionData = findRowByXpath(assetContent, xpath); // At this point we want to find out whether the question has at least // one editId (i.e. there is at least one transcriptable response to // the question). Otherwise there's no point in having the question as // selectable option. - const questionEditIds = editIds[qpath]; + const questionEditIds = editIds[xpath]; const hasAtLeastOneEditId = Boolean( questionEditIds.find((editIdOrNull) => editIdOrNull !== null) ); @@ -122,7 +122,7 @@ class SingleProcessingHeader extends React.Component< languageIndex ); options.push({ - value: qpath, + value: xpath, label: translatedLabel !== null ? translatedLabel : rowName, icon: getRowTypeIcon(questionData.type), }); @@ -177,15 +177,15 @@ class SingleProcessingHeader extends React.Component< } /** Goes to another submission. 
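+   * The currently selected processing tab is preserved: the call below passes
+   * `true` for `goToProcessing()`'s `remainOnSameTab` parameter.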
*/ - goToSubmission(qpath: string, targetSubmissionEditId: string) { - goToProcessing(this.props.assetUid, qpath, targetSubmissionEditId, true); + goToSubmission(xpath: string, targetSubmissionEditId: string) { + goToProcessing(this.props.assetUid, xpath, targetSubmissionEditId, true); } goPrev() { const prevEditId = this.getPrevSubmissionEditId(); if (prevEditId !== null) { this.goToSubmission( - singleProcessingStore.currentQuestionQpath, + singleProcessingStore.currentQuestionXpath, prevEditId ); } @@ -195,7 +195,7 @@ class SingleProcessingHeader extends React.Component< const nextEditId = this.getNextSubmissionEditId(); if (nextEditId !== null) { this.goToSubmission( - singleProcessingStore.currentQuestionQpath, + singleProcessingStore.currentQuestionXpath, nextEditId ); } @@ -303,7 +303,7 @@ class SingleProcessingHeader extends React.Component< type='gray' size='l' options={this.getQuestionSelectorOptions()} - selectedOption={singleProcessingStore.currentQuestionQpath} + selectedOption={singleProcessingStore.currentQuestionXpath} onChange={this.onQuestionSelectChange.bind(this)} /> diff --git a/jsapp/js/components/processing/singleProcessingRoute.tsx b/jsapp/js/components/processing/singleProcessingRoute.tsx index e421b2466b..53c8a56230 100644 --- a/jsapp/js/components/processing/singleProcessingRoute.tsx +++ b/jsapp/js/components/processing/singleProcessingRoute.tsx @@ -18,7 +18,7 @@ const NO_DATA_MESSAGE = t('There is no data for this question for the current su interface SingleProcessingRouteProps extends WithRouterProps { uid: string; - qpath: string; + xpath: string; submissionEditId: string; } @@ -76,10 +76,10 @@ export default class SingleProcessingRoute extends React.Component< /** Is processing enabled for current question. */ isProcessingEnabled() { - if (this.props.params.uid && this.props.params.qpath) { + if (this.props.params.uid && this.props.params.xpath) { return isRowProcessingEnabled( this.props.params.uid, - this.props.params.qpath + this.props.params.xpath ); } return false; diff --git a/jsapp/js/components/processing/singleProcessingStore.ts b/jsapp/js/components/processing/singleProcessingStore.ts index ed8b8de509..5780d01590 100644 --- a/jsapp/js/components/processing/singleProcessingStore.ts +++ b/jsapp/js/components/processing/singleProcessingStore.ts @@ -8,9 +8,9 @@ import { getAssetProcessingRows, isAssetProcessingActivated, getAssetAdvancedFeatures, - findRowByQpath, + findRowByXpath, getRowName, - getRowNameByQpath, + getRowNameByXpath, getFlatQuestionsList, getLanguageIndex, } from 'js/assetUtils'; @@ -99,7 +99,7 @@ interface TransxDraft { * ``` */ interface SubmissionsEditIds { - [qpath: string]: Array<{ + [xpath: string]: Array<{ editId: string; hasResponse: boolean; }>; @@ -186,8 +186,8 @@ class SingleProcessingStore extends Reflux.Store { return getCurrentProcessingRouteParts().assetUid; } - public get currentQuestionQpath() { - return getCurrentProcessingRouteParts().qpath; + public get currentQuestionXpath() { + return getCurrentProcessingRouteParts().xpath; } public get currentSubmissionEditId() { @@ -197,7 +197,7 @@ class SingleProcessingStore extends Reflux.Store { public get currentQuestionName() { const asset = assetStore.getAsset(this.currentAssetUid); if (asset?.content) { - const foundRow = findRowByQpath(asset.content, this.currentQuestionQpath); + const foundRow = findRowByXpath(asset.content, this.currentQuestionXpath); if (foundRow) { return getRowName(foundRow); } @@ -209,9 +209,9 @@ class SingleProcessingStore extends Reflux.Store { 
public get currentQuestionType(): AnyRowTypeName | undefined { const asset = assetStore.getAsset(this.currentAssetUid); if (asset?.content) { - const foundRow = findRowByQpath( + const foundRow = findRowByXpath( asset?.content, - this.currentQuestionQpath + this.currentQuestionXpath ); return foundRow?.type; } @@ -407,7 +407,7 @@ class SingleProcessingStore extends Reflux.Store { isAnyProcessingRoute(newPath) && previousPathParts && previousPathParts.assetUid === newPathParts.assetUid && - previousPathParts.qpath === newPathParts.qpath && + previousPathParts.xpath === newPathParts.xpath && previousPathParts.submissionEditId === newPathParts.submissionEditId && // This check is needed to avoid going into this in case when route // redirects from no tab (e.g. `/`) into default tab (e.g. `/transcript`). @@ -427,7 +427,7 @@ class SingleProcessingStore extends Reflux.Store { isAnyProcessingRoute(newPath) && previousPathParts && previousPathParts.assetUid === newPathParts.assetUid && - (previousPathParts.qpath !== newPathParts.qpath || + (previousPathParts.xpath !== newPathParts.xpath || previousPathParts.submissionEditId !== newPathParts.submissionEditId) ) { this.fetchProcessingData(); @@ -479,8 +479,8 @@ class SingleProcessingStore extends Reflux.Store { const asset = assetStore.getAsset(this.currentAssetUid); let flatPaths: SurveyFlatPaths = {}; - // We need to get a regular path (not qpath!) for each of the processing - // rows. In theory we could just convert the qpath strings, but it's safer + // We need to get a regular path (not xpath!) for each of the processing + // rows. In theory we could just convert the xpath strings, but it's safer // to use the asset data that we already have. const processingRowsPaths: string[] = []; @@ -488,11 +488,11 @@ class SingleProcessingStore extends Reflux.Store { flatPaths = getSurveyFlatPaths(asset.content.survey); if (processingRows) { - processingRows.forEach((qpath) => { + processingRows.forEach((xpath) => { if (asset?.content) { - // Here we need to "convert" qpath into name, as flatPaths work with - // names only. We search the row by qpath and use its name. - const rowName = getRowNameByQpath(asset.content, qpath); + // Here we need to "convert" xpath into name, as flatPaths work with + // names only. We search the row by xpath and use its name. + const rowName = getRowNameByXpath(asset.content, xpath); if (rowName && flatPaths[rowName]) { processingRowsPaths.push(flatPaths[rowName]); @@ -521,16 +521,16 @@ class SingleProcessingStore extends Reflux.Store { flatPaths = getSurveyFlatPaths(asset.content.survey); if (processingRows !== undefined) { - processingRows.forEach((qpath) => { - submissionsEditIds[qpath] = []; + processingRows.forEach((xpath) => { + submissionsEditIds[xpath] = []; }); response.results.forEach((result) => { - processingRows.forEach((qpath) => { + processingRows.forEach((xpath) => { if (asset?.content) { - // Here we need to "convert" qpath into name, as flatPaths work with - // names only. We search the row by qpath and use its name. - const rowName = getRowNameByQpath(asset.content, qpath); + // Here we need to "convert" xpath into name, as flatPaths work with + // names only. We search the row by xpath and use its name. 
+ const rowName = getRowNameByXpath(asset.content, xpath); if (rowName) { // `meta/rootUuid` is persistent across edits while `_uuid` is not; @@ -539,7 +539,7 @@ class SingleProcessingStore extends Reflux.Store { if (uuid === undefined) { uuid = result['_uuid']; } - submissionsEditIds[qpath].push({ + submissionsEditIds[xpath].push({ editId: uuid, hasResponse: Object.keys(result).includes(flatPaths[rowName]), }); @@ -580,7 +580,7 @@ class SingleProcessingStore extends Reflux.Store { } private onFetchProcessingDataCompleted(response: ProcessingDataResponse) { - const transcriptResponse = response[this.currentQuestionQpath]?.transcript; + const transcriptResponse = response[this.currentQuestionXpath]?.transcript; // NOTE: we treat empty transcript object same as nonexistent one this.data.transcript = undefined; if (transcriptResponse?.value && transcriptResponse?.languageCode) { @@ -588,8 +588,9 @@ class SingleProcessingStore extends Reflux.Store { } const translationsResponse = - response[this.currentQuestionQpath]?.translation; + response[this.currentQuestionXpath]?.translation; const translationsArray: Transx[] = []; + if (translationsResponse) { Object.keys(translationsResponse).forEach( (languageCode: LanguageCode) => { @@ -640,7 +641,7 @@ class SingleProcessingStore extends Reflux.Store { } private onSetTranscriptCompleted(response: ProcessingDataResponse) { - const transcriptResponse = response[this.currentQuestionQpath]?.transcript; + const transcriptResponse = response[this.currentQuestionXpath]?.transcript; this.data.isFetchingData = false; @@ -664,7 +665,7 @@ class SingleProcessingStore extends Reflux.Store { // Note: previously initiated automatic transcriptions may no longer be // applicable to the current route const googleTsResponse = - event.response[this.currentQuestionQpath]?.googlets; + event.response[this.currentQuestionXpath]?.googlets; return ( event.submissionEditId === this.currentSubmissionEditId && googleTsResponse && @@ -677,14 +678,14 @@ class SingleProcessingStore extends Reflux.Store { private onRequestAutoTranscriptionCompleted(event: AutoTransxEvent) { if ( - !this.currentQuestionQpath || + !this.currentQuestionXpath || !this.data.isPollingForTranscript || !this.data.transcriptDraft ) { return; } - const googleTsResponse = event.response[this.currentQuestionQpath]?.googlets; + const googleTsResponse = event.response[this.currentQuestionXpath]?.googlets; if (googleTsResponse && this.isAutoTranscriptionEventApplicable(event)) { this.data.isPollingForTranscript = false; this.data.transcriptDraft.value = googleTsResponse.value; @@ -725,7 +726,7 @@ class SingleProcessingStore extends Reflux.Store { private isAutoTranslationEventApplicable(event: AutoTransxEvent) { const googleTxResponse = - event.response[this.currentQuestionQpath]?.googletx; + event.response[this.currentQuestionXpath]?.googletx; return ( event.submissionEditId === this.currentSubmissionEditId && googleTxResponse && @@ -738,14 +739,14 @@ class SingleProcessingStore extends Reflux.Store { private onRequestAutoTranslationCompleted(event: AutoTransxEvent) { if ( - !this.currentQuestionQpath || + !this.currentQuestionXpath || !this.data.isPollingForTranslation || !this.data.translationDraft ) { return; } - const googleTxResponse = event.response[this.currentQuestionQpath]?.googletx; + const googleTxResponse = event.response[this.currentQuestionXpath]?.googletx; if ( googleTxResponse && this.isAutoTranslationEventApplicable(event) @@ -767,7 +768,7 @@ class SingleProcessingStore extends Reflux.Store { 
this.data.isPollingForTranslation = true; console.log('trying to poll!'); // TEMP DELETEME this.requestAutoTranslation( - event.response[this.currentQuestionQpath]!.googlets!.languageCode + event.response[this.currentQuestionXpath]!.googlets!.languageCode ); } else { console.log('no more polling!'); // TEMP DELETEME @@ -816,7 +817,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.setTranscript( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode, value @@ -828,7 +829,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.deleteTranscript( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId ); this.trigger(this.data); @@ -838,7 +839,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isPollingForTranscript = true; processingActions.requestAutoTranscription( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, this.data.transcriptDraft?.languageCode, this.data.transcriptDraft?.regionCode @@ -902,7 +903,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.setTranslation( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode, value @@ -914,7 +915,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.deleteTranslation( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode ); @@ -925,7 +926,7 @@ class SingleProcessingStore extends Reflux.Store { this.data.isFetchingData = true; processingActions.requestAutoTranslation( this.currentAssetUid, - this.currentQuestionQpath, + this.currentQuestionXpath, this.currentSubmissionEditId, languageCode ); @@ -979,7 +980,7 @@ class SingleProcessingStore extends Reflux.Store { /** NOTE: Returns editIds for current question name, not for all of them. */ getCurrentQuestionSubmissionsEditIds() { if (this.data.submissionsEditIds !== undefined) { - return this.data.submissionsEditIds[this.currentQuestionQpath]; + return this.data.submissionsEditIds[this.currentQuestionXpath]; } return undefined; } diff --git a/jsapp/js/components/processing/transcript/transcriptTab.component.tsx b/jsapp/js/components/processing/transcript/transcriptTab.component.tsx index 0112dcd866..1b4dadb533 100644 --- a/jsapp/js/components/processing/transcript/transcriptTab.component.tsx +++ b/jsapp/js/components/processing/transcript/transcriptTab.component.tsx @@ -43,7 +43,7 @@ export default class TranscriptTab extends React.Component<{}> { } // Step 2: Config - for selecting the transcript language and mode. - // We display it when there is ongoing draft, but it doesn't have a language + // We display it when there is ongoing draft, but it doesn't have a language // or a value, and the region code is not selected. if ( draft !== undefined && @@ -53,9 +53,9 @@ export default class TranscriptTab extends React.Component<{}> { return ; } - // Step 2.1: Config Automatic - for selecting region and other automatic + // Step 2.1: Config Automatic - for selecting region and other automatic // options. 
- // We display it when there is ongoing draft, but it doesn't have a language + // We display it when there is ongoing draft, but it doesn't have a language // or a value, and the region code is selected. if ( draft !== undefined && @@ -71,7 +71,7 @@ export default class TranscriptTab extends React.Component<{}> { } // Step 4: Viewer - display existing (on backend) transcript. - // We display it when there is transcript in the store, and there is no + // We display it when there is transcript in the store, and there is no // ongoing draft (we only support single transcript ATM). if ( singleProcessingStore.getTranscript() !== undefined && diff --git a/jsapp/js/components/submissions/audioCell.tsx b/jsapp/js/components/submissions/audioCell.tsx index dea417e925..5cc1394129 100644 --- a/jsapp/js/components/submissions/audioCell.tsx +++ b/jsapp/js/components/submissions/audioCell.tsx @@ -11,7 +11,7 @@ bem.AudioCell = makeBem(null, 'audio-cell'); interface AudioCellProps { assetUid: string; - qpath: string; + xpath: string; /* submissionEditId is meta/rootUuid || _uuid */ submissionEditId: string; /** Required by the mini player. String passed is an error message */ @@ -44,7 +44,7 @@ export default function AudioCell(props: AudioCellProps) { label={t('Open')} isDisabled={typeof props.mediaAttachment === 'string'} onClick={() => { - goToProcessing(props.assetUid, props.qpath, props.submissionEditId); + goToProcessing(props.assetUid, props.xpath, props.submissionEditId); }} /> diff --git a/jsapp/js/components/submissions/mediaCell.tsx b/jsapp/js/components/submissions/mediaCell.tsx index 58e89eb9aa..0029b889f5 100644 --- a/jsapp/js/components/submissions/mediaCell.tsx +++ b/jsapp/js/components/submissions/mediaCell.tsx @@ -39,7 +39,7 @@ interface MediaCellProps { /** Total submissions for text questions. 
*/ submissionTotal: number; assetUid: string; - qpath: string; + xpath: string; submissionUuid: string; } diff --git a/jsapp/js/components/submissions/submissionDataTable.tsx b/jsapp/js/components/submissions/submissionDataTable.tsx index 1cc22d4b78..46712a4210 100644 --- a/jsapp/js/components/submissions/submissionDataTable.tsx +++ b/jsapp/js/components/submissions/submissionDataTable.tsx @@ -58,7 +58,7 @@ class SubmissionDataTable extends React.Component { if (foundRow) { goToProcessing( this.props.asset.uid, - foundRow.$qpath, + foundRow.$xpath, this.props.submissionData._uuid ); } diff --git a/jsapp/js/components/submissions/submissionUtils.mocks.es6 b/jsapp/js/components/submissions/submissionUtils.mocks.es6 index a5d7f140a2..e73af844a6 100644 --- a/jsapp/js/components/submissions/submissionUtils.mocks.es6 +++ b/jsapp/js/components/submissions/submissionUtils.mocks.es6 @@ -1914,7 +1914,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_text', 'uuid': 'ab0e40e1-fbcc-43e9-9d00-b9b3314089cb', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'What is?', @@ -1923,7 +1923,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_integer', 'uuid': '97fd5387-ac2b-4108-b5b4-37fa91ae0e22', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'How much is the fish?', @@ -1932,7 +1932,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_tags', 'uuid': 'b05f29f7-8b58-4dd7-8695-c29cb04f3f7a', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'Another tag question here?', @@ -1941,7 +1941,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_select_multiple', 'uuid': '1a89e0da-3344-4b5d-b919-ab8b072e0918', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'Choose multiple', @@ -1970,7 +1970,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_auto_keyword_count', 'uuid': 'd4813284-d928-43b7-bde5-133eabe76024', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'How many swear words were used?', @@ -1979,7 +1979,7 @@ export const assetWithSupplementalDetails = { { 'type': 'qual_tags', 'uuid': '056c8f57-0733-4669-a84e-aa9726ffbf6b', - 'qpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', + 'xpath': 'Use_the_camera_s_mic_ne_to_record_a_sound', 'scope': 'by_question#survey', 'labels': { '_default': 'Do tags work?', diff --git a/jsapp/js/components/submissions/submissionUtils.ts b/jsapp/js/components/submissions/submissionUtils.ts index 4a15b99fd3..3cbb2ca5bd 100644 --- a/jsapp/js/components/submissions/submissionUtils.ts +++ b/jsapp/js/components/submissions/submissionUtils.ts @@ -372,8 +372,8 @@ export function getSubmissionDisplayData( ); parentGroup.addChild(rowObj); - const rowqpath = flatPaths[rowName].replace(/\//g, '-'); - supplementalDetailKeys[rowqpath]?.forEach((sdKey: string) => { + const rowxpath = flatPaths[rowName]; + supplementalDetailKeys[rowxpath]?.forEach((sdKey: string) => { parentGroup.addChild( new DisplayResponse( null, diff --git 
a/jsapp/js/components/submissions/table.tsx b/jsapp/js/components/submissions/table.tsx index 13b1da15ef..542f89c72c 100644 --- a/jsapp/js/components/submissions/table.tsx +++ b/jsapp/js/components/submissions/table.tsx @@ -969,7 +969,7 @@ export class DataTable extends React.Component { return ( @@ -986,7 +986,7 @@ export class DataTable extends React.Component { submissionIndex={row.index + 1} submissionTotal={this.state.submissions.length} assetUid={this.props.asset.uid} - qpath={q.$qpath} + xpath={q.$xpath} submissionUuid={row.original._uuid} /> ); diff --git a/jsapp/js/dataInterface.ts b/jsapp/js/dataInterface.ts index 9b1d8ed9b5..fd8681be6c 100644 --- a/jsapp/js/dataInterface.ts +++ b/jsapp/js/dataInterface.ts @@ -351,7 +351,6 @@ interface ExportSettingSettings { */ export interface SurveyRow { /** This is a unique identifier that includes both name and path (names of parents). */ - $qpath: string; $xpath: string; $autoname: string; $kuid: string; @@ -579,7 +578,7 @@ export interface AnalysisFormJsonField { type: string; language: string; source: string; - qpath: string; + xpath: string; settings: { mode: string; engine: string; diff --git a/jsapp/js/router/permProtectedRoute.tsx b/jsapp/js/router/permProtectedRoute.tsx index aa7b936585..ada4c47702 100644 --- a/jsapp/js/router/permProtectedRoute.tsx +++ b/jsapp/js/router/permProtectedRoute.tsx @@ -8,6 +8,7 @@ import assetStore from 'js/assetStore'; import type {PermissionCodename} from 'js/components/permissions/permConstants'; import type {WithRouterProps} from 'jsapp/js/router/legacy'; import type {AssetResponse, FailResponse} from 'js/dataInterface'; +import {decodeURLParamWithSlash} from "js/components/processing/routes.utils"; interface PermProtectedRouteProps extends WithRouterProps { /** One of PATHS */ @@ -137,6 +138,27 @@ class PermProtectedRoute extends React.Component< } } + filterProps(props: any) { + const {params, ...rest} = props; + if (!params?.xpath) { + return props; + } + + const {xpath, ...restParams} = params; + const decodedXPath = decodeURLParamWithSlash(xpath); + if (xpath !== decodedXPath) { + return { + ...rest, + params: { + xpath: decodedXPath, + ...restParams, + }, + }; + } else { + return props; + } + } + getUserHasRequiredPermission( asset: AssetResponse, requiredPermission: PermissionCodename @@ -168,10 +190,11 @@ class PermProtectedRoute extends React.Component< if (!this.state.isLoadAssetFinished) { return ; } else if (this.state.userHasRequiredPermissions) { + const filteredProps = this.filterProps(this.props); return ( }> diff --git a/jsapp/js/router/routerConstants.ts b/jsapp/js/router/routerConstants.ts index 0fa0a564b8..0ef491d8aa 100644 --- a/jsapp/js/router/routerConstants.ts +++ b/jsapp/js/router/routerConstants.ts @@ -34,8 +34,8 @@ export const ROUTES = Object.freeze({ FORM_GALLERY: '/forms/:uid/data/gallery', FORM_MAP: '/forms/:uid/data/map', FORM_MAP_BY: '/forms/:uid/data/map/:viewby', - /** Has: :uid, :qpath, :submissionEditId */ - FORM_PROCESSING_ROOT: '/forms/:uid/data/processing/:qpath/:submissionEditId', + /** Has: :uid, :xpath, :submissionEditId */ + FORM_PROCESSING_ROOT: '/forms/:uid/data/processing/:xpath/:submissionEditId', FORM_SETTINGS: '/forms/:uid/settings', FORM_MEDIA: '/forms/:uid/settings/media', FORM_SHARING: '/forms/:uid/settings/sharing', From 97e8e57de105dd3c264b5392645bd65269ca30e0 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Fri, 30 Aug 2024 11:13:57 -0400 Subject: [PATCH 054/119] Support advanced features JSON validation with qpath --- 
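Note (illustration only, not part of this commit): the fallback added below has to map a legacy `qpath` back to the question's real `xpath` before re-validating. A `qpath` is simply the flattened path with the group separator `/` turned into `-` (the front end used to build it with `flatPaths[rowName].replace(/\//g, '-')`), so the mapping can be pictured with this hypothetical sketch; the real helper is `qpath_to_xpath()` in `kobo/apps/subsequences/utils/deprecation.py`, which resolves the path from the asset's survey content instead of a ready-made list.

    # Hypothetical sketch; `flat_xpaths` stands in for the asset's flattened survey paths.
    def naive_qpath_to_xpath(qpath: str, flat_xpaths: list[str]) -> str:
        # A legacy qpath joins group and question names with dashes
        # ('group_one-question_one'); the xpath keeps slashes
        # ('group_one/question_one'). Recover the xpath by comparing
        # the dash-flattened form of each known xpath with the qpath.
        for xpath in flat_xpaths:
            if xpath.replace('/', '-') == qpath:
                return xpath
        raise KeyError(f'xpath for {qpath} not found')

    assert naive_qpath_to_xpath(
        'group_one-question_one', ['group_one/question_one']
    ) == 'group_one/question_one'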
.../advanced_features_params_schema.py | 1 + kpi/models/asset.py | 30 +++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/kobo/apps/subsequences/advanced_features_params_schema.py b/kobo/apps/subsequences/advanced_features_params_schema.py index 846ce6f2ca..3efe7eb9a1 100644 --- a/kobo/apps/subsequences/advanced_features_params_schema.py +++ b/kobo/apps/subsequences/advanced_features_params_schema.py @@ -89,6 +89,7 @@ 'required': ['labels', 'uuid'], }, } + ADVANCED_FEATURES_PARAMS_SCHEMA['properties']['qual'] = { 'type': 'object', 'additionalProperties': False, diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 8f542e884e..2d212f1c48 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -19,7 +19,8 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile -from jsonschema import validate as jsonschema_validate +from jsonschema import exceptions, validate as jsonschema_validate + from kobo.apps.reports.constants import ( SPECIFIC_REPORTS_KEY, @@ -1157,10 +1158,29 @@ def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} - jsonschema_validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) + try: + jsonschema_validate( + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) + except exceptions.ValidationError as e: + if "'qpath' was unexpected" not in str(e): + raise + + # TODO delete the try/except when every asset is repopulated with + # `xpath` instead of `qpath`. + qual_survey_orig = self.advanced_features['qual']['qual_survey'] + qual_survey_iter = copy.deepcopy(qual_survey_orig) + for idx, qual_q in enumerate(qual_survey_iter): + qpath = qual_survey_orig[idx]['qpath'] + xpath = qpath_to_xpath(qpath, self) + del qual_survey_orig[idx]['qpath'] + qual_survey_orig[idx]['xpath'] = xpath + + jsonschema_validate( + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) @property def version__content_hash(self): From 5583793fe591ad1bf4d00b98ebc57e1554990556 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Sat, 31 Aug 2024 15:10:34 -0400 Subject: [PATCH 055/119] Refactoring --- kobo/apps/subsequences/api_view.py | 22 ++--- kobo/apps/subsequences/utils/deprecation.py | 95 +++++++++++++++++++++ kpi/models/asset.py | 58 ++++--------- kpi/models/import_export_task.py | 2 +- kpi/serializers/v2/asset.py | 5 +- 5 files changed, 123 insertions(+), 59 deletions(-) diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index 77385b5681..7cf40fc96c 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -8,7 +8,9 @@ from rest_framework.views import APIView from kobo.apps.subsequences.models import SubmissionExtras -from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath +from kobo.apps.subsequences.utils.deprecation import ( + sanitize_submission_extra_content, +) from kpi.models import Asset from kpi.permissions import SubmissionPermission from kpi.views.environment import check_asr_mt_access_for_user @@ -100,23 +102,9 @@ def get_submission_processing(asset, s_uuid): try: submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) - # TODO delete the loop when every asset is repopulated with `xpath` + # TODO delete line below when every asset is repopulated with `xpath` # instead of `qpath`. 
- content = deepcopy(submission_extra.content) - changed = False - for old_xpath, values in submission_extra.content.items(): - if '-' in old_xpath and '/' not in old_xpath: - xpath = qpath_to_xpath(old_xpath, asset) - if xpath == old_xpath: - continue - - del content[old_xpath] - content[xpath] = values - changed = True - - if changed: - submission_extra.content = content - # TODO save submission_extra? + sanitize_submission_extra_content(submission_extra, asset) return Response(submission_extra.content) except SubmissionExtras.DoesNotExist: diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py index cd94a2d5cd..1c35b73f97 100644 --- a/kobo/apps/subsequences/utils/deprecation.py +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -1,3 +1,39 @@ +from __future__ import annotations + +from copy import deepcopy +from typing import Optional + +import jsonschema + +from kpi.fields import WritableJSONField +from ..advanced_features_params_schema import ( + ADVANCED_FEATURES_PARAMS_SCHEMA, +) + + +def jsonschema_validate(asset: 'Asset'): + try: + jsonschema.validate( + instance=asset.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) + except jsonschema.exceptions.ValidationError as e: + if "'qpath' was unexpected" not in str(e): + raise + + qual_survey_orig = asset.advanced_features['qual']['qual_survey'] + qual_survey_iter = deepcopy(qual_survey_orig) + for idx, qual_q in enumerate(qual_survey_iter): + qpath = qual_survey_orig[idx]['qpath'] + xpath = qpath_to_xpath(qpath, asset) + del qual_survey_orig[idx]['qpath'] + qual_survey_orig[idx]['xpath'] = xpath + + jsonschema.validate( + instance=asset.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) + def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: """ @@ -17,3 +53,62 @@ def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: return xpath raise KeyError(f'xpath for {qpath} not found') + + +def sanitize_known_columns(asset: 'Asset'): + for idx, known_column in enumerate(asset.known_cols): + xpath, *rest = known_column.split(':') + # Old `qpath` should not contain "/", but could contain "-". + # If the question does not belong to a group but does contain "-", + # it will enter this condition - which is not a problem except extra + # CPU usage for nothing. + if '-' in xpath and '/' not in xpath: + xpath = qpath_to_xpath(xpath, asset) + rest.insert(0, xpath) + asset.known_cols[idx] = ':'.join(rest) + + # TODO Should we save asset.known_cols if it has changed? + + +def sanitize_submission_extra_content( + submission_extra: 'SubmissionExtras', asset: 'Asset' +) -> Optional[dict]: + """ + Replace with `qpath` attribute (if it exists) with `xpath` counterpart + """ + content = deepcopy(submission_extra.content) + changed = False + for old_xpath, values in submission_extra.content.items(): + if '-' in old_xpath and '/' not in old_xpath: + xpath = qpath_to_xpath(old_xpath, asset) + if xpath == old_xpath: + continue + + del content[old_xpath] + content[xpath] = values + changed = True + + if changed: + submission_extra.content = content + # TODO Should we save submission_extra? + + +class WritableAdvancedFeaturesField(WritableJSONField): + """ + This class brings support to old projects which are still using + `qpath` as the identifier for questions for advanced features. + + It should be deleted and replaced with WritableJSONField when all + assets are repopulated. 
+ """ + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._model_instance = None + + def to_representation(self, value): + self._model_instance.validate_advanced_features() + return value + + def get_attribute(self, instance): + self._model_instance = instance + return super().get_attribute(instance) diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 2d212f1c48..f9cdc2a18f 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -19,21 +19,22 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile -from jsonschema import exceptions, validate as jsonschema_validate from kobo.apps.reports.constants import ( SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY, ) -from kobo.apps.subsequences.advanced_features_params_schema import ( - ADVANCED_FEATURES_PARAMS_SCHEMA, -) from kobo.apps.subsequences.utils import ( advanced_feature_instances, advanced_submission_jsonschema, ) -from kobo.apps.subsequences.utils.deprecation import qpath_to_xpath +from kobo.apps.subsequences.utils.deprecation import ( + jsonschema_validate, + qpath_to_xpath, + sanitize_known_columns, + sanitize_submission_extra_content, +) from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( ASSET_TYPES, @@ -1095,6 +1096,7 @@ def update_submission_extra(self, content, user=None): .first() ) instances = self.get_advanced_feature_instances() + sanitize_submission_extra_content(sub, self) compiled_content = {**sub.content} for instance in instances: compiled_content = instance.compile_revised_record( @@ -1158,29 +1160,15 @@ def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} - try: - jsonschema_validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) - except exceptions.ValidationError as e: - if "'qpath' was unexpected" not in str(e): - raise - - # TODO delete the try/except when every asset is repopulated with - # `xpath` instead of `qpath`. - qual_survey_orig = self.advanced_features['qual']['qual_survey'] - qual_survey_iter = copy.deepcopy(qual_survey_orig) - for idx, qual_q in enumerate(qual_survey_iter): - qpath = qual_survey_orig[idx]['qpath'] - xpath = qpath_to_xpath(qpath, self) - del qual_survey_orig[idx]['qpath'] - qual_survey_orig[idx]['xpath'] = xpath - - jsonschema_validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) + # TODO uncomment the 4 lines below… + # jsonschema_validate( + # instance=self.advanced_features, + # schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + # ) + + # TODO … and delete this one when every asset is repopulated with + # `xpath` instead of `qpath`. + jsonschema_validate(self) @property def version__content_hash(self): @@ -1212,18 +1200,8 @@ def version_number_and_date(self) -> str: def _get_additional_fields(self): - # TODO delete the loop when every asset is repopulated with `xpath` - # instead of `qpath`. - for idx, known_column in enumerate(self.known_cols): - xpath, *rest = known_column.split(':') - # Old `qpath` should not contain "/", but could contain "-". - # If the question does not belong to a group but does contain "-", - # it will enter this condition - which is not a problem except extra - # CPU usage for nothing. 
- if '-' in xpath and '/' not in xpath: - xpath = qpath_to_xpath(xpath, self) - rest.insert(0, xpath) - self.known_cols[idx] = ':'.join(rest) + # TODO line below when when every asset is repopulated with `xpath` + sanitize_known_columns(self) return parse_known_cols(self.known_cols) diff --git a/kpi/models/import_export_task.py b/kpi/models/import_export_task.py index 1c7ad6c3e4..436eae476e 100644 --- a/kpi/models/import_export_task.py +++ b/kpi/models/import_export_task.py @@ -16,6 +16,7 @@ from backports.zoneinfo import ZoneInfo import constance +import formpack import requests from django.conf import settings from django.contrib.postgres.indexes import BTreeIndex, HashIndex @@ -24,7 +25,6 @@ from django.db.models import F from django.urls import reverse from django.utils.translation import gettext as t -import formpack from formpack.constants import ( KOBO_LOCK_SHEET, ) diff --git a/kpi/serializers/v2/asset.py b/kpi/serializers/v2/asset.py index b8fecd4973..afc3d699a7 100644 --- a/kpi/serializers/v2/asset.py +++ b/kpi/serializers/v2/asset.py @@ -18,6 +18,9 @@ from kobo.apps.reports.constants import FUZZY_VERSION_PATTERN from kobo.apps.reports.report_data import build_formpack +from kobo.apps.subsequences.utils.deprecation import ( + WritableAdvancedFeaturesField, +) from kobo.apps.trash_bin.exceptions import ( TrashIntegrityError, TrashTaskInProgressError, @@ -297,7 +300,7 @@ class AssetSerializer(serializers.HyperlinkedModelSerializer): report_custom = WritableJSONField(required=False) map_styles = WritableJSONField(required=False) map_custom = WritableJSONField(required=False) - advanced_features = WritableJSONField(required=False) + advanced_features = WritableAdvancedFeaturesField(required=False) advanced_submission_schema = serializers.SerializerMethodField() files = serializers.SerializerMethodField() analysis_form_json = serializers.SerializerMethodField() From 9c1061a63c4c825db3ac16c5ab83261baf27f1d7 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 3 Sep 2024 12:53:13 -0400 Subject: [PATCH 056/119] Update formpack dependencies --- dependencies/pip/dev_requirements.txt | 2 +- dependencies/pip/requirements.in | 2 +- dependencies/pip/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index a0d327ba18..a5b8744c83 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index da85d168d3..3a5e1a81d8 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -2,7 +2,7 @@ # https://github.com/bndr/pipreqs is a handy utility, too. 
# formpack --e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack # More up-to-date version of django-digest than PyPI seems to have. # Also, python-digest is an unlisted dependency thereof. diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index 3299d51d17..e7c527dca5 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@451df4cd2a0d614be69a3b3309259c67369f7efb#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in From 2cf67ef8456f9c2eb4fb0700f64cb842636aa705 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 3 Sep 2024 14:45:11 -0400 Subject: [PATCH 057/119] Update FormPack --- dependencies/pip/dev_requirements.txt | 2 +- dependencies/pip/requirements.in | 2 +- dependencies/pip/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index a5b8744c83..ea5267af66 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@5a8cda8cc37a56a2313c98b88fcacc18049ef477#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index 3a5e1a81d8..3ab614bb6d 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -2,7 +2,7 @@ # https://github.com/bndr/pipreqs is a handy utility, too. # formpack --e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@5a8cda8cc37a56a2313c98b88fcacc18049ef477#egg=formpack # More up-to-date version of django-digest than PyPI seems to have. # Also, python-digest is an unlisted dependency thereof. 
diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index e7c527dca5..5d3f83fe9f 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -8,7 +8,7 @@ # via -r dependencies/pip/requirements.in -e git+https://github.com/trevoriancox/django-dont-vary-on.git@01a804122b7ddcdc22f50b40993f91c27b03bef6#egg=django-dont-vary-on # via -r dependencies/pip/requirements.in --e git+https://github.com/kobotoolbox/formpack.git@eea375baccce809b9bfe393ec1abe1245e63ab04#egg=formpack +-e git+https://github.com/kobotoolbox/formpack.git@5a8cda8cc37a56a2313c98b88fcacc18049ef477#egg=formpack # via -r dependencies/pip/requirements.in -e git+https://github.com/dimagi/python-digest@5c94bb74516b977b60180ee832765c0695ff2b56#egg=python_digest # via -r dependencies/pip/requirements.in From 6723dd6b140833597f3cf28181935ae298505e82 Mon Sep 17 00:00:00 2001 From: Leszek Date: Wed, 4 Sep 2024 14:13:12 +0200 Subject: [PATCH 058/119] Fix isProcessingRouteActive having problems with "|" character --- jsapp/js/components/processing/routes.utils.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/jsapp/js/components/processing/routes.utils.ts b/jsapp/js/components/processing/routes.utils.ts index 45ba23cd80..4e669aa708 100644 --- a/jsapp/js/components/processing/routes.utils.ts +++ b/jsapp/js/components/processing/routes.utils.ts @@ -137,7 +137,14 @@ export function isAnyProcessingRouteActive(): boolean { * is active) */ export function isProcessingRouteActive(targetRoute: string) { - return getCurrentPath().startsWith(applyCurrentRouteParams(targetRoute)); + // We need to apply actual values for the route definition `:param`s here. + // After that we use `decodeURI` to ensure that `|` in the test route is the + // same as `|` in the `getCurrentPath`. Without this we would be comparing + // string that could be "exactly" the same, just one containing `|` and + // the other `%7C` (ASCII for `|`) - resulting in incorrect `false`. 
+  const routeToTest = decodeURI(applyCurrentRouteParams(targetRoute));
+
+  return getCurrentPath().startsWith(routeToTest);
 }
 
 /**

From 2dabaec8ad73a4438c02af18ebd6a5e6153b6478 Mon Sep 17 00:00:00 2001
From: Olivier Leger
Date: Wed, 4 Sep 2024 15:56:26 -0400
Subject: [PATCH 059/119] Fix wrong attribute

---
 kpi/deployment_backends/openrosa_backend.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py
index 53579d5a4b..7816df6709 100644
--- a/kpi/deployment_backends/openrosa_backend.py
+++ b/kpi/deployment_backends/openrosa_backend.py
@@ -1385,7 +1385,7 @@ def _delete_openrosa_metadata(
         """
         # Delete MetaData object and its related file (on storage)
         try:
-            metadata = MetaData.objects.get(pk=metadata_file_['id'])
+            metadata = MetaData.objects.get(pk=metadata_file_['pk'])
         except MetaData.DoesNotExist:
             pass
         else:

From 48b4a8164f4f9d67c16c8bb184a4ace1203aefe0 Mon Sep 17 00:00:00 2001
From: Leszek
Date: Wed, 4 Sep 2024 23:08:03 +0200
Subject: [PATCH 060/119] further improve isProcessingRouteActive in some edge cases

---
 .../permissions/userAssetPermsEditor.component.tsx | 6 +++---
 jsapp/js/components/processing/routes.utils.ts | 6 ++++--
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx b/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx
index 4e3cb20062..67e03765fe 100644
--- a/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx
+++ b/jsapp/js/components/permissions/userAssetPermsEditor.component.tsx
@@ -478,10 +478,10 @@ export default class UserAssetPermsEditor extends React.Component<
         false,
         true
       );
-      for (const [, qPath] of Object.entries(flatPaths)) {
+      for (const [, path] of Object.entries(flatPaths)) {
         output.push({
-          value: qPath,
-          label: qPath,
+          value: path,
+          label: path,
         });
       }
     }

diff --git a/jsapp/js/components/processing/routes.utils.ts b/jsapp/js/components/processing/routes.utils.ts
index 4e669aa708..ec460342b9 100644
--- a/jsapp/js/components/processing/routes.utils.ts
+++ b/jsapp/js/components/processing/routes.utils.ts
@@ -143,8 +143,10 @@ export function isProcessingRouteActive(targetRoute: string) {
   // string that could be "exactly" the same, just one containing `|` and
   // the other `%7C` (ASCII for `|`) - resulting in incorrect `false`.
   const routeToTest = decodeURI(applyCurrentRouteParams(targetRoute));
-
-  return getCurrentPath().startsWith(routeToTest);
+  // Sometimes the current path contains `|` and sometimes `%7C`, so we need to
+  // be extra safe here.
+ const currentPath = decodeURI(getCurrentPath()); + return currentPath.startsWith(routeToTest); } /** From ae91f5ccf3e5127546645241103a3c7d24425444 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 5 Sep 2024 13:25:38 -0400 Subject: [PATCH 061/119] Fix DDA bug without service account --- kobo/apps/openrosa/apps/api/tools.py | 40 +++++++++++-------- .../apps/api/viewsets/xform_list_api.py | 40 ++++++++++++++++++- .../openrosa/apps/main/models/meta_data.py | 34 ---------------- kpi/views/v2/paired_data.py | 19 ++++----- 4 files changed, 72 insertions(+), 61 deletions(-) diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py index 34fc551cf7..bab408a532 100644 --- a/kobo/apps/openrosa/apps/api/tools.py +++ b/kobo/apps/openrosa/apps/api/tools.py @@ -1,11 +1,8 @@ -# coding: utf-8 import inspect import os import re -import time from datetime import datetime -import requests import rest_framework.views as rest_framework_views from django import forms from django.conf import settings @@ -14,6 +11,7 @@ HttpResponseNotFound, HttpResponseRedirect, ) +from django.urls import resolve, Resolver404 from django.utils.translation import gettext as t from rest_framework import exceptions from rest_framework.request import Request @@ -35,6 +33,11 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) +from kpi.views.v2.paired_data import ( + PairedDataViewset, + SubmissionXMLRenderer, + XMLExternalDataPermission, +) DECIMAL_PRECISION = 2 @@ -136,7 +139,8 @@ class TagForm(forms.Form): def get_media_file_response( - metadata: MetaData, request: Request = None + metadata: MetaData, + request: Request = None, ) -> HttpResponse: if metadata.data_file: file_path = metadata.data_file.name @@ -156,18 +160,22 @@ def get_media_file_response( return HttpResponseRedirect(metadata.data_value) # When `request.user` is authenticated, their authentication is lost with - # an HTTP redirection. We use KoBoCAT to proxy the response from KPI - # Send the request internally to avoid extra traffic on the public interface - internal_url = metadata.data_value.replace( - settings.KOBOFORM_URL, settings.KOBOFORM_INTERNAL_URL - ) - response = requests.get(internal_url) - - return HttpResponse( - content=response.content, - status=response.status_code, - content_type=response.headers['content-type'], - ) + # an HTTP redirection. 
We need to call KPI viewset directly + internal_url = metadata.data_value.replace(settings.KOBOFORM_URL, '') + try: + resolver_match = resolve(internal_url) + except Resolver404: + return HttpResponseNotFound() + + args = resolver_match.args + kwargs = resolver_match.kwargs + + paired_data_viewset = PairedDataViewset.as_view({'get': 'external'}) + django_http_request = request._request + paired_data_viewset.cls.permission_classes = [XMLExternalDataPermission] + paired_data_viewset.cls.renderer_classes = [SubmissionXMLRenderer] + paired_data_viewset.cls.filter_backends = [] + return paired_data_viewset(request=django_http_request, *args, **kwargs) def get_view_name(view_obj): diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py index 37481d4f30..789cfa4a83 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py @@ -1,4 +1,3 @@ -# coding: utf-8 from datetime import datetime try: from zoneinfo import ZoneInfo @@ -8,7 +7,9 @@ from django.conf import settings from django.http import Http404 from django.shortcuts import get_object_or_404 +from django.utils import timezone from rest_framework import permissions, status +from rest_framework.request import Request from rest_framework.response import Response from rest_framework.decorators import action @@ -141,7 +142,7 @@ def manifest(self, request, *args, **kwargs): # Expired files may have an out-of-date hash which needs to be refreshed # before being exposed to the serializer for obj in object_list: - if not obj.has_expired: + if not self._is_metadata_expired(obj, request): media_files[obj.pk] = obj continue expired_objects = True @@ -188,3 +189,38 @@ def media(self, request, *args, **kwargs): return self.get_response_for_head_request() return get_media_file_response(meta_obj, request) + + @staticmethod + def _is_metadata_expired(obj: MetaData, request: Request) -> bool: + """ + It validates whether the file has been modified for the last X minutes. + (where `X` equals `settings.PAIRED_DATA_EXPIRATION`) + + Notes: Only `xml-external` (paired data XML) files expire. + """ + if not obj.is_paired_data: + return False + + timedelta = timezone.now() - obj.date_modified + if timedelta.total_seconds() > settings.PAIRED_DATA_EXPIRATION: + # Force external xml regeneration + get_media_file_response(obj, request) + + # We update the modification time here to avoid requesting that KPI + # resynchronize this file multiple times per the + # `PAIRED_DATA_EXPIRATION` period. However, this introduces a race + # condition where it's possible that KPI *deletes* this file before + # we attempt to update it. We avoid that by locking the row + + # TODO: this previously used `select_for_update()`, which locked + # the object for the duration of the *entire* request due to + # Django's `ATOMIC_REQUESTS`. `ATOMIC_REQUESTS` is not True by + # default anymore and the `update()` method is itself + # atomic since it does not reference any value previously read + # from the database. Is that enough? 
+ MetaData.objects.filter(pk=obj.pk).update( + date_modified=timezone.now() + ) + return True + + return False diff --git a/kobo/apps/openrosa/apps/main/models/meta_data.py b/kobo/apps/openrosa/apps/main/models/meta_data.py index be49726e3c..0e808d314a 100644 --- a/kobo/apps/openrosa/apps/main/models/meta_data.py +++ b/kobo/apps/openrosa/apps/main/models/meta_data.py @@ -167,40 +167,6 @@ def save(self, *args, **kwargs): def md5_hash(self) -> str: return self._set_hash() - @property - def has_expired(self) -> bool: - """ - It validates whether the file has been modified for the last X minutes. - (where `X` equals `settings.PAIRED_DATA_EXPIRATION`) - - Notes: Only `xml-external` (paired data XML) files expire. - """ - if not self.is_paired_data: - return False - - timedelta = timezone.now() - self.date_modified - if timedelta.total_seconds() > settings.PAIRED_DATA_EXPIRATION: - # No need to download the whole file. Sending a `HEAD` request to - # KPI will cause KPI to delete and recreate the file in KoBoCAT if - # needed - requests.head(self.data_value) - # We update the modification time here to avoid requesting that KPI - # resynchronize this file multiple times per the - # `PAIRED_DATA_EXPIRATION` period. However, this introduces a race - # condition where it's possible that KPI *deletes* this file before - # we attempt to update it. We avoid that by locking the row - ### TODO: this previously used `select_for_update()`, which locked - ### the object for the duration of the *entire* request due to - ### Django's `ATOMIC_REQUESTS`. The `update()` method is itself - ### atomic since it does not reference any value previously read - ### from the database. Is that enough? - MetaData.objects.filter(pk=self.pk).update( - date_modified=timezone.now() - ) - return True - - return False - @property def filename(self) -> str: diff --git a/kpi/views/v2/paired_data.py b/kpi/views/v2/paired_data.py index 5b67b653cd..8331a4ebd1 100644 --- a/kpi/views/v2/paired_data.py +++ b/kpi/views/v2/paired_data.py @@ -22,9 +22,9 @@ from kpi.utils.xml import strip_nodes, add_xml_declaration -class PairedDataViewset(AssetNestedObjectViewsetMixin, - NestedViewSetMixin, - viewsets.ModelViewSet): +class PairedDataViewset( + AssetNestedObjectViewsetMixin, NestedViewSetMixin, viewsets.ModelViewSet +): """ ## List of paired project endpoints @@ -179,12 +179,13 @@ class PairedDataViewset(AssetNestedObjectViewsetMixin, permission_classes = (AssetEditorPermission,) serializer_class = PairedDataSerializer - @action(detail=True, - methods=['GET'], - permission_classes=[XMLExternalDataPermission], - renderer_classes=[SubmissionXMLRenderer], - filter_backends=[], - ) + @action( + detail=True, + methods=['GET'], + permission_classes=[XMLExternalDataPermission], + renderer_classes=[SubmissionXMLRenderer], + filter_backends=[], + ) def external(self, request, paired_data_uid, **kwargs): """ Returns an XML which contains data submitted to paired asset From d7bd068b1b2d050d9cbec147d805fd8ca50f9bcd Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 5 Sep 2024 14:51:06 -0400 Subject: [PATCH 062/119] Fix tests --- kpi/deployment_backends/openrosa_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index b82f452724..59faeb2662 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -339,7 +339,7 @@ def duplicate_submission( # Cast to list to help unit tests to pass. 
return self._rewrite_json_attachment_urls( - next(self.get_submissions(user, submission_id=instance.pk)), request + self.get_submission(user=user, submission_id=instance.pk), request ) def edit_submission( From 9e1034138c23b64a1f6bbe1bdeaaceea949f2998 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 12 Sep 2024 10:26:00 -0400 Subject: [PATCH 063/119] Add reverse operation when deleting old tables --- .../main/migrations/0015_drop_old_restservice_tables.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py b/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py index 1b58d492d7..1ab76440fa 100644 --- a/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py +++ b/kobo/apps/openrosa/apps/main/migrations/0015_drop_old_restservice_tables.py @@ -56,6 +56,10 @@ def get_operations(): return operations +def noop(*args, **kwargs): + pass + + def print_migration_warning(apps, schema_editor): if settings.TESTING or settings.SKIP_HEAVY_MIGRATIONS: return @@ -74,4 +78,7 @@ class Migration(migrations.Migration): ('main', '0014_drop_old_formdisclaimer_tables'), ] - operations = [migrations.RunPython(print_migration_warning), *get_operations()] + operations = [ + migrations.RunPython(print_migration_warning, noop), + *get_operations(), + ] From fde9526aecdce8a78c0fd879da787fec070012bc Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 12 Sep 2024 10:26:41 -0400 Subject: [PATCH 064/119] Do not use "safe_create_instance" anymore in DeploymentBackend classes --- kpi/deployment_backends/base_backend.py | 50 +++++++++++++-------- kpi/deployment_backends/mock_backend.py | 7 ++- kpi/deployment_backends/openrosa_backend.py | 19 ++++---- 3 files changed, 42 insertions(+), 34 deletions(-) diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index c848803e7b..aed6e231e9 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -22,6 +22,9 @@ from rest_framework.pagination import _positive_int as positive_int from shortuuid import ShortUUID +from kobo.apps.openrosa.libs.utils.logger_tools import ( + http_open_rosa_error_handler, +) from kpi.constants import ( SUBMISSION_FORMAT_TYPE_XML, SUBMISSION_FORMAT_TYPE_JSON, @@ -149,7 +152,7 @@ def bulk_update_submissions( ) } - backend_responses = [] + backend_results = [] for submission in submissions: xml_parsed = fromstring_preserve_root_xmlns(submission) @@ -177,19 +180,24 @@ def bulk_update_submissions( for path, value in update_data.items(): edit_submission_xml(xml_parsed, path, value) - backend_response = self.store_submission( - user, - xml_tostring(xml_parsed), - _uuid, - request=kwargs.get('request'), - ) - backend_responses.append( - { - 'uuid': _uuid, - 'response': backend_response, - } - ) - return self.prepare_bulk_update_response(backend_responses) + request = kwargs.get('request') + with http_open_rosa_error_handler( + lambda: self.store_submission( + user, + xml_tostring(xml_parsed), + _uuid, + request=request, + ), + request, + ) as handler: + backend_results.append( + { + 'uuid': _uuid, + 'error': handler.error, + 'result': handler.func_return + } + ) + return self.prepare_bulk_update_response(backend_results) @abc.abstractmethod def calculated_submission_count(self, user: settings.AUTH_USER_MODEL, **kwargs): @@ -384,6 +392,10 @@ def last_submission_time(self): def mongo_userform_id(self): return None + @abc.abstractmethod 
+ def prepare_bulk_update_response(self, backend_results: list[dict]) -> dict: + pass + @abc.abstractmethod def redeploy(self, active: bool = None): pass @@ -673,11 +685,11 @@ def validate_access_with_partial_perms( allowed_submission_ids = [] if not submission_ids: - # if no submission ids are provided, the back end must rebuild the + # If no submission ids are provided, the back end must rebuild the # query to retrieve the related submissions. Unfortunately, the # current back end (KoBoCAT) does not support row level permissions. # Thus, we need to fetch all the submissions the user is allowed to - # see in order to to compare the requested subset of submissions to + # see in order to compare the requested subset of submissions to # all all_submissions = self.get_submissions( user=user, @@ -692,8 +704,8 @@ def validate_access_with_partial_perms( if not allowed_submission_ids: raise PermissionDenied - # if `query` is not provided, the action is performed on all - # submissions. There are no needs to go further. + # If `query` is not provided, the action is performed on all + # submissions. There is no need to go further. if not query: return allowed_submission_ids @@ -719,7 +731,7 @@ def validate_access_with_partial_perms( and set(requested_submission_ids).issubset(allowed_submission_ids)) or sorted(requested_submission_ids) == sorted(submission_ids) ): - # Regardless of whether or not the request contained a query or a + # Regardless of whether the request contained a query or a # list of IDs, always return IDs here because the results of a # query may contain submissions that the requesting user is not # allowed to access. For example, diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 4dfa0b8e28..9124488282 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -12,7 +12,7 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.libs.utils.logger_tools import ( dict2xform, - safe_create_instance, + create_instance, ) from kpi.constants import PERM_ADD_SUBMISSIONS, SUBMISSION_FORMAT_TYPE_JSON from kpi.tests.utils.dicts import nested_dict_from_keys @@ -108,7 +108,8 @@ class FakeRequest: xml_string = dict2xform(sub_copy, self.xform.id_string) xml_file = io.StringIO(xml_string) - error, instance = safe_create_instance( + + instance = create_instance( owner_username, xml_file, media_files, @@ -117,8 +118,6 @@ class FakeRequest: ), request=request, ) - if error: - raise Exception(error) # Inject (or update) real PKs in submission… submission['_id'] = instance.pk diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index c580714eb2..72780079d4 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -41,7 +41,6 @@ from kobo.apps.openrosa.libs.utils.logger_tools import ( create_instance, publish_xls_form, - safe_create_instance, ) from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter @@ -86,7 +85,7 @@ class OpenRosaDeploymentBackend(BaseDeploymentBackend): """ - Used to deploy a project into KoboCAT. 
+ Deploy a project to OpenRosa server """ SYNCED_DATA_FILE_TYPES = { @@ -886,7 +885,7 @@ def rename_enketo_id_key(self, previous_owner_username: str): pass @staticmethod - def prepare_bulk_update_response(backend_responses: list) -> dict: + def prepare_bulk_update_response(backend_results: list[dict]) -> dict: """ Formatting the response to allow for partial successes to be seen more explicitly. @@ -894,13 +893,11 @@ def prepare_bulk_update_response(backend_responses: list) -> dict: results = [] cpt_successes = 0 - for backend_response in backend_responses: - uuid = backend_response['uuid'] - error, instance = backend_response['response'] - - message = t('Something went wrong') - status_code = status.HTTP_400_BAD_REQUEST - if not error: + for backend_result in backend_results: + uuid = backend_result['uuid'] + if message := backend_result['error']: + status_code = status.HTTP_400_BAD_REQUEST + else: cpt_successes += 1 message = t('Successful submission') status_code = status.HTTP_201_CREATED @@ -1110,7 +1107,7 @@ def store_submission( media_file for media_file in attachments.values() ) - return safe_create_instance( + return create_instance( username=self.asset.owner.username, xml_file=ContentFile(xml_submission), media_files=media_files, From 5b4173f72885b003bb8666481d453004983830ca Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 12 Sep 2024 10:27:16 -0400 Subject: [PATCH 065/119] Remove import --- kobo/apps/hook/tests/hook_test_case.py | 1 - 1 file changed, 1 deletion(-) diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py index e040fa30dc..7a8a90e5d2 100644 --- a/kobo/apps/hook/tests/hook_test_case.py +++ b/kobo/apps/hook/tests/hook_test_case.py @@ -6,7 +6,6 @@ import responses from django.conf import settings from django.urls import reverse -from ipaddress import ip_address from rest_framework import status from kpi.constants import SUBMISSION_FORMAT_TYPE_JSON, SUBMISSION_FORMAT_TYPE_XML From 2c945a0b1491304812501d38777ffcb585496920 Mon Sep 17 00:00:00 2001 From: Leszek Date: Fri, 13 Sep 2024 11:07:47 +0200 Subject: [PATCH 066/119] Create `UniversalTable` and `PaginatedQueryUniversalTable` components Also update few `angle-*` icons --- ...paginatedQueryUniversalTable.component.tsx | 78 +++++ .../universalTable.component.tsx | 325 ++++++++++++++++++ .../universalTable/universalTable.module.scss | 232 +++++++++++++ jsapp/svg-icons/angle-bar-left.svg | 1 + jsapp/svg-icons/angle-bar-right.svg | 1 + jsapp/svg-icons/angle-down.svg | 2 +- jsapp/svg-icons/angle-left.svg | 2 +- jsapp/svg-icons/angle-right.svg | 2 +- jsapp/svg-icons/angle-up.svg | 2 +- package-lock.json | 45 +++ package.json | 1 + 11 files changed, 687 insertions(+), 4 deletions(-) create mode 100644 jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx create mode 100644 jsapp/js/universalTable/universalTable.component.tsx create mode 100644 jsapp/js/universalTable/universalTable.module.scss create mode 100644 jsapp/svg-icons/angle-bar-left.svg create mode 100644 jsapp/svg-icons/angle-bar-right.svg diff --git a/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx b/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx new file mode 100644 index 0000000000..b0af53d43a --- /dev/null +++ b/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx @@ -0,0 +1,78 @@ +// Libraries +import React, {useState, useMemo} from 'react'; + +// Partial components +import UniversalTable from './universalTable.component'; + +// Types +import type 
{UseQueryResult} from '@tanstack/react-query';
+import type {PaginatedResponse} from 'js/dataInterface';
+import type {UniversalTableColumn} from './universalTable.component';
+
+interface PaginatedQueryHook extends Function {
+  (limit: number, offset: number): UseQueryResult>;
+}
+
+interface PaginatedQueryUniversalTableProps {
+  queryHook: PaginatedQueryHook;
+  // Below are props from `UniversalTable` that should come from the parent
+  // component (these are kind of "configuration" props). The other
+  // `UniversalTable` props are being handled here internally.
+  columns: UniversalTableColumn[];
+}
+
+const PAGE_SIZES = [10, 30, 50, 100];
+const DEFAULT_PAGE_SIZE = PAGE_SIZES[0];
+
+/**
+ * This is a wrapper component for `UniversalTable`. It should be used in
+ * situations where you use `react-query` to fetch data, and the data is
+ * paginated. This component handles pagination in a neat, DRY way.
+ *
+ * All the rest of the functionalities are the same as `UniversalTable`.
+ */
+export default function PaginatedQueryUniversalTable(
+  props: PaginatedQueryUniversalTableProps
+) {
+  const [pagination, setPagination] = useState({
+    limit: DEFAULT_PAGE_SIZE,
+    offset: 0,
+  });
+
+  const paginatedQuery = props.queryHook(pagination.limit, pagination.offset);
+
+  const availablePages = useMemo(
+    () => Math.ceil((paginatedQuery.data?.count ?? 0) / pagination.limit),
+    [paginatedQuery.data, pagination]
+  );
+
+  const currentPageIndex = useMemo(
+    () => Math.ceil(pagination.offset / pagination.limit),
+    [pagination]
+  );
+
+  const data = paginatedQuery.data?.results || [];
+
+  return (
+
+      columns={props.columns}
+      data={data}
+      pageIndex={currentPageIndex}
+      pageCount={availablePages}
+      pageSize={pagination.limit}
+      pageSizes={PAGE_SIZES}
+      onRequestPaginationChange={(newPageInfo, oldPageInfo) => {
+        // Calculate new offset and limit from what we've got
+        let newOffset = newPageInfo.pageIndex * newPageInfo.pageSize;
+        const newLimit = newPageInfo.pageSize;
+
+        // If we change page size, we switch back to first page
+        if (newPageInfo.pageSize !== oldPageInfo.pageSize) {
+          newOffset = 0;
+        }
+
+        setPagination({limit: newLimit, offset: newOffset});
+      }}
+    />
+  );
+}

diff --git a/jsapp/js/universalTable/universalTable.component.tsx b/jsapp/js/universalTable/universalTable.component.tsx
new file mode 100644
index 0000000000..c221c77d06
--- /dev/null
+++ b/jsapp/js/universalTable/universalTable.component.tsx
@@ -0,0 +1,325 @@
+// Libraries
+import React from 'react';
+import cx from 'classnames';
+import {
+  flexRender,
+  getCoreRowModel,
+  useReactTable,
+  type CellContext,
+  type Column,
+  type PaginationState,
+  type TableOptions,
+} from '@tanstack/react-table';
+
+// Partial components
+import Button from 'js/components/common/button';
+import KoboSelect from 'js/components/common/koboSelect';
+
+// Utilities
+import {generateUuid} from 'js/utils';
+
+// Styles
+import styles from './universalTable.module.scss';
+
+export interface UniversalTableColumn {
+  /**
+   * Pairs to data object properties. It is using dot notation, so it's possible
+   * to match data from a nested object :ok:.
+   */
+  key: string;
+  /**
+   * Most of the time this would be just a string, but we are open to
+   * anything really.
+   */
+  label: React.ReactNode;
+  isPinned?: boolean;
+  /**
+   * This is an override for the default width of a column. Use it if you need more
+   * space for your data, or if you display something very short.
+   */
+  size?: number;
+  /**
+   * This is an optional formatter function that will be used when rendering
+   * the cell value. Without it a literal text value will be rendered.
+   */
+  cellFormatter?: (value: string) => React.ReactNode;
+}
+
+interface UniversalTableProps {
+  /** A list of column definitions */
+  columns: UniversalTableColumn[];
+  data: DataItem[];
+  // PAGINATION
+  // To see footer with pagination you need to pass all these below:
+  /** Starts with `0` */
+  pageIndex?: number;
+  /** Total number of pages of data. */
+  pageCount?: number;
+  /**
+   * One of `pageSizes`. It is de facto the `limit` from the `offset` + `limit`
+   * pair used for paginating the endpoint.
+   */
+  pageSize?: number;
+  pageSizes?: number[];
+  /**
+   * A way for the table to say "user wants to change pagination". It's being
+   * triggered for both page size and page changes.
+   */
+  onRequestPaginationChange?: (
+    /**
+     * Provides an object with current `pageIndex` and `pageSize` (one or both
+     * values are new). The second object shows previous pagination, use it to
+     * compare what has happened :)
+     */
+    newPageInfo: PaginationState,
+    oldPageInfo: PaginationState
+  ) => void;
+  // ENDPAGINATION
+}
+
+const DEFAULT_COLUMN_SIZE = {
+  size: 200, // starting column size
+  minSize: 100, // enforced during column resizing
+  maxSize: 600, // enforced during column resizing
+};
+
+/**
+ * This is a nice wrapper for the `@tanstack/react-table`. It uses only
+ * a limited selection of all possible features, and provides consistent looks.
+ *
+ * You are responsible for passing column definitions (important!) and the data
+ * to match these definitions (obviously). When using it, you need to also pass
+ * the TS type of the data item, so it knows what to expect.
+ *
+ * It has column pinning (on column definition level, i.e. you need to tell it
+ * which columns are pinned), and column resizing (works out of the box!).
+ *
+ * It has (optional) pagination. If you pass all the required props, you can
+ * expect to get user pagination requests through the callback function named
+ * `onRequestPaginationChange`.
+ */
+export default function UniversalTable(
+  props: UniversalTableProps
+) {
+  function getCommonClassNames(column: Column) {
+    return cx({
+      [styles.isPinned]: Boolean(column.getIsPinned()),
+    });
+  }
+
+  const columns = props.columns.map((columnDef) => {
+    return {
+      accessorKey: columnDef.key,
+      header: () => columnDef.label,
+      cell: (cellProps: CellContext) => {
+        if (columnDef.cellFormatter) {
+          return columnDef.cellFormatter(cellProps.getValue());
+        } else {
+          return cellProps.renderValue();
+        }
+      },
+      size: columnDef.size || DEFAULT_COLUMN_SIZE.size,
+    };
+  });
+
+  // We define options as a separate object to make the optional pagination truly
+  // optional.
+  const options: TableOptions = {
+    columns: columns,
+    data: props.data,
+    getCoreRowModel: getCoreRowModel(),
+    columnResizeMode: 'onChange',
+    // override default column sizing
+    defaultColumn: DEFAULT_COLUMN_SIZE,
+  };
+
+  options.state = {};
+
+  // Set separately to not get overridden by pagination options. This is a list
+  // of columns that are pinned to the left side.
+ const pinnedColumns = props.columns + .filter((col) => col.isPinned) + .map((col) => col.key); + options.state.columnPinning = {left: pinnedColumns || []}; + + const hasPagination = ( + props.pageIndex !== undefined && + props.pageCount !== undefined && + props.pageSize !== undefined && + props.pageSizes !== undefined && + props.onRequestPaginationChange !== undefined + ); + + // Add pagination related options if needed + if ( + hasPagination && + // `hasPagination` handles everything, but we need these two for TypeScript: + props.pageSize !== undefined && + props.pageIndex !== undefined + ) { + options.manualPagination = true; + options.pageCount = props.pageCount; + options.state.pagination = { + pageSize: props.pageSize, + pageIndex: props.pageIndex, + }; + //update the pagination state when internal APIs mutate the pagination state + options.onPaginationChange = (updater) => { + // make sure updater is callable (to avoid typescript warning) + if (typeof updater !== 'function') { + return; + } + + // The `table` below is defined before usage, but we are sure it will be + // there, given this is a callback function for it. + // eslint-disable-next-line @typescript-eslint/no-use-before-define + const oldPageInfo = table.getState().pagination; + + const newPageInfo = updater(oldPageInfo); + + if (props.onRequestPaginationChange) { + props.onRequestPaginationChange(newPageInfo, oldPageInfo); + } + }; + } + + // Here we build the headless table that we would render below + const table = useReactTable(options); + + const currentPageString = String(table.getState().pagination.pageIndex + 1); + const totalPagesString = String(table.getPageCount()); + + return ( +
+
+
+ + + {table.getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map((header) => ( + + ))} + + ))} + + + {table.getRowModel().rows.map((row) => ( + + {row.getVisibleCells().map((cell) => ( + + ))} + + ))} + +
+ {!header.isPlaceholder && + flexRender( + header.column.columnDef.header, + header.getContext() + ) + } + + {/* + TODO: if we ever see performance issues while resizing, + there is a way to fix that, see: + https://tanstack.com/table/latest/docs/guide/column-sizing#advanced-column-resizing-performance + */} +
+                      <div
+                        onDoubleClick={() => header.column.resetSize()}
+                        onMouseDown={header.getResizeHandler()}
+                        onTouchStart={header.getResizeHandler()}
+                        className={cx(styles.resizer, {
+                          [styles.isResizing]: header.column.getIsResizing(),
+                        })}
+                      />
+
+ {flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} +
+
+ + {hasPagination && ( +
+
+
+ + { + return { + value: String(pageSize), + label: t('##number## rows').replace('##number##', String(pageSize)), + }; + })} + selectedOption={String(table.getState().pagination.pageSize)} + onChange={(newSelectedOption: string | null) => { + table.setPageSize(Number(newSelectedOption)); + }} + placement='up-left' + /> +
+ )} +
+
+ ); +} diff --git a/jsapp/js/universalTable/universalTable.module.scss b/jsapp/js/universalTable/universalTable.module.scss new file mode 100644 index 0000000000..58f3196a98 --- /dev/null +++ b/jsapp/js/universalTable/universalTable.module.scss @@ -0,0 +1,232 @@ +@use 'scss/colors'; +@use 'scss/mixins'; + +// Because of scrollable content of the table, we need to do something more +// fancy with rounded corners. +// The actual radius (used for outer wrapper's borders): +$universal-table-border-radius: 6px; +// The radius of the inner elements (used for elements with backgrounds): +$universal-table-border-radius-inner: $universal-table-border-radius - 2px; + +// TODO see if this needs to be something from `z-indexes` file, or if such +// local numbers would be ok. +$z-index-resizer: 2; +$z-index-pinned: 3; +$z-index-pinned-header: 4; +$z-index-resizer-active: 5; + +// We need this to have `overflow: hidden` on the table, but we can't do it on +// one of the other wrappers, as we need borders (sorry!). +.universalTableRootContainer { + overflow: hidden; +} + +.universalTableRoot { + border: 1px solid colors.$kobo-gray-200; + background-color: colors.$kobo-white; + border-radius: $universal-table-border-radius; + width: 100%; + // We set it here intentionally, so noone will think about setting it to + // `hidden`, as it breaks some non obvious things in the table (e.g. the page + // size dropdown in footer). + overflow: visible; +} + +.tableContainer { + overflow-x: auto; + position: relative; + border-radius: $universal-table-border-radius-inner; +} + +.table { + // reset table browser styles first + margin: 0; + padding: 0; + background: none; + border: none; + border-spacing: 0; + background-image: none; + // the actual styles: + background-color: colors.$kobo-white; + // box-shadow and borders will not work with positon: sticky otherwise + border-collapse: separate !important; + // This is needed so that the table takes whole width if there is small amount + // of columns + min-width: 100%; +} + +.tableCell { + background-color: colors.$kobo-white; +} + +.tableHeaderCell { + background-color: colors.$kobo-gray-100; + color: colors.$kobo-gray-700; + position: relative; + font-size: 12px; + font-weight: normal; + text-align: initial; +} + +.tableCell, +.tableHeaderCell { + padding: 12px 20px; + border-bottom: 1px solid colors.$kobo-gray-200; +} + +// ----------------------------------------------------------------------------- +// Pinned column styles: +.tableCell.isPinned, +.tableHeaderCell.isPinned { + position: sticky; + // react-table can handle left and right pinning, but we are only interested + // in left pinning here + left: 0; + border-right: 1px solid colors.$kobo-gray-200; + + &::after { + content: ''; + position: absolute; + left: calc(100% + 1px); + top: 0; + height: 100%; + width: 7px; + background: linear-gradient(to right, rgba(0, 0, 0, 6%), transparent); + } +} + +.tableCell.isPinned { + // For sure it needs to be over .resizer, so it doesn't appear in weird/funny + // position when scrolling table horizontally + z-index: $z-index-pinned; +} + +.tableHeaderCell.isPinned { + z-index: $z-index-pinned-header; +} +// ----------------------------------------------------------------------------- + +// ----------------------------------------------------------------------------- +// Column resizing styles: +// We display resizer of a cell on a right side of it, the left side would be +// handled by previous cell. 
To make things easier, left side resizer of current +// cell will be a "fake" resizer. +.tableHeaderCell::before, +.resizer { + position: absolute; + background-color: colors.$kobo-gray-500; + height: 28px; + width: 1px; + top: 50%; + transform: translateY(-50%); + z-index: $z-index-resizer; + cursor: col-resize; + touch-action: none; + user-select: none; + border-radius: 2px; + // We start off with these not being visible + display: none; +} + +.tableHeaderCell::before { + content: ''; + left: -1px; + pointer-events: none; +} + +.resizer { + right: 0; +} + +// We want the resizer to have more active space than what's being seen. This +// will improve UX by makin it easier to aim and hit it :) +.resizer::after { + content: ''; + position: absolute; + background-color: transparent; + width: 24px; + height: 150%; + top: -25%; + left: -12px; +} + +// This is the line that we display while resizing the table. It takes whole +// height of the table. +.resizer::before { + display: none; + content: ''; + position: absolute; + top: -24px; + width: 100%; + // We want the line to be seen as taking the whole table, but in reality it + // has the height of the viewport - if user has a really long table, and tries + // to scroll down while dragging the resizer, they will possibly see that + // the line has its limits. We are ok with that :ok:. + height: 100vh; + background-color: colors.$kobo-blue; +} + +.resizer:hover { + background-color: colors.$kobo-blue; + outline: 4px solid colors.$kobo-light-blue; +} + +.resizer.isResizing { + background-color: colors.$kobo-blue; + outline: none; + z-index: $z-index-resizer; +} + +// We display two resizers when mouse is over the cell for them. We also display +// them while resizing is being done (useful for a moment, when user drags +// the resizer further away from the cell, and it didn't move yet due to +// animation happening or lag). +// We want to display resizer of this cell, and a fake resizer on the left side. +// When user moves mouse to the fake resizer, the previous cell resizer (so +// an actual one) will be used. +.tableHeaderCell:hover::before, +.tableHeaderCell:hover .resizer, +.resizer.isResizing, +.resizer.isResizing::before { + display: initial; +} + +// We need the resizer to appear over the pinned column - this is needed for +// a moment when we resize pinned column. 
+.resizer.isResizing, +.resizer.isResizing::before { + z-index: $z-index-resizer-active; +} + +// We need this to avoid having empty space to the right of the last table +// column due to the resizer active space ::after "hack" +.tableHeaderCell:last-child .resizer::after { + width: 12px; +} +// ----------------------------------------------------------------------------- + +// ----------------------------------------------------------------------------- +// Table footer and pagination styles: +.tableFooter { + @include mixins.centerRowFlex; + justify-content: space-between; + background-color: colors.$kobo-gray-100; + padding: 10px 20px; + border-bottom-left-radius: $universal-table-border-radius-inner; + border-bottom-right-radius: $universal-table-border-radius-inner; +} + +.pagination { + @include mixins.centerRowFlex; +} + +.paginationNumbering { + display: inline; + margin: 0 15px; +} + +.pageSizeSelect { + width: auto !important; + min-width: 120px; +} +// ----------------------------------------------------------------------------- diff --git a/jsapp/svg-icons/angle-bar-left.svg b/jsapp/svg-icons/angle-bar-left.svg new file mode 100644 index 0000000000..90c2970405 --- /dev/null +++ b/jsapp/svg-icons/angle-bar-left.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/jsapp/svg-icons/angle-bar-right.svg b/jsapp/svg-icons/angle-bar-right.svg new file mode 100644 index 0000000000..fb3d3652c8 --- /dev/null +++ b/jsapp/svg-icons/angle-bar-right.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/jsapp/svg-icons/angle-down.svg b/jsapp/svg-icons/angle-down.svg index 7dfd9b1204..06d28b9b4c 100644 --- a/jsapp/svg-icons/angle-down.svg +++ b/jsapp/svg-icons/angle-down.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/jsapp/svg-icons/angle-left.svg b/jsapp/svg-icons/angle-left.svg index 575b535a15..5d60411ab0 100644 --- a/jsapp/svg-icons/angle-left.svg +++ b/jsapp/svg-icons/angle-left.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/jsapp/svg-icons/angle-right.svg b/jsapp/svg-icons/angle-right.svg index ce86320b98..38c9f008e0 100644 --- a/jsapp/svg-icons/angle-right.svg +++ b/jsapp/svg-icons/angle-right.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/jsapp/svg-icons/angle-up.svg b/jsapp/svg-icons/angle-up.svg index bfd8dd9227..22bb3c5d11 100644 --- a/jsapp/svg-icons/angle-up.svg +++ b/jsapp/svg-icons/angle-up.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 476c374338..186116b775 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,6 +15,7 @@ "@mapbox/leaflet-omnivore": "^0.3.4", "@sentry/react": "^7.61.0", "@tanstack/react-query": "^5.49.2", + "@tanstack/react-table": "^8.20.5", "alertifyjs": "^1.13.1", "backbone": "^1.4.0", "backbone-validation": "^0.11.5", @@ -7126,6 +7127,37 @@ "react": "^18 || ^19" } }, + "node_modules/@tanstack/react-table": { + "version": "8.20.5", + "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz", + "integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==", + "dependencies": { + "@tanstack/table-core": "8.20.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + 
"node_modules/@tanstack/table-core": { + "version": "8.20.5", + "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz", + "integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, "node_modules/@testing-library/dom": { "version": "9.3.4", "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.4.tgz", @@ -30986,6 +31018,19 @@ "@tanstack/query-devtools": "5.50.1" } }, + "@tanstack/react-table": { + "version": "8.20.5", + "resolved": "https://registry.npmjs.org/@tanstack/react-table/-/react-table-8.20.5.tgz", + "integrity": "sha512-WEHopKw3znbUZ61s9i0+i9g8drmDo6asTWbrQh8Us63DAk/M0FkmIqERew6P71HI75ksZ2Pxyuf4vvKh9rAkiA==", + "requires": { + "@tanstack/table-core": "8.20.5" + } + }, + "@tanstack/table-core": { + "version": "8.20.5", + "resolved": "https://registry.npmjs.org/@tanstack/table-core/-/table-core-8.20.5.tgz", + "integrity": "sha512-P9dF7XbibHph2PFRz8gfBKEXEY/HJPOhym8CHmjF8y3q5mWpKx9xtZapXQUWCgkqvsK0R46Azuz+VaxD4Xl+Tg==" + }, "@testing-library/dom": { "version": "9.3.4", "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.4.tgz", diff --git a/package.json b/package.json index e39af32745..d47ca67ce1 100644 --- a/package.json +++ b/package.json @@ -11,6 +11,7 @@ "@mapbox/leaflet-omnivore": "^0.3.4", "@sentry/react": "^7.61.0", "@tanstack/react-query": "^5.49.2", + "@tanstack/react-table": "^8.20.5", "alertifyjs": "^1.13.1", "backbone": "^1.4.0", "backbone-validation": "^0.11.5", From d0d605080c8082d40e6115066b6aeaa1041750bd Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Fri, 13 Sep 2024 16:53:01 -0400 Subject: [PATCH 067/119] Monkey patch "get_candidate_relations_to_delete" to avoid inter-database cascade deletions --- ...0006_delete_digest_and_auth_token_table.py | 79 +++++++++++++++++++ kpi/utils/monkey_patching.py | 44 ++++++++++- 2 files changed, 122 insertions(+), 1 deletion(-) create mode 100644 kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py diff --git a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py new file mode 100644 index 0000000000..7a86c8c3e7 --- /dev/null +++ b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py @@ -0,0 +1,79 @@ +# Generated by Django 4.2.15 on 2024-09-13 13:52 + +from django.db import migrations, connections +from django.conf import settings + + +KC_TABLES_TO_DELETE = [ + 'django_digest_partialdigest', + 'django_digest_usernonce', + 'authtoken_token', +] + + +def get_operations(): + if settings.TESTING or settings.SKIP_HEAVY_MIGRATIONS: + # Skip this migration if running in test environment or because we want + # to voluntarily skip it. 
+ return [] + + tables = KC_TABLES_TO_DELETE + operations = [] + + sql = """ + SELECT con.conname + FROM pg_catalog.pg_constraint con + INNER JOIN pg_catalog.pg_class rel + ON rel.oid = con.conrelid + INNER JOIN pg_catalog.pg_namespace nsp + ON nsp.oid = connamespace + WHERE nsp.nspname = 'public' + AND rel.relname = %s; + """ + with connections[settings.OPENROSA_DB_ALIAS].cursor() as cursor: + drop_table_queries = [] + for table in tables: + cursor.execute(sql, [table]) + drop_index_queries = [] + for row in cursor.fetchall(): + if not row[0].endswith('_pkey'): + drop_index_queries.append( + f'ALTER TABLE public.{table} DROP CONSTRAINT {row[0]};' + ) + drop_table_queries.append(f'DROP TABLE IF EXISTS {table};') + operations.append( + migrations.RunSQL( + sql=''.join(drop_index_queries), + reverse_sql=migrations.RunSQL.noop, + ) + ) + + operations.append( + migrations.RunSQL( + sql=''.join(drop_table_queries), + reverse_sql=migrations.RunSQL.noop, + ) + ) + + return operations + + +def print_migration_warning(apps, schema_editor): + if settings.TESTING or settings.SKIP_HEAVY_MIGRATIONS: + return + print( + """ + This migration might take a while. If it is too slow, you may want to + re-run migrations with SKIP_HEAVY_MIGRATIONS=True and apply this one + manually from the django shell. + """ + ) + + +class Migration(migrations.Migration): + + dependencies = [ + ('api', '0005_delete_onetimeauthtoken'), + ] + + operations = [migrations.RunPython(print_migration_warning), *get_operations()] diff --git a/kpi/utils/monkey_patching.py b/kpi/utils/monkey_patching.py index 6b0251e572..511903ccf6 100644 --- a/kpi/utils/monkey_patching.py +++ b/kpi/utils/monkey_patching.py @@ -1,4 +1,7 @@ import django.contrib.auth.management +import django.db.models.deletion +from django.apps import apps +from django.db import router from django.conf import settings from django.contrib.auth.management import ( create_permissions as django_create_permissions, @@ -6,11 +9,12 @@ ) from kobo.apps.openrosa.libs.constants import OPENROSA_APP_LABELS +from kpi.constants import SHARED_APP_LABELS def create_permissions(app_config, using=DEFAULT_DB_ALIAS, **kwargs): """ - Avoid to create permissions on the wrong database when post signal is + Avoid creating permissions on the wrong database when post-signal is emitted on migrations """ if ( @@ -23,4 +27,42 @@ def create_permissions(app_config, using=DEFAULT_DB_ALIAS, **kwargs): return django_create_permissions(app_config=app_config, using=using, **kwargs) +def get_candidate_relations_to_delete(opts): + """ + Filter relations that are not from the same database + + The db connection is set at db router level. See `db_for_read()` and + `db_for_write()` in `kpi/db_routers.py::DefaultDatabaseRouter` class. + Unfortunately, it does not persist for candidates below. + So, even if db connection is set to one database, Django could detect + candidates to delete, based on `on_delete` attribute, from the other + database - which obviously raises an error because the table does not exist. 
+ """ + + db_connection = router.db_for_write(opts.model) + + return ( + f + for f in opts.get_fields(include_hidden=True) + if f.auto_created + and not f.concrete + and (f.one_to_one or f.one_to_many) + and ( + f.remote_field.model._meta.app_label in SHARED_APP_LABELS + or ( + ( + f.remote_field.model._meta.app_label in OPENROSA_APP_LABELS + and db_connection == settings.OPENROSA_DB_ALIAS + ) + or ( + f.remote_field.model._meta.app_label + not in OPENROSA_APP_LABELS + and db_connection == DEFAULT_DB_ALIAS + ) + ) + ) + ) + + django.contrib.auth.management.create_permissions = create_permissions +django.db.models.deletion.get_candidate_relations_to_delete = get_candidate_relations_to_delete From 3e732b0a682a9f70397dac5d10ee7b66e25857be Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 16 Sep 2024 14:35:35 -0400 Subject: [PATCH 068/119] Use User model directly to delete user in KC database instead of proxy --- kpi/deployment_backends/kc_access/utils.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/kpi/deployment_backends/kc_access/utils.py b/kpi/deployment_backends/kc_access/utils.py index 47aa22a3fd..f2caebafd1 100644 --- a/kpi/deployment_backends/kc_access/utils.py +++ b/kpi/deployment_backends/kc_access/utils.py @@ -15,6 +15,7 @@ from kobo.apps.kobo_auth.shortcuts import User from kpi.exceptions import KobocatProfileException +from kpi.utils.database import use_db from kpi.utils.log import logging from kpi.utils.permissions import is_user_anonymous from .shadow_models import ( @@ -414,12 +415,10 @@ def reset_kc_permissions( def delete_kc_user(username: str): - url = settings.KOBOCAT_INTERNAL_URL + f'/api/v1/users/{username}' - - response = requests.delete( - url, headers=get_request_headers(username) - ) - response.raise_for_status() + with use_db(settings.OPENROSA_DB_ALIAS): + # Do not use `.using()` here because it does not bubble down to the + # Collector. 
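(Editor's note — sketch only, not part of the patch.) The comment above is the crux of this change: `.using()` routes only the top-level query to the OpenRosa database, while the related objects gathered by Django's deletion Collector still go through the database router, so cascades can be attempted against the wrong database. Pinning the router with `use_db()` keeps the whole cascade on one connection. A minimal contrast, assuming this module's imports (`settings`, `User`, `use_db`):

    # Hypothetical variant: only the initial query is routed to OpenRosa; the
    # Collector may still resolve related models through the default router.
    User.objects.using(settings.OPENROSA_DB_ALIAS).filter(username=username).delete()

    # What the patch does: the router is pinned for the whole block, so the
    # deletion Collector also resolves related objects against OpenRosa.
    with use_db(settings.OPENROSA_DB_ALIAS):
        User.objects.filter(username=username).delete()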
+ User.objects.filter(username=username).delete() def kc_transaction_atomic(using='kobocat', *args, **kwargs): From fa24ddd23f7e0a1c45292fec96c76235238a9322 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 16 Sep 2024 17:25:54 -0400 Subject: [PATCH 069/119] Fix migration: specify table for connamespace --- .../api/migrations/0006_delete_digest_and_auth_token_table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py index 7a86c8c3e7..55101a93d9 100644 --- a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py +++ b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py @@ -26,7 +26,7 @@ def get_operations(): INNER JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid INNER JOIN pg_catalog.pg_namespace nsp - ON nsp.oid = connamespace + ON nsp.oid = con.connamespace WHERE nsp.nspname = 'public' AND rel.relname = %s; """ From 6f62c3e1412c464395764cfa4a8107244547ec36 Mon Sep 17 00:00:00 2001 From: Leszek Date: Tue, 17 Sep 2024 15:45:51 +0200 Subject: [PATCH 070/119] code review fixes --- ...paginatedQueryUniversalTable.component.tsx | 2 +- .../universalTable.component.tsx | 297 ++++++++++-------- .../universalTable/universalTable.module.scss | 48 ++- 3 files changed, 189 insertions(+), 158 deletions(-) diff --git a/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx b/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx index b0af53d43a..167d49059c 100644 --- a/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx +++ b/jsapp/js/universalTable/paginatedQueryUniversalTable.component.tsx @@ -60,7 +60,7 @@ export default function PaginatedQueryUniversalTable( pageIndex={currentPageIndex} pageCount={availablePages} pageSize={pagination.limit} - pageSizes={PAGE_SIZES} + pageSizeOptions={PAGE_SIZES} onRequestPaginationChange={(newPageInfo, oldPageInfo) => { // Calculate new offset and limit from what we've got let newOffset = newPageInfo.pageIndex * newPageInfo.pageSize; diff --git a/jsapp/js/universalTable/universalTable.component.tsx b/jsapp/js/universalTable/universalTable.component.tsx index c221c77d06..490cb1a770 100644 --- a/jsapp/js/universalTable/universalTable.component.tsx +++ b/jsapp/js/universalTable/universalTable.component.tsx @@ -1,5 +1,5 @@ // Libraries -import React from 'react'; +import React, {useState, useRef, useCallback} from 'react'; import cx from 'classnames'; import { flexRender, @@ -56,11 +56,11 @@ interface UniversalTableProps { /** Total number of pages of data. */ pageCount?: number; /** - * One of `pageSizes`. It is de facto the `limit` from the `offset` + `limit` + * One of `pageSizeOptions`. It is de facto the `limit` from the `offset` + `limit` * pair used for paginatin the endpoint. */ pageSize?: number; - pageSizes?: number[]; + pageSizeOptions?: number[]; /** * A way for the table to say "user wants to change pagination". It's being * triggered for both page size and page changes. 
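(Editor's note — language-neutral sketch, not part of the patch.) The props documented above assume the endpoints paginate with an `offset` + `limit` pair, and `PaginatedQueryUniversalTable` translates page changes into those parameters (`newOffset = pageIndex * pageSize`). The same bookkeeping written out in Python, with hypothetical names; resetting to the first page on a page-size change is an assumption about how a consumer would keep offset and limit aligned:

    # Sketch: map react-table style (pageIndex, pageSize) onto offset/limit.
    def to_query_params(page_index: int, page_size: int) -> dict:
        return {'offset': page_index * page_size, 'limit': page_size}

    def on_pagination_change(new: dict, old: dict) -> dict:
        offset = new['pageIndex'] * new['pageSize']
        # Assumed behaviour: a changed page size invalidates the old offset,
        # so jump back to the first page.
        if new['pageSize'] != old['pageSize']:
            offset = 0
        return {'offset': offset, 'limit': new['pageSize']}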
@@ -101,6 +101,26 @@ const DEFAULT_COLUMN_SIZE = { export default function UniversalTable( props: UniversalTableProps ) { + // We need table height for the resizers + const [tableHeight, setTableHeight] = useState(0); + const tableRef = useRef(null); + + const moveCallback = useCallback(() => { + if (tableRef.current) { + setTableHeight(tableRef.current.clientHeight); + } + }, []); + + function onResizerStart() { + document.addEventListener('mousemove', moveCallback); + document.addEventListener('touchmove', moveCallback); + } + + function onResizerEnd() { + document.removeEventListener('mousemove', moveCallback); + document.removeEventListener('touchmove', moveCallback); + } + function getCommonClassNames(column: Column) { return cx({ [styles.isPinned]: Boolean(column.getIsPinned()), @@ -129,7 +149,7 @@ export default function UniversalTable( data: props.data, getCoreRowModel: getCoreRowModel(), columnResizeMode: 'onChange', - //override default column sizing + // Override default column sizing defaultColumn: DEFAULT_COLUMN_SIZE, }; @@ -144,17 +164,17 @@ export default function UniversalTable( const hasPagination = ( props.pageIndex !== undefined && - props.pageCount !== undefined && - props.pageSize !== undefined && - props.pageSizes !== undefined && - props.onRequestPaginationChange !== undefined + props.pageCount && + props.pageSize && + props.pageSizeOptions && + props.onRequestPaginationChange ); // Add pagination related options if needed if ( hasPagination && // `hasPagination` handles everything, but we need these two for TypeScript: - props.pageSize !== undefined && + props.pageSize && props.pageIndex !== undefined ) { options.manualPagination = true; @@ -190,136 +210,153 @@ export default function UniversalTable( const totalPagesString = String(table.getPageCount()); return ( -
-
-
- - - {table.getHeaderGroups().map((headerGroup) => ( - - {headerGroup.headers.map((header) => ( - + ))} + + ))} + + + {table.getRowModel().rows.map((row) => ( + + {row.getVisibleCells().map((cell) => ( + + ))} + + ))} + +
- {!header.isPlaceholder && - flexRender( - header.column.columnDef.header, - header.getContext() - ) - } +
+
+ + + {table.getHeaderGroups().map((headerGroup) => ( + + {headerGroup.headers.map((header) => ( + - ))} - - ))} - - - {table.getRowModel().rows.map((row) => ( - - {row.getVisibleCells().map((cell) => ( - - ))} - - ))} - -
+ {!header.isPlaceholder && + flexRender( + header.column.columnDef.header, + header.getContext() + ) + } - {/* - TODO: if we ever see performance issues while resizing, - there is a way to fix that, see: - https://tanstack.com/table/latest/docs/guide/column-sizing#advanced-column-resizing-performance - */} -
header.column.resetSize()} - onMouseDown={header.getResizeHandler()} - onTouchStart={header.getResizeHandler()} - className={cx(styles.resizer, { - [styles.isResizing]: header.column.getIsResizing(), - })} - /> -
header.column.resetSize()} + onMouseDown={(event) => { + onResizerStart(); + header.getResizeHandler()(event); + }} + onTouchStart={(event) => { + onResizerStart(); + header.getResizeHandler()(event); + }} + onMouseUp={() => {onResizerEnd();}} + onTouchEnd={() => {onResizerEnd();}} + className={cx(styles.resizer, { + [styles.isResizing]: header.column.getIsResizing(), + })} > - {flexRender( - cell.column.columnDef.cell, - cell.getContext() - )} -
-
- - {hasPagination && ( -
-
-
+
+ {flexRender( + cell.column.columnDef.cell, + cell.getContext() + )} +
+
-
+ + + { + return { + value: String(pageSize), + label: t('##number## rows').replace('##number##', String(pageSize)), + }; + })} + selectedOption={String(table.getState().pagination.pageSize)} + onChange={(newSelectedOption: string | null) => { + table.setPageSize(Number(newSelectedOption)); + }} + placement='up-left' + /> + + )}
); } diff --git a/jsapp/js/universalTable/universalTable.module.scss b/jsapp/js/universalTable/universalTable.module.scss index 58f3196a98..c6128fe281 100644 --- a/jsapp/js/universalTable/universalTable.module.scss +++ b/jsapp/js/universalTable/universalTable.module.scss @@ -8,6 +8,8 @@ $universal-table-border-radius: 6px; // The radius of the inner elements (used for elements with backgrounds): $universal-table-border-radius-inner: $universal-table-border-radius - 2px; +$universal-table-resizer-top: 8px; + // TODO see if this needs to be something from `z-indexes` file, or if such // local numbers would be ok. $z-index-resizer: 2; @@ -15,12 +17,6 @@ $z-index-pinned: 3; $z-index-pinned-header: 4; $z-index-resizer-active: 5; -// We need this to have `overflow: hidden` on the table, but we can't do it on -// one of the other wrappers, as we need borders (sorry!). -.universalTableRootContainer { - overflow: hidden; -} - .universalTableRoot { border: 1px solid colors.$kobo-gray-200; background-color: colors.$kobo-white; @@ -115,10 +111,9 @@ $z-index-resizer-active: 5; .resizer { position: absolute; background-color: colors.$kobo-gray-500; - height: 28px; width: 1px; - top: 50%; - transform: translateY(-50%); + top: $universal-table-resizer-top; + bottom: $universal-table-resizer-top; z-index: $z-index-resizer; cursor: col-resize; touch-action: none; @@ -152,17 +147,13 @@ $z-index-resizer-active: 5; // This is the line that we display while resizing the table. It takes whole // height of the table. -.resizer::before { - display: none; +.resizerLine { content: ''; position: absolute; - top: -24px; + top: -1 * $universal-table-resizer-top; width: 100%; - // We want the line to be seen as taking the whole table, but in reality it - // has the height of the viewport - if user has a really long table, and tries - // to scroll down while dragging the resizer, they will possibly see that - // the line has its limits. We are ok with that :ok:. - height: 100vh; + // Height is being handled by JS code + height: auto; background-color: colors.$kobo-blue; } @@ -174,7 +165,9 @@ $z-index-resizer-active: 5; .resizer.isResizing { background-color: colors.$kobo-blue; outline: none; - z-index: $z-index-resizer; + // We need the resizer to appear over the pinned column - this is needed for + // a moment when we resize pinned column. + z-index: $z-index-resizer-active; } // We display two resizers when mouse is over the cell for them. We also display @@ -182,20 +175,21 @@ $z-index-resizer-active: 5; // the resizer further away from the cell, and it didn't move yet due to // animation happening or lag). // We want to display resizer of this cell, and a fake resizer on the left side. -// When user moves mouse to the fake resizer, the previous cell resizer (so -// an actual one) will be used. +// When user moves mouse to the fake resizer, the right-side resizer of +// the previous cell resizer will be used instead. .tableHeaderCell:hover::before, .tableHeaderCell:hover .resizer, -.resizer.isResizing, -.resizer.isResizing::before { +.resizer.isResizing { display: initial; } -// We need the resizer to appear over the pinned column - this is needed for -// a moment when we resize pinned column. 
-.resizer.isResizing, -.resizer.isResizing::before { - z-index: $z-index-resizer-active; +// On screens without hover we want the resizers to be always visible +@media (hover: none) { + .tableHeaderCell::before, + .tableHeaderCell .resizer, + .resizer.isResizing { + display: initial; + } } // We need this to avoid having empty space to the right of the last table From 1098de0faf2d98bc79e5fee69a39e5b16cea75b0 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 18 Sep 2024 16:20:11 -0400 Subject: [PATCH 071/119] Fix linter errors --- kobo/apps/subsequences/actions/keyword_search.py | 4 ++-- kobo/apps/subsequences/utils/__init__.py | 1 - kobo/apps/subsequences/utils/parse_known_cols.py | 3 --- kpi/models/asset.py | 1 + kpi/serializers/v2/asset.py | 3 --- 5 files changed, 3 insertions(+), 9 deletions(-) diff --git a/kobo/apps/subsequences/actions/keyword_search.py b/kobo/apps/subsequences/actions/keyword_search.py index 312162eb4e..3e83748e77 100644 --- a/kobo/apps/subsequences/actions/keyword_search.py +++ b/kobo/apps/subsequences/actions/keyword_search.py @@ -69,7 +69,7 @@ def check_submission_status(self, submission): response = self._traverse_object(submission, source) except KeyError: continue - # FIXME QPATH + xpath = source.split('/')[0] all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) @@ -92,7 +92,7 @@ def run_change(self, submission): matches = 0 for keyword in query['keywords']: matches += response['value'].count(keyword) - # FIXME QPATH + xpath = source.split('/')[0] all_output = submission[xpath].setdefault(self.ID, []) this_output = self._get_matching_element(all_output, **query) diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index 2f633da542..75a28cec6e 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -160,7 +160,6 @@ def stream_with_extras(submission_stream, asset): uuid = submission['_uuid'] all_supplemental_details = deepcopy(extras.get(uuid, {})) - # FIXME QPATH for supplemental_details in all_supplemental_details.values(): try: all_qual_responses = supplemental_details['qual'] diff --git a/kobo/apps/subsequences/utils/parse_known_cols.py b/kobo/apps/subsequences/utils/parse_known_cols.py index 4c4bb5c32b..53701ef622 100644 --- a/kobo/apps/subsequences/utils/parse_known_cols.py +++ b/kobo/apps/subsequences/utils/parse_known_cols.py @@ -16,16 +16,13 @@ def extend_col_deets(lang: str, coltype: str, label: str, xpath: str) -> dict: # NB: refer to commit d013bfe0f5 when trying to figure out the original # intent here - name = xpath.split('/')[-1] out = { - 'label': name, 'dtpath': f'{xpath}/{coltype}_{lang}', 'type': coltype, 'language': lang, 'label': f'{label} - {coltype}', 'name': f'{xpath}/{coltype}_{lang}', 'source': xpath, - # FIXME QPATH 'xpath': f'{xpath}/{coltype}/{lang}', 'settings': {'mode': 'manual', 'engine': f'engines/{coltype}_manual'}, 'path': [xpath, coltype], diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 3a9e71a73a..64301925bd 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -86,6 +86,7 @@ from kpi.utils.object_permission import get_cached_code_names from kpi.utils.sluggify import sluggify_label + class AssetDeploymentStatus(models.TextChoices): ARCHIVED = 'archived', 'Archived' diff --git a/kpi/serializers/v2/asset.py b/kpi/serializers/v2/asset.py index afc3d699a7..af8a2d6808 100644 --- a/kpi/serializers/v2/asset.py +++ b/kpi/serializers/v2/asset.py @@ -689,9 
+689,6 @@ def get_permissions(self, obj): queryset.all(), many=True, read_only=True, context=context ).data - def get_project_ownership(self, asset) -> Optional[dict]: - pass - def get_project_ownership(self, asset) -> Optional[dict]: if not (transfer := asset.transfers.order_by('-date_created').first()): return From 020a26a83592daae97028a611c7370bdf77d5f7e Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Wed, 18 Sep 2024 16:51:57 -0400 Subject: [PATCH 072/119] Apply requested changes --- kobo/apps/__init__.py | 9 +++++---- .../0006_delete_digest_and_auth_token_table.py | 4 ++++ .../apps/main/migrations/0011_drop_old_kpi_tables.py | 6 +++++- .../migrations/0014_drop_old_formdisclaimer_tables.py | 6 +++++- kpi/utils/monkey_patching.py | 3 +++ 5 files changed, 22 insertions(+), 6 deletions(-) diff --git a/kobo/apps/__init__.py b/kobo/apps/__init__.py index 64ee5bdbca..cf07273c96 100644 --- a/kobo/apps/__init__.py +++ b/kobo/apps/__init__.py @@ -2,9 +2,10 @@ from django.apps import AppConfig from django.core.checks import register, Tags -import kpi.utils.monkey_patching # Needed to apply monkey patch. -from kpi.utils.two_database_configuration_checker import \ - TwoDatabaseConfigurationChecker +import kpi.utils.monkey_patching # noqa +from kpi.utils.two_database_configuration_checker import ( + TwoDatabaseConfigurationChecker, +) class KpiConfig(AppConfig): @@ -13,7 +14,7 @@ class KpiConfig(AppConfig): def ready(self, *args, **kwargs): # Register signals only when the app is ready to avoid issues with models # not loaded yet. - import kpi.signals + import kpi.signals # noqa return super().ready(*args, **kwargs) diff --git a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py index 55101a93d9..0a2ecc9e30 100644 --- a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py +++ b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py @@ -20,6 +20,7 @@ def get_operations(): tables = KC_TABLES_TO_DELETE operations = [] + # SQL query to retrieve every constraint and foreign key of a specific table sql = """ SELECT con.conname FROM pg_catalog.pg_constraint con @@ -32,6 +33,9 @@ def get_operations(): """ with connections[settings.OPENROSA_DB_ALIAS].cursor() as cursor: drop_table_queries = [] + # Loop on every table needed to be deleted: + # 1) Remove every constraint/FK of the table first + # 2) Drop the table for table in tables: cursor.execute(sql, [table]) drop_index_queries = [] diff --git a/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py b/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py index 939f09a271..9822e38ae7 100644 --- a/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py +++ b/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py @@ -60,18 +60,22 @@ def get_operations(): tables = DEPRECATED_TABLES + KPI_TABLES operations = [] + # SQL query to retrieve every constraint and foreign key of a specific table sql = """ SELECT con.conname FROM pg_catalog.pg_constraint con INNER JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid INNER JOIN pg_catalog.pg_namespace nsp - ON nsp.oid = connamespace + ON nsp.oid = con.connamespace WHERE nsp.nspname = 'public' AND rel.relname = %s; """ with connections[settings.OPENROSA_DB_ALIAS].cursor() as cursor: drop_table_queries = [] + # Loop on every table needed to be deleted: + # 1) Remove every constraint/FK of the 
table first + # 2) Drop the table for table in tables: cursor.execute(sql, [table]) drop_index_queries = [] diff --git a/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py b/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py index eb5252e087..9d5c0ce7b0 100644 --- a/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py +++ b/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py @@ -18,18 +18,22 @@ def get_operations(): tables = KC_FORM_DISCLAIMER_TABLES operations = [] + # SQL query to retrieve every constraint and foreign key of a specific table sql = """ SELECT con.conname FROM pg_catalog.pg_constraint con INNER JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid INNER JOIN pg_catalog.pg_namespace nsp - ON nsp.oid = connamespace + ON nsp.oid = con.connamespace WHERE nsp.nspname = 'public' AND rel.relname = %s; """ with connections[settings.OPENROSA_DB_ALIAS].cursor() as cursor: drop_table_queries = [] + # Loop on every table needed to be deleted: + # 1) Remove every constraint/FK of the table first + # 2) Drop the table for table in tables: cursor.execute(sql, [table]) drop_index_queries = [] diff --git a/kpi/utils/monkey_patching.py b/kpi/utils/monkey_patching.py index 511903ccf6..c244b43b6f 100644 --- a/kpi/utils/monkey_patching.py +++ b/kpi/utils/monkey_patching.py @@ -37,6 +37,8 @@ def get_candidate_relations_to_delete(opts): So, even if db connection is set to one database, Django could detect candidates to delete, based on `on_delete` attribute, from the other database - which obviously raises an error because the table does not exist. + + See https://github.com/django/django/blob/52116774549e27ac5d1ba9423e2fe61c5503a4a4/django/db/models/deletion.py#L86-L93 """ db_connection = router.db_for_write(opts.model) @@ -47,6 +49,7 @@ def get_candidate_relations_to_delete(opts): if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many) + # new condition below from monkey-patching and ( f.remote_field.model._meta.app_label in SHARED_APP_LABELS or ( From 98b58ce64bc5e0b2a66b2699626e62d6f8409e27 Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 3 Sep 2024 14:01:55 -0400 Subject: [PATCH 073/119] refactor: make access logs a proxy class --- .../audit_log/migrations/0010_accesslog.py | 24 ++++++++ kobo/apps/audit_log/mixins.py | 5 +- kobo/apps/audit_log/models.py | 60 +++++++++++++------ kobo/apps/audit_log/signals.py | 7 +-- .../tests/api/v2/test_api_audit_log.py | 34 ++++------- kobo/apps/audit_log/tests/test_models.py | 46 +++++++------- kobo/apps/audit_log/tests/test_signals.py | 2 +- kobo/apps/audit_log/views.py | 7 +-- 8 files changed, 109 insertions(+), 76 deletions(-) create mode 100644 kobo/apps/audit_log/migrations/0010_accesslog.py diff --git a/kobo/apps/audit_log/migrations/0010_accesslog.py b/kobo/apps/audit_log/migrations/0010_accesslog.py new file mode 100644 index 0000000000..8fb5b7b167 --- /dev/null +++ b/kobo/apps/audit_log/migrations/0010_accesslog.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.15 on 2024-09-03 16:38 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('audit_log', '0009_alter_auditlog_action'), + ] + + operations = [ + migrations.CreateModel( + name='AccessLog', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + 'constraints': [], + }, + bases=('audit_log.auditlog',), + ), + ] diff --git a/kobo/apps/audit_log/mixins.py b/kobo/apps/audit_log/mixins.py index 
1268a7f932..790aa11891 100644 --- a/kobo/apps/audit_log/mixins.py +++ b/kobo/apps/audit_log/mixins.py @@ -1,4 +1,4 @@ -from .models import AuditLog +from .models import AuditLog, AccessLog class RequiresAccessLogMixin: @@ -7,7 +7,6 @@ class RequiresAccessLogMixin: """ def create_access_log(self, request, user, auth_type): - log = AuditLog.create_access_log_for_request( + AccessLog.create_from_request( request, user, authentication_type=auth_type ) - log.save() diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index a78e63f470..edcb67062e 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -18,6 +18,7 @@ ACCESS_LOG_UNKNOWN_AUTH_TYPE, ) from kpi.fields.kpi_uid import UUID_LENGTH +from kpi.utils.log import logging class AuditAction(models.TextChoices): @@ -94,12 +95,48 @@ def save( update_fields=update_fields, ) +class AccessLogManager(models.Manager): + def get_queryset(self): + return super().get_queryset().filter(log_type=AuditType.ACCESS) + + def create(self, **kwargs): + # remove any attempt to set fields that should always be the same on an access log + app_label=kwargs.pop('app_label', None) + if app_label is not None: + logging.warning(f'Ignoring attempt to set {app_label=} on access log') + model_name=kwargs.pop('model_name', None) + if model_name is not None: + logging.warning(f'Ignoring attempt to set {model_name=} on access log') + action=kwargs.pop('action', None) + if action is not None: + logging.warning(f'Ignoring attempt to set {action=} on access log') + log_type=kwargs.pop('log_type', None) + if log_type is not None: + logging.warning(f'Ignoring attempt to set {log_type=} on access log') + # when we initialize manually we have a user, but when we initialize from the db we don't + user = kwargs.pop('user') + return super().create( + # set the fields that are always the same for access logs, pass along the rest to the original constructor + app_label=ACCESS_LOG_KOBO_AUTH_APP_LABEL, + model_name=User.__qualname__, + action=AuditAction.AUTH, + log_type=AuditType.ACCESS, + user=user, + object_id=user.id, + user_uid=user.extra_details.uid, + **kwargs + ) + + +class AccessLog(AuditLog): + objects = AccessLogManager() + + class Meta: + proxy = True + @staticmethod - def create_access_log_for_request( - request, - user=None, - authentication_type: str = None, - extra_metadata: dict = None, + def create_from_request( + request, user=None, authentication_type: str = None, extra_metadata:dict = None ): """ Create an access log for a request, assigned to either the given user or request.user if not supplied @@ -154,7 +191,6 @@ def create_access_log_for_request( 'source': source, 'auth_type': auth_type, } - # add extra information if needed for django-loginas if is_loginas: metadata['initial_user_uid'] = initial_user.extra_details.uid @@ -162,14 +198,4 @@ def create_access_log_for_request( # add any other metadata the caller may want if extra_metadata is not None: metadata.update(extra_metadata) - audit_log = AuditLog( - user=logged_in_user, - app_label=ACCESS_LOG_KOBO_AUTH_APP_LABEL, - model_name=User.__qualname__, - object_id=logged_in_user.id, - user_uid=logged_in_user.extra_details.uid, - action=AuditAction.AUTH, - metadata=metadata, - log_type=AuditType.ACCESS, - ) - return audit_log + return AccessLog.objects.create(user=logged_in_user, metadata=metadata) diff --git a/kobo/apps/audit_log/signals.py b/kobo/apps/audit_log/signals.py index 17ba9047fb..9190695945 100644 --- a/kobo/apps/audit_log/signals.py +++ 
b/kobo/apps/audit_log/signals.py
@@ -2,7 +2,7 @@
 from django.dispatch import receiver
 
 from kpi.utils.log import logging
-from .models import AuditLog
+from .models import AuditLog, AccessLog
 
 
 @receiver(user_logged_in)
@@ -11,7 +11,6 @@ def create_access_log(sender, user, **kwargs):
     if not hasattr(request, 'user'):
         # This should never happen outside of tests
         logging.warning('Request does not have authenticated user attached.')
-        log = AuditLog.create_access_log_for_request(request, user)
+        AccessLog.create_from_request(request, user)
     else:
-        log = AuditLog.create_access_log_for_request(request)
-        log.save()
+        AccessLog.create_from_request(request)
diff --git a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py
index de36509aef..5fca232e77 100644
--- a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py
+++ b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py
@@ -5,7 +5,7 @@
 from rest_framework import status
 from rest_framework.reverse import reverse
 
-from kobo.apps.audit_log.models import AuditAction, AuditLog, AuditType
+from kobo.apps.audit_log.models import AuditAction, AuditLog, AuditType, AccessLog
 from kobo.apps.audit_log.serializers import AuditLogSerializer
 from kobo.apps.audit_log.tests.test_signals import skip_login_access_log
 from kobo.apps.kobo_auth.shortcuts import User
@@ -25,19 +25,6 @@ def setUp(self):
         super(BaseAuditLogTestCase, self).setUp()
         self.url = reverse(self._get_endpoint(self.get_endpoint_basename()))
 
-    def create_access_log(self, user, date_created=None):
-        params = {
-            'user': user,
-            'app_label': 'kobo_auth',
-            'model_name': 'user',
-            'action': AuditAction.AUTH,
-            'object_id': user.id,
-        }
-        if date_created is not None:
-            params['date_created'] = date_created
-        log = AuditLog.objects.create(**params)
-        log.save()
-        return log
 
     def login_user(self, username, password):
         # always skip creating the access logs for logins so we have full control over the logs in the test db
         with skip_login_access_log():
@@ -52,7 +39,7 @@ def force_login_user(self, user):
     def assert_audit_log_results_equal(self, response, expected_kwargs):
         # utility method for tests that are just comparing the results of an api call to the results of
         # manually applying the expected query (simple filters only)
-        expected = AuditLog.objects.filter(**expected_kwargs).order_by(
+        expected = AccessLog.objects.filter(**expected_kwargs).order_by(
             '-date_created'
         )
         expected_count = expected.count()
@@ -175,9 +162,9 @@ def setUp(self):
         user1 = User.objects.get(username='someuser')
         user2 = User.objects.get(username='anotheruser')
         # generate 3 access logs, 2 for user1, 1 for user2
-        self.create_access_log(user1)
-        self.create_access_log(user1)
-        self.create_access_log(user2)
+        AccessLog.objects.create(user=user1)
+        AccessLog.objects.create(user=user1)
+        AccessLog.objects.create(user=user2)
 
         # create a random non-auth audit log
         log = AuditLog.objects.create(
@@ -225,9 +212,10 @@ def setUp(self):
         super_user = User.objects.get(username='admin')
         user2 = User.objects.get(username='anotheruser')
         # generate 3 access logs, 2 for superuser, 1 for user2
-        self.create_access_log(super_user)
-        self.create_access_log(super_user)
-        self.create_access_log(user2)
+        # generate 3 access logs, 2 for user1, 1 for user2
+        user_1_log_1 = AccessLog.objects.create(user=super_user)
+        user_1_log_2 = AccessLog.objects.create(user=super_user)
+        user_2_log_1 = AccessLog.objects.create(user=user2)
 
         # create a random non-auth audit log
         log = AuditLog.objects.create(
@@ -237,7 +225,6 @@ def setUp(self):
object_id=1, action=AuditAction.DELETE, ) - log.save() self.assertEqual(AuditLog.objects.count(), 4) def test_list_as_anonymous_returns_unauthorized(self): @@ -272,8 +259,7 @@ def test_can_search_access_logs_by_date(self): self.client.force_login(User.objects.get(username='admin')) tomorrow = timezone.now() + timedelta(days=1) tomorrow_str = tomorrow.strftime('%Y-%m-%d') - log = self.create_access_log(user=another_user, date_created=tomorrow) - + log = AccessLog.objects.create(user=another_user, date_created=tomorrow) response = self.client.get( f'{self.url}?q=date_created__gte:"{tomorrow_str}"' ) diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index 70e5bce893..80c8b128d0 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -10,7 +10,7 @@ ACCESS_LOG_UNKNOWN_AUTH_TYPE, AuditAction, AuditLog, - AuditType, + AuditType, AccessLog, ) from kobo.apps.kobo_auth.shortcuts import User from kpi.tests.base_test_case import BaseTestCase @@ -21,7 +21,7 @@ return_value='source', ) @patch('kobo.apps.audit_log.models.get_client_ip', return_value='127.0.0.1') -class AuditLogModelTestCase(BaseTestCase): +class AccessLogModelTestCase(BaseTestCase): @classmethod def setUpClass(cls): @@ -41,14 +41,14 @@ def _create_request(self, url: str, cached_user, new_user): request.resolver_match = resolve(url) return request - def _check_common_fields(self, audit_log: AuditLog, user): - self.assertEqual(audit_log.user.id, user.id) - self.assertEqual(audit_log.app_label, ACCESS_LOG_KOBO_AUTH_APP_LABEL) - self.assertEqual(audit_log.model_name, 'User') - self.assertEqual(audit_log.object_id, user.id) - self.assertEqual(audit_log.user_uid, user.extra_details.uid) - self.assertEqual(audit_log.action, AuditAction.AUTH) - self.assertEqual(audit_log.log_type, AuditType.ACCESS) + def _check_common_fields(self, access_log: AccessLog, user): + self.assertEqual(access_log.user.id, user.id) + self.assertEqual(access_log.app_label, ACCESS_LOG_KOBO_AUTH_APP_LABEL) + self.assertEqual(access_log.model_name, 'User') + self.assertEqual(access_log.object_id, user.id) + self.assertEqual(access_log.user_uid, user.extra_details.uid) + self.assertEqual(access_log.action, AuditAction.AUTH) + self.assertEqual(access_log.log_type, AuditType.ACCESS) def test_basic_create_auth_log_from_request( self, patched_ip, patched_source @@ -56,16 +56,16 @@ def test_basic_create_auth_log_from_request( request = self._create_request( reverse('kobo_login'), AnonymousUser(), - AuditLogModelTestCase.super_user, + AccessLogModelTestCase.super_user, ) - log: AuditLog = AuditLog.create_access_log_for_request(request) - self._check_common_fields(log, AuditLogModelTestCase.super_user) + log: AccessLog = AccessLog.create_from_request(request) + self._check_common_fields(log, AccessLogModelTestCase.super_user) self.assertDictEqual( log.metadata, { 'ip_address': '127.0.0.1', 'source': 'source', - 'auth_type': AuditLogModelTestCase.super_user.backend, + 'auth_type': AccessLogModelTestCase.super_user.backend, }, ) @@ -78,10 +78,10 @@ def test_create_auth_log_from_loginas_request( second_user.save() request = self._create_request( reverse('loginas-user-login', args=(second_user.id,)), - AuditLogModelTestCase.super_user, + AccessLogModelTestCase.super_user, second_user, ) - log: AuditLog = AuditLog.create_access_log_for_request(request) + log: AccessLog = AccessLog.create_from_request(request) self._check_common_fields(log, second_user) self.assertDictEqual( log.metadata, @@ 
-89,8 +89,8 @@ def test_create_auth_log_from_loginas_request( 'ip_address': '127.0.0.1', 'source': 'source', 'auth_type': ACCESS_LOG_LOGINAS_AUTH_TYPE, - 'initial_user_uid': AuditLogModelTestCase.super_user.extra_details.uid, - 'initial_user_username': AuditLogModelTestCase.super_user.username, + 'initial_user_uid': AccessLogModelTestCase.super_user.extra_details.uid, + 'initial_user_username': AccessLogModelTestCase.super_user.username, }, ) @@ -100,12 +100,12 @@ def test_create_auth_log_with_different_auth_type( request = self._create_request( reverse('api_v2:asset-list'), AnonymousUser(), - AuditLogModelTestCase.super_user, + AccessLogModelTestCase.super_user, ) - log: AuditLog = AuditLog.create_access_log_for_request( + log: AccessLog = AccessLog.create_from_request( request, authentication_type='Token' ) - self._check_common_fields(log, AuditLogModelTestCase.super_user) + self._check_common_fields(log, AccessLogModelTestCase.super_user) self.assertDictEqual( log.metadata, { @@ -125,10 +125,10 @@ def test_create_auth_log_unknown_authenticator( second_user.save() request = self._create_request( reverse('api_v2:asset-list'), - AuditLogModelTestCase.super_user, + AccessLogModelTestCase.super_user, second_user, ) - log: AuditLog = AuditLog.create_access_log_for_request(request) + log: AccessLog = AccessLog.create_from_request(request) self._check_common_fields(log, second_user) self.assertDictEqual( log.metadata, diff --git a/kobo/apps/audit_log/tests/test_signals.py b/kobo/apps/audit_log/tests/test_signals.py index 178b6b9c52..145685d45b 100644 --- a/kobo/apps/audit_log/tests/test_signals.py +++ b/kobo/apps/audit_log/tests/test_signals.py @@ -42,7 +42,7 @@ def setUpClass(cls): cls.user.backend = 'django.contrib.auth.backends.ModelBackend' cls.user.save() - @patch('kobo.apps.audit_log.signals.AuditLog.create_access_log_for_request') + @patch('kobo.apps.audit_log.signals.AccessLog.create_from_request') def test_audit_log_created_on_login(self, patched_create): """ Basic plumbing test to make sure the signal is hooked up diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index ef9cc28eb8..9e30c7115e 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -4,7 +4,7 @@ from kpi.filters import SearchFilter from kpi.permissions import IsAuthenticated from .filters import AccessLogPermissionsFilter -from .models import AuditAction, AuditLog +from .models import AuditAction, AuditLog, AccessLog from .permissions import SuperUserPermission from .serializers import AuditLogSerializer @@ -131,7 +131,7 @@ class AllAccessLogViewSet(AuditLogViewSet): """ queryset = ( - AuditLog.objects.select_related('user') + AccessLog.objects.select_related('user') .filter(action=AuditAction.AUTH) .order_by('-date_created') ) @@ -185,8 +185,7 @@ class AccessLogViewSet(AuditLogViewSet): """ queryset = ( - AuditLog.objects.select_related('user') - .filter(action=AuditAction.AUTH) + AccessLog.objects.select_related('user') .order_by('-date_created') ) permission_classes = (IsAuthenticated,) From fefb1a8ba391f47bc2128f55933614636be5f5a4 Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 3 Sep 2024 14:17:01 -0400 Subject: [PATCH 074/119] fixup!: new tests --- kobo/apps/audit_log/models.py | 1 - kobo/apps/audit_log/tests/test_models.py | 27 ++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index edcb67062e..825e7635e4 100644 --- a/kobo/apps/audit_log/models.py +++ 
b/kobo/apps/audit_log/models.py @@ -113,7 +113,6 @@ def create(self, **kwargs): log_type=kwargs.pop('log_type', None) if log_type is not None: logging.warning(f'Ignoring attempt to set {log_type=} on access log') - # when we initialize manually we have a user, but when we initialize from the db we don't user = kwargs.pop('user') return super().create( # set the fields that are always the same for access logs, pass along the rest to the original constructor diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index 80c8b128d0..80c12c81ec 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -1,9 +1,13 @@ +from datetime import timedelta from unittest.mock import patch +from django.utils import timezone + from django.contrib.auth.models import AnonymousUser from django.test.client import RequestFactory from django.urls import resolve, reverse +import kobo.apps.audit_log.models from kobo.apps.audit_log.models import ( ACCESS_LOG_KOBO_AUTH_APP_LABEL, ACCESS_LOG_LOGINAS_AUTH_TYPE, @@ -14,6 +18,7 @@ ) from kobo.apps.kobo_auth.shortcuts import User from kpi.tests.base_test_case import BaseTestCase +from kpi.utils.log import logging @patch( @@ -50,6 +55,28 @@ def _check_common_fields(self, access_log: AccessLog, user): self.assertEqual(access_log.action, AuditAction.AUTH) self.assertEqual(access_log.log_type, AuditType.ACCESS) + def create_access_log_sets_standard_fields(self, patched_ip, patched_source): + yesterday = timezone.now() - timedelta(days=1) + log = AccessLog.objects.create(user=AccessLogModelTestCase.super_user, metadata={'foo': 'bar'}, date_created=yesterday) + self._check_common_fields(log, AccessLogModelTestCase.super_user) + self.assertEquals(log.date_created, yesterday) + self.assertDictEqual(log.metadata, {'foo':'bar'}) + + @patch('kobo.apps.audit_log.models.logging.warning') + def create_access_log_ignores_attempt_to_override_standard_fields(self, patched_warning, patched_ip, patched_source): + log = AccessLog.objects.create( + log_type=AuditType.DATA_EDITING, + action=AuditAction.CREATE, + model_name='foo', + app_label='bar', + user=AccessLogModelTestCase.super_user + ) + # the standard fields should be set the same as any other access logs + self._check_common_fields(log, AccessLogModelTestCase.super_user) + # we logged a warning for each attempt to override a field + self.assertEquals(patched_warning.call_count, 4) + + def test_basic_create_auth_log_from_request( self, patched_ip, patched_source ): From ad9513c95b42cf0cbe43fcbda4faaeacbaba7d19 Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 3 Sep 2024 14:20:43 -0400 Subject: [PATCH 075/119] fixup!: cleanup --- .../audit_log/migrations/0010_accesslog.py | 3 +- kobo/apps/audit_log/mixins.py | 2 +- kobo/apps/audit_log/models.py | 23 +++++++++------ kobo/apps/audit_log/signals.py | 2 +- .../tests/api/v2/test_api_audit_log.py | 8 ++++-- kobo/apps/audit_log/tests/test_models.py | 28 +++++++++++-------- kobo/apps/audit_log/views.py | 7 ++--- 7 files changed, 43 insertions(+), 30 deletions(-) diff --git a/kobo/apps/audit_log/migrations/0010_accesslog.py b/kobo/apps/audit_log/migrations/0010_accesslog.py index 8fb5b7b167..c369661d34 100644 --- a/kobo/apps/audit_log/migrations/0010_accesslog.py +++ b/kobo/apps/audit_log/migrations/0010_accesslog.py @@ -12,8 +12,7 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( name='AccessLog', - fields=[ - ], + fields=[], options={ 'proxy': True, 'indexes': [], diff --git 
a/kobo/apps/audit_log/mixins.py b/kobo/apps/audit_log/mixins.py index 790aa11891..d9102df79e 100644 --- a/kobo/apps/audit_log/mixins.py +++ b/kobo/apps/audit_log/mixins.py @@ -1,4 +1,4 @@ -from .models import AuditLog, AccessLog +from .models import AccessLog class RequiresAccessLogMixin: diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index 825e7635e4..6410ceb487 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -95,24 +95,31 @@ def save( update_fields=update_fields, ) + class AccessLogManager(models.Manager): def get_queryset(self): return super().get_queryset().filter(log_type=AuditType.ACCESS) def create(self, **kwargs): # remove any attempt to set fields that should always be the same on an access log - app_label=kwargs.pop('app_label', None) + app_label = kwargs.pop('app_label', None) if app_label is not None: - logging.warning(f'Ignoring attempt to set {app_label=} on access log') - model_name=kwargs.pop('model_name', None) + logging.warning( + f'Ignoring attempt to set {app_label=} on access log' + ) + model_name = kwargs.pop('model_name', None) if model_name is not None: - logging.warning(f'Ignoring attempt to set {model_name=} on access log') - action=kwargs.pop('action', None) + logging.warning( + f'Ignoring attempt to set {model_name=} on access log' + ) + action = kwargs.pop('action', None) if action is not None: logging.warning(f'Ignoring attempt to set {action=} on access log') - log_type=kwargs.pop('log_type', None) + log_type = kwargs.pop('log_type', None) if log_type is not None: - logging.warning(f'Ignoring attempt to set {log_type=} on access log') + logging.warning( + f'Ignoring attempt to set {log_type=} on access log' + ) user = kwargs.pop('user') return super().create( # set the fields that are always the same for access logs, pass along the rest to the original constructor @@ -123,7 +130,7 @@ def create(self, **kwargs): user=user, object_id=user.id, user_uid=user.extra_details.uid, - **kwargs + **kwargs, ) diff --git a/kobo/apps/audit_log/signals.py b/kobo/apps/audit_log/signals.py index 9190695945..046ea518eb 100644 --- a/kobo/apps/audit_log/signals.py +++ b/kobo/apps/audit_log/signals.py @@ -2,7 +2,7 @@ from django.dispatch import receiver from kpi.utils.log import logging -from .models import AuditLog, AccessLog +from .models import AccessLog @receiver(user_logged_in) diff --git a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py index 5fca232e77..3be43faba1 100644 --- a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py +++ b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py @@ -5,7 +5,12 @@ from rest_framework import status from rest_framework.reverse import reverse -from kobo.apps.audit_log.models import AuditAction, AuditLog, AuditType, AccessLog +from kobo.apps.audit_log.models import ( + AccessLog, + AuditAction, + AuditLog, + AuditType, +) from kobo.apps.audit_log.serializers import AuditLogSerializer from kobo.apps.audit_log.tests.test_signals import skip_login_access_log from kobo.apps.kobo_auth.shortcuts import User @@ -25,7 +30,6 @@ def setUp(self): super(BaseAuditLogTestCase, self).setUp() self.url = reverse(self._get_endpoint(self.get_endpoint_basename())) - def login_user(self, username, password): # always skip creating the access logs for logins so we have full control over the logs in the test db with skip_login_access_log(): diff --git a/kobo/apps/audit_log/tests/test_models.py 
b/kobo/apps/audit_log/tests/test_models.py index 80c12c81ec..43adcd395e 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -1,24 +1,21 @@ from datetime import timedelta from unittest.mock import patch -from django.utils import timezone - from django.contrib.auth.models import AnonymousUser from django.test.client import RequestFactory from django.urls import resolve, reverse +from django.utils import timezone -import kobo.apps.audit_log.models from kobo.apps.audit_log.models import ( ACCESS_LOG_KOBO_AUTH_APP_LABEL, ACCESS_LOG_LOGINAS_AUTH_TYPE, ACCESS_LOG_UNKNOWN_AUTH_TYPE, + AccessLog, AuditAction, - AuditLog, - AuditType, AccessLog, + AuditType, ) from kobo.apps.kobo_auth.shortcuts import User from kpi.tests.base_test_case import BaseTestCase -from kpi.utils.log import logging @patch( @@ -55,28 +52,35 @@ def _check_common_fields(self, access_log: AccessLog, user): self.assertEqual(access_log.action, AuditAction.AUTH) self.assertEqual(access_log.log_type, AuditType.ACCESS) - def create_access_log_sets_standard_fields(self, patched_ip, patched_source): + def create_access_log_sets_standard_fields( + self, patched_ip, patched_source + ): yesterday = timezone.now() - timedelta(days=1) - log = AccessLog.objects.create(user=AccessLogModelTestCase.super_user, metadata={'foo': 'bar'}, date_created=yesterday) + log = AccessLog.objects.create( + user=AccessLogModelTestCase.super_user, + metadata={'foo': 'bar'}, + date_created=yesterday, + ) self._check_common_fields(log, AccessLogModelTestCase.super_user) self.assertEquals(log.date_created, yesterday) - self.assertDictEqual(log.metadata, {'foo':'bar'}) + self.assertDictEqual(log.metadata, {'foo': 'bar'}) @patch('kobo.apps.audit_log.models.logging.warning') - def create_access_log_ignores_attempt_to_override_standard_fields(self, patched_warning, patched_ip, patched_source): + def create_access_log_ignores_attempt_to_override_standard_fields( + self, patched_warning, patched_ip, patched_source + ): log = AccessLog.objects.create( log_type=AuditType.DATA_EDITING, action=AuditAction.CREATE, model_name='foo', app_label='bar', - user=AccessLogModelTestCase.super_user + user=AccessLogModelTestCase.super_user, ) # the standard fields should be set the same as any other access logs self._check_common_fields(log, AccessLogModelTestCase.super_user) # we logged a warning for each attempt to override a field self.assertEquals(patched_warning.call_count, 4) - def test_basic_create_auth_log_from_request( self, patched_ip, patched_source ): diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index 9e30c7115e..8d43b4ff08 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -4,7 +4,7 @@ from kpi.filters import SearchFilter from kpi.permissions import IsAuthenticated from .filters import AccessLogPermissionsFilter -from .models import AuditAction, AuditLog, AccessLog +from .models import AccessLog, AuditAction, AuditLog from .permissions import SuperUserPermission from .serializers import AuditLogSerializer @@ -184,9 +184,8 @@ class AccessLogViewSet(AuditLogViewSet): """ - queryset = ( - AccessLog.objects.select_related('user') - .order_by('-date_created') + queryset = AccessLog.objects.select_related('user').order_by( + '-date_created' ) permission_classes = (IsAuthenticated,) filter_backends = (AccessLogPermissionsFilter,) From efcfd8ec2f39500a2f93872c7caa9eb2f4daf492 Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 3 Sep 2024 16:15:22 -0400 Subject: [PATCH 
076/119] fixup!: authorized app --- kpi/views/__init__.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/kpi/views/__init__.py b/kpi/views/__init__.py index d30884957b..62f1a05a29 100644 --- a/kpi/views/__init__.py +++ b/kpi/views/__init__.py @@ -5,7 +5,7 @@ from rest_framework.decorators import api_view, authentication_classes from rest_framework.response import Response -from kobo.apps.audit_log.models import AuditLog +from kobo.apps.audit_log.models import AccessLog from kobo.apps.kobo_auth.shortcuts import User from kpi.constants import ACCESS_LOG_AUTHORIZED_APP_TYPE from kpi.models import AuthorizedApplication @@ -64,10 +64,9 @@ def authorized_application_authenticate_user(request): # authenticated as the application and not the user, we do it here so # we can have the user information extra_metadata_for_log = {'authorized_app_name': request.auth.name} - log = AuditLog.create_access_log_for_request( + AccessLog.create_from_request( request, user, ACCESS_LOG_AUTHORIZED_APP_TYPE, extra_metadata_for_log ) - log.save() return Response(response_data) From 38204b4c8015c1beb7400b5de98014f33b3bbcef Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 3 Sep 2024 16:16:12 -0400 Subject: [PATCH 077/119] fixup!: fix rebase artifact --- kobo/apps/audit_log/tests/test_models.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index 43adcd395e..b3b556e8bb 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -176,13 +176,13 @@ def test_create_auth_log_with_extra_metadata( request = self._create_request( reverse('api_v2:asset-list'), AnonymousUser(), - AuditLogModelTestCase.super_user, + AccessLogModelTestCase.super_user, ) extra_metadata = {'foo': 'bar'} - log: AuditLog = AuditLog.create_access_log_for_request( + log: AccessLog = AccessLog.create_from_request( request, authentication_type='Token', extra_metadata=extra_metadata ) - self._check_common_fields(log, AuditLogModelTestCase.super_user) + self._check_common_fields(log, AccessLogModelTestCase.super_user) self.assertDictEqual( log.metadata, { From e18d825da0a8488a0261c700e4435530443e5153 Mon Sep 17 00:00:00 2001 From: rgraber Date: Mon, 16 Sep 2024 16:51:05 -0400 Subject: [PATCH 078/119] fixup!: use lowercase user --- kobo/apps/audit_log/models.py | 5 ++--- kobo/apps/audit_log/tests/test_models.py | 5 ++--- kpi/constants.py | 1 - 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index 6410ceb487..549a9f6482 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -12,7 +12,6 @@ get_human_readable_client_user_agent, ) from kpi.constants import ( - ACCESS_LOG_KOBO_AUTH_APP_LABEL, ACCESS_LOG_LOGINAS_AUTH_TYPE, ACCESS_LOG_SUBMISSION_AUTH_TYPE, ACCESS_LOG_UNKNOWN_AUTH_TYPE, @@ -123,8 +122,8 @@ def create(self, **kwargs): user = kwargs.pop('user') return super().create( # set the fields that are always the same for access logs, pass along the rest to the original constructor - app_label=ACCESS_LOG_KOBO_AUTH_APP_LABEL, - model_name=User.__qualname__, + app_label=User._meta.app_label, + model_name=User._meta.model_name, action=AuditAction.AUTH, log_type=AuditType.ACCESS, user=user, diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index b3b556e8bb..7419e05c45 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ 
b/kobo/apps/audit_log/tests/test_models.py @@ -7,7 +7,6 @@ from django.utils import timezone from kobo.apps.audit_log.models import ( - ACCESS_LOG_KOBO_AUTH_APP_LABEL, ACCESS_LOG_LOGINAS_AUTH_TYPE, ACCESS_LOG_UNKNOWN_AUTH_TYPE, AccessLog, @@ -45,8 +44,8 @@ def _create_request(self, url: str, cached_user, new_user): def _check_common_fields(self, access_log: AccessLog, user): self.assertEqual(access_log.user.id, user.id) - self.assertEqual(access_log.app_label, ACCESS_LOG_KOBO_AUTH_APP_LABEL) - self.assertEqual(access_log.model_name, 'User') + self.assertEqual(access_log.app_label, 'kobo_auth') + self.assertEqual(access_log.model_name, 'user') self.assertEqual(access_log.object_id, user.id) self.assertEqual(access_log.user_uid, user.extra_details.uid) self.assertEqual(access_log.action, AuditAction.AUTH) diff --git a/kpi/constants.py b/kpi/constants.py index 4bcf4c29f0..e7e1569fca 100644 --- a/kpi/constants.py +++ b/kpi/constants.py @@ -136,7 +136,6 @@ LIMIT_HOURS_23 = 82800 -ACCESS_LOG_KOBO_AUTH_APP_LABEL = 'kobo_auth' ACCESS_LOG_LOGINAS_AUTH_TYPE = 'django-loginas' ACCESS_LOG_UNKNOWN_AUTH_TYPE = 'unknown' ACCESS_LOG_SUBMISSION_AUTH_TYPE = 'submission' From 6eed9d510afcc047b9d2631ca74feca64e687e8a Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 17 Sep 2024 08:38:29 -0400 Subject: [PATCH 079/119] fixup!: migration --- .../audit_log/migrations/0010_accesslog.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/kobo/apps/audit_log/migrations/0010_accesslog.py b/kobo/apps/audit_log/migrations/0010_accesslog.py index c369661d34..3b0daea4f1 100644 --- a/kobo/apps/audit_log/migrations/0010_accesslog.py +++ b/kobo/apps/audit_log/migrations/0010_accesslog.py @@ -1,7 +1,21 @@ # Generated by Django 4.2.15 on 2024-09-03 16:38 from django.db import migrations +from django.conf import settings +def manually_update_model_name(apps, schema_editor): + print( + """ + !!! ATTENTION !!! + If you have existing projects you may wish to run the SQL query below in PostgreSQL directly: + + > "UPDATE audit_log_auditlog SET model_name = 'user' where model_name='User'" + + This update is not strictly necessary for functionality but foregoing + it may cause unexpected behavior when querying the audit_log_auditlog table. 
+ + """ + ) class Migration(migrations.Migration): @@ -21,3 +35,17 @@ class Migration(migrations.Migration): bases=('audit_log.auditlog',), ), ] + + if settings.SKIP_HEAVY_MIGRATIONS: + operations.append( + migrations.RunPython( + manually_update_model_name, + migrations.RunPython.noop, + ) + ) + else: + operations.append( + migrations.RunSQL( + sql="UPDATE audit_log_auditlog SET model_name = 'user' where model_name='User'", + reverse_sql=migrations.RunSQL.noop, + )) From 6628e8cd0315cc2387732618ce188bc485a0fd75 Mon Sep 17 00:00:00 2001 From: rgraber Date: Tue, 17 Sep 2024 11:52:09 -0400 Subject: [PATCH 080/119] fixup!: format --- kobo/apps/audit_log/migrations/0010_accesslog.py | 5 ++++- kobo/apps/audit_log/models.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/kobo/apps/audit_log/migrations/0010_accesslog.py b/kobo/apps/audit_log/migrations/0010_accesslog.py index 3b0daea4f1..24950091c8 100644 --- a/kobo/apps/audit_log/migrations/0010_accesslog.py +++ b/kobo/apps/audit_log/migrations/0010_accesslog.py @@ -3,6 +3,7 @@ from django.db import migrations from django.conf import settings + def manually_update_model_name(apps, schema_editor): print( """ @@ -17,6 +18,7 @@ def manually_update_model_name(apps, schema_editor): """ ) + class Migration(migrations.Migration): dependencies = [ @@ -48,4 +50,5 @@ class Migration(migrations.Migration): migrations.RunSQL( sql="UPDATE audit_log_auditlog SET model_name = 'user' where model_name='User'", reverse_sql=migrations.RunSQL.noop, - )) + ) + ) diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index 549a9f6482..6002fdbeed 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -141,7 +141,10 @@ class Meta: @staticmethod def create_from_request( - request, user=None, authentication_type: str = None, extra_metadata:dict = None + request, + user=None, + authentication_type: str = None, + extra_metadata: dict = None, ): """ Create an access log for a request, assigned to either the given user or request.user if not supplied From 85fbc2f5440421350d43e38b0bf51956410fd02a Mon Sep 17 00:00:00 2001 From: rgraber Date: Wed, 18 Sep 2024 14:28:16 -0400 Subject: [PATCH 081/119] fixup!: isort --- kobo/apps/audit_log/migrations/0010_accesslog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kobo/apps/audit_log/migrations/0010_accesslog.py b/kobo/apps/audit_log/migrations/0010_accesslog.py index 24950091c8..382a8053d7 100644 --- a/kobo/apps/audit_log/migrations/0010_accesslog.py +++ b/kobo/apps/audit_log/migrations/0010_accesslog.py @@ -1,7 +1,7 @@ # Generated by Django 4.2.15 on 2024-09-03 16:38 -from django.db import migrations from django.conf import settings +from django.db import migrations def manually_update_model_name(apps, schema_editor): From 720086cff55b175f33c2d25ce340397e5cb15725 Mon Sep 17 00:00:00 2001 From: rgraber Date: Thu, 19 Sep 2024 09:14:46 -0400 Subject: [PATCH 082/119] fixup!: run darker --- kobo/apps/audit_log/mixins.py | 4 +--- kobo/apps/audit_log/models.py | 14 +++----------- .../audit_log/tests/api/v2/test_api_audit_log.py | 11 ++--------- kobo/apps/audit_log/tests/test_models.py | 4 +--- kobo/apps/audit_log/tests/test_signals.py | 2 +- kobo/apps/audit_log/views.py | 4 +--- kpi/constants.py | 12 ++++++------ kpi/views/__init__.py | 10 ++++++---- 8 files changed, 21 insertions(+), 40 deletions(-) diff --git a/kobo/apps/audit_log/mixins.py b/kobo/apps/audit_log/mixins.py index d9102df79e..19eda8eb64 100644 --- a/kobo/apps/audit_log/mixins.py +++ 
b/kobo/apps/audit_log/mixins.py @@ -7,6 +7,4 @@ class RequiresAccessLogMixin: """ def create_access_log(self, request, user, auth_type): - AccessLog.create_from_request( - request, user, authentication_type=auth_type - ) + AccessLog.create_from_request(request, user, authentication_type=auth_type) diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index 6002fdbeed..e386abe9eb 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -1,8 +1,6 @@ import logging from django.conf import settings -from django.contrib.auth.models import AnonymousUser -from django.contrib.contenttypes.models import ContentType from django.db import models from django.utils import timezone @@ -103,22 +101,16 @@ def create(self, **kwargs): # remove any attempt to set fields that should always be the same on an access log app_label = kwargs.pop('app_label', None) if app_label is not None: - logging.warning( - f'Ignoring attempt to set {app_label=} on access log' - ) + logging.warning(f'Ignoring attempt to set {app_label=} on access log') model_name = kwargs.pop('model_name', None) if model_name is not None: - logging.warning( - f'Ignoring attempt to set {model_name=} on access log' - ) + logging.warning(f'Ignoring attempt to set {model_name=} on access log') action = kwargs.pop('action', None) if action is not None: logging.warning(f'Ignoring attempt to set {action=} on access log') log_type = kwargs.pop('log_type', None) if log_type is not None: - logging.warning( - f'Ignoring attempt to set {log_type=} on access log' - ) + logging.warning(f'Ignoring attempt to set {log_type=} on access log') user = kwargs.pop('user') return super().create( # set the fields that are always the same for access logs, pass along the rest to the original constructor diff --git a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py index 3be43faba1..e8fb41c002 100644 --- a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py +++ b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py @@ -5,12 +5,7 @@ from rest_framework import status from rest_framework.reverse import reverse -from kobo.apps.audit_log.models import ( - AccessLog, - AuditAction, - AuditLog, - AuditType, -) +from kobo.apps.audit_log.models import AccessLog, AuditAction, AuditLog, AuditType from kobo.apps.audit_log.serializers import AuditLogSerializer from kobo.apps.audit_log.tests.test_signals import skip_login_access_log from kobo.apps.kobo_auth.shortcuts import User @@ -43,9 +38,7 @@ def force_login_user(self, user): def assert_audit_log_results_equal(self, response, expected_kwargs): # utility method for tests that are just comparing the results of an api call to the results of # manually applying the expected query (simple filters only) - expected = AccessLog.objects.filter(**expected_kwargs).order_by( - '-date_created' - ) + expected = AccessLog.objects.filter(**expected_kwargs).order_by('-date_created') expected_count = expected.count() serializer = AuditLogSerializer( expected, many=True, context=response.renderer_context diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index 7419e05c45..ac7802218c 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -51,9 +51,7 @@ def _check_common_fields(self, access_log: AccessLog, user): self.assertEqual(access_log.action, AuditAction.AUTH) self.assertEqual(access_log.log_type, AuditType.ACCESS) - def 
create_access_log_sets_standard_fields( - self, patched_ip, patched_source - ): + def create_access_log_sets_standard_fields(self, patched_ip, patched_source): yesterday = timezone.now() - timedelta(days=1) log = AccessLog.objects.create( user=AccessLogModelTestCase.super_user, diff --git a/kobo/apps/audit_log/tests/test_signals.py b/kobo/apps/audit_log/tests/test_signals.py index 145685d45b..d183ae37b8 100644 --- a/kobo/apps/audit_log/tests/test_signals.py +++ b/kobo/apps/audit_log/tests/test_signals.py @@ -4,7 +4,7 @@ from allauth.account.models import EmailAddress from django.contrib.auth.signals import user_logged_in from django.test import override_settings -from django.urls import resolve, reverse +from django.urls import reverse from trench.utils import get_mfa_model from kobo.apps.audit_log.models import AuditAction, AuditLog diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index 8d43b4ff08..4d2ab951e0 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -184,8 +184,6 @@ class AccessLogViewSet(AuditLogViewSet): """ - queryset = AccessLog.objects.select_related('user').order_by( - '-date_created' - ) + queryset = AccessLog.objects.select_related('user').order_by('-date_created') permission_classes = (IsAuthenticated,) filter_backends = (AccessLogPermissionsFilter,) diff --git a/kpi/constants.py b/kpi/constants.py index e7e1569fca..663f9c6ab8 100644 --- a/kpi/constants.py +++ b/kpi/constants.py @@ -1,6 +1,6 @@ # coding: utf-8 -SUBMISSION_FORMAT_TYPE_XML = "xml" -SUBMISSION_FORMAT_TYPE_JSON = "json" +SUBMISSION_FORMAT_TYPE_XML = 'xml' +SUBMISSION_FORMAT_TYPE_JSON = 'json' GEO_QUESTION_TYPES = ('geopoint', 'geotrace', 'geoshape') ATTACHMENT_QUESTION_TYPES = ( @@ -46,9 +46,9 @@ # notably not ASSET_TYPE_COLLECTION ] -CLONE_ARG_NAME = "clone_from" -CLONE_FROM_VERSION_ID_ARG_NAME = "clone_from_version_id" -COLLECTION_CLONE_FIELDS = {"name"} +CLONE_ARG_NAME = 'clone_from' +CLONE_FROM_VERSION_ID_ARG_NAME = 'clone_from_version_id' +COLLECTION_CLONE_FIELDS = {'name'} # Types are declared in `kpi.models.assets.ASSET_TYPES`. # These values correspond to index 0 of each tuple of ASSET_TYPES @@ -81,7 +81,7 @@ # List of nested attributes which bypass 'dots' encoding NESTED_MONGO_RESERVED_ATTRIBUTES = [ - "_validation_status", + '_validation_status', ] PREFIX_PARTIAL_PERMS = 'partial_' diff --git a/kpi/views/__init__.py b/kpi/views/__init__.py index 62f1a05a29..0aa81a3d4e 100644 --- a/kpi/views/__init__.py +++ b/kpi/views/__init__.py @@ -14,14 +14,14 @@ def home(request): - return TemplateResponse(request, "index.html") + return TemplateResponse(request, 'index.html') def browser_tests(request): - return TemplateResponse(request, "browser_tests.html") + return TemplateResponse(request, 'browser_tests.html') def modern_browsers(request): - return TemplateResponse(request, "modern_browsers.html") + return TemplateResponse(request, 'modern_browsers.html') @api_view(['POST']) @@ -72,4 +72,6 @@ def authorized_application_authenticate_user(request): # TODO Verify if it's still used def _wrap_html_pre(content): - return "
<pre>%s</pre>" % content + return ( + '<pre>%s</pre>
' % content + ) From 039b7ee9c6c5d6dad5d1ab936b99f9625582786d Mon Sep 17 00:00:00 2001 From: rgraber Date: Thu, 19 Sep 2024 09:35:11 -0400 Subject: [PATCH 083/119] fixup!: formatting for real this time --- kobo/apps/audit_log/migrations/0010_accesslog.py | 6 ++++-- kobo/apps/audit_log/models.py | 8 ++++---- kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py | 9 ++++----- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/kobo/apps/audit_log/migrations/0010_accesslog.py b/kobo/apps/audit_log/migrations/0010_accesslog.py index 382a8053d7..3e20f8c846 100644 --- a/kobo/apps/audit_log/migrations/0010_accesslog.py +++ b/kobo/apps/audit_log/migrations/0010_accesslog.py @@ -8,7 +8,8 @@ def manually_update_model_name(apps, schema_editor): print( """ !!! ATTENTION !!! - If you have existing projects you may wish to run the SQL query below in PostgreSQL directly: + If you have existing projects you may wish to run the SQL query below + in PostgreSQL directly: > "UPDATE audit_log_auditlog SET model_name = 'user' where model_name='User'" @@ -48,7 +49,8 @@ class Migration(migrations.Migration): else: operations.append( migrations.RunSQL( - sql="UPDATE audit_log_auditlog SET model_name = 'user' where model_name='User'", + sql="UPDATE audit_log_auditlog SET model_name = 'user' " + "where model_name='User'", reverse_sql=migrations.RunSQL.noop, ) ) diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index e386abe9eb..81e1555a25 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -1,5 +1,3 @@ -import logging - from django.conf import settings from django.db import models from django.utils import timezone @@ -98,7 +96,8 @@ def get_queryset(self): return super().get_queryset().filter(log_type=AuditType.ACCESS) def create(self, **kwargs): - # remove any attempt to set fields that should always be the same on an access log + # remove any attempt to set fields that should + # always be the same on an access log app_label = kwargs.pop('app_label', None) if app_label is not None: logging.warning(f'Ignoring attempt to set {app_label=} on access log') @@ -113,7 +112,8 @@ def create(self, **kwargs): logging.warning(f'Ignoring attempt to set {log_type=} on access log') user = kwargs.pop('user') return super().create( - # set the fields that are always the same for access logs, pass along the rest to the original constructor + # set the fields that are always the same for access logs, + # pass along the rest to the original constructor app_label=User._meta.app_label, model_name=User._meta.model_name, action=AuditAction.AUTH, diff --git a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py index e8fb41c002..942f7b298a 100644 --- a/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py +++ b/kobo/apps/audit_log/tests/api/v2/test_api_audit_log.py @@ -209,13 +209,12 @@ def setUp(self): super_user = User.objects.get(username='admin') user2 = User.objects.get(username='anotheruser') # generate 3 access logs, 2 for superuser, 1 for user2 - # generate 3 access logs, 2 for user1, 1 for user2 - user_1_log_1 = AccessLog.objects.create(user=super_user) - user_1_log_2 = AccessLog.objects.create(user=super_user) - user_2_log_1 = AccessLog.objects.create(user=super_user) + AccessLog.objects.create(user=super_user) + AccessLog.objects.create(user=super_user) + AccessLog.objects.create(user=user2) # create a random non-auth audit log - log = AuditLog.objects.create( + AuditLog.objects.create( 
user=User.objects.get(username='someuser'), app_label='foo', model_name='bar', From 6061526f649dcfcab7371c3269d1e4d89d824a3d Mon Sep 17 00:00:00 2001 From: rgraber Date: Thu, 19 Sep 2024 10:00:16 -0400 Subject: [PATCH 084/119] fixup!: fix test name --- kobo/apps/audit_log/tests/test_models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index ac7802218c..f60e1b31b9 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -51,7 +51,7 @@ def _check_common_fields(self, access_log: AccessLog, user): self.assertEqual(access_log.action, AuditAction.AUTH) self.assertEqual(access_log.log_type, AuditType.ACCESS) - def create_access_log_sets_standard_fields(self, patched_ip, patched_source): + def test_create_access_log_sets_standard_fields(self, patched_ip, patched_source): yesterday = timezone.now() - timedelta(days=1) log = AccessLog.objects.create( user=AccessLogModelTestCase.super_user, @@ -63,7 +63,7 @@ def create_access_log_sets_standard_fields(self, patched_ip, patched_source): self.assertDictEqual(log.metadata, {'foo': 'bar'}) @patch('kobo.apps.audit_log.models.logging.warning') - def create_access_log_ignores_attempt_to_override_standard_fields( + def test_create_access_log_ignores_attempt_to_override_standard_fields( self, patched_warning, patched_ip, patched_source ): log = AccessLog.objects.create( From 16469f3ad73e7b50862d6080549d7886af6b00ad Mon Sep 17 00:00:00 2001 From: rgraber Date: Thu, 19 Sep 2024 10:52:49 -0400 Subject: [PATCH 085/119] fix: keep default ignore list for flake8 --- .github/workflows/darker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/darker.yml b/.github/workflows/darker.yml index 18dd2763d1..4b6ee01246 100644 --- a/.github/workflows/darker.yml +++ b/.github/workflows/darker.yml @@ -24,6 +24,6 @@ jobs: # darker still exit with code 1 even with no errors on changes - name: Run Darker with base commit run: | - output=$(darker --check --isort -L "flake8 --max-line-length=88 --ignore=F821" kpi kobo hub -r ${{ github.event.pull_request.base.sha }}) + output=$(darker --check --isort -L "flake8 --max-line-length=88 --extend-ignore=F821" kpi kobo hub -r ${{ github.event.pull_request.base.sha }}) [[ -n "$output" ]] && echo "$output" && exit 1 || exit 0 shell: /usr/bin/bash {0} From 5422e925991b897182f8c9573afbe7376ee7d282 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 17:12:59 -0400 Subject: [PATCH 086/119] Apply requested changes --- kobo/apps/subsequences/api_view.py | 11 +- kobo/apps/subsequences/utils/__init__.py | 19 ++- kobo/apps/subsequences/utils/deprecation.py | 128 ++++++++++---------- kpi/models/asset.py | 49 ++++---- 4 files changed, 113 insertions(+), 94 deletions(-) diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index 7cf40fc96c..d5e806c66b 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -9,7 +9,7 @@ from kobo.apps.subsequences.models import SubmissionExtras from kobo.apps.subsequences.utils.deprecation import ( - sanitize_submission_extra_content, + get_sanitized_dict_keys, ) from kpi.models import Asset from kpi.permissions import SubmissionPermission @@ -102,9 +102,12 @@ def get_submission_processing(asset, s_uuid): try: submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) - # TODO delete line below when every asset is repopulated with 
`xpath` - # instead of `qpath`. - sanitize_submission_extra_content(submission_extra, asset) + # TODO delete two lines below when every asset is repopulated with + # `xpath` instead of `qpath`. + if content := get_sanitized_dict_keys( + submission_extra.content, asset + ): + submission_extra.content = content return Response(submission_extra.content) except SubmissionExtras.DoesNotExist: diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index 75a28cec6e..ce5f223909 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -1,6 +1,10 @@ from collections import defaultdict from copy import deepcopy +from .deprecation import ( + get_sanitized_advanced_features, + get_sanitized_dict_keys, +) from ..actions.automatic_transcription import AutomaticTranscriptionAction from ..actions.translation import TranslationAction from ..actions.qual import QualAction @@ -83,10 +87,10 @@ def advanced_submission_jsonschema(content, actions, url=None): content = populate_paths(content) # devhack: this keeps serializer from breaking when old params # are still in the database - if 'translated' in actions: # migration + if 'translated' in actions: # migration actions['translation'] = actions['translated'] # migration assert 'languages' in actions['translation'] - del actions['translated'] # migration + del actions['translated'] # migration # /devhack for action_id, action_params in actions.items(): @@ -131,12 +135,19 @@ def stream_with_extras(submission_stream, asset): extras = dict( asset.submission_extras.values_list('submission_uuid', 'content') ) + + if asset.advanced_features and ( + advanced_features := get_sanitized_advanced_features(asset) + ): + asset.advanced_features = advanced_features + try: qual_survey = asset.advanced_features['qual']['qual_survey'] except KeyError: qual_survey = [] else: qual_survey = deepcopy(qual_survey) + # keys are question UUIDs, values are question definitions qual_questions_by_uuid = {} # outer keys are question UUIDs, inner keys are choice UUIDs, values are @@ -201,5 +212,7 @@ def stream_with_extras(submission_stream, asset): val_expanded = val_expanded[0] qual_response['val'] = val_expanded qual_response.update(qual_q) - submission[SUPPLEMENTAL_DETAILS_KEY] = all_supplemental_details + submission[SUPPLEMENTAL_DETAILS_KEY] = get_sanitized_dict_keys( + all_supplemental_details, asset + ) yield submission diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py index 1c35b73f97..e63abc9ccc 100644 --- a/kobo/apps/subsequences/utils/deprecation.py +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -1,62 +1,61 @@ from __future__ import annotations +import json from copy import deepcopy -from typing import Optional - -import jsonschema from kpi.fields import WritableJSONField -from ..advanced_features_params_schema import ( - ADVANCED_FEATURES_PARAMS_SCHEMA, -) - - -def jsonschema_validate(asset: 'Asset'): - try: - jsonschema.validate( - instance=asset.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) - except jsonschema.exceptions.ValidationError as e: - if "'qpath' was unexpected" not in str(e): - raise - - qual_survey_orig = asset.advanced_features['qual']['qual_survey'] - qual_survey_iter = deepcopy(qual_survey_orig) - for idx, qual_q in enumerate(qual_survey_iter): - qpath = qual_survey_orig[idx]['qpath'] - xpath = qpath_to_xpath(qpath, asset) - del qual_survey_orig[idx]['qpath'] - qual_survey_orig[idx]['xpath'] = xpath 
- - jsonschema.validate( - instance=asset.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - ) -def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: +def get_sanitized_advanced_features(asset: 'Asset') -> dict | None: """ - We have abandoned `qpath` attribute in favor of `xpath`. - Existing projects may still use it though. - We need to find the equivalent `xpath` + Replace `qpath` attributes (if present) with their `xpath` + counterparts in asset.advanced_features """ - for row in asset.content['survey']: - if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: - return row['$xpath'] - # Could not find it from the survey, let's try to detect it automatically - xpaths = asset.get_attachment_xpaths(deployed=True) - for xpath in xpaths: - dashed_xpath = xpath.replace('/', '-') - if dashed_xpath == qpath: - return xpath + if not asset.advanced_features: + return + + if 'qpath' not in json.dumps(asset.advanced_features): + return + + advanced_features = deepcopy(asset.advanced_features) + qual_survey_orig = advanced_features['qual']['qual_survey'] + qual_survey_iter = deepcopy(qual_survey_orig) + for idx, qual_q in enumerate(qual_survey_iter): + qpath = qual_survey_orig[idx]['qpath'] + xpath = qpath_to_xpath(qpath, asset) + del qual_survey_orig[idx]['qpath'] + qual_survey_orig[idx]['xpath'] = xpath + + return advanced_features - raise KeyError(f'xpath for {qpath} not found') +def get_sanitized_dict_keys(dict_to_update: dict, asset: 'Asset') -> dict | None: + """ + Update `dict_to_update` keys created with `qpath`(if they are present) with + their `xpath` counterpart. + """ + updated_dict = deepcopy(dict_to_update) + changed = False + for old_xpath, values in dict_to_update.items(): + if '-' in old_xpath and '/' not in old_xpath: + xpath = qpath_to_xpath(old_xpath, asset) + if xpath == old_xpath: + continue + + del updated_dict[old_xpath] + updated_dict[xpath] = values + changed = True -def sanitize_known_columns(asset: 'Asset'): - for idx, known_column in enumerate(asset.known_cols): + if changed: + return updated_dict + + +def get_sanitized_known_columns(asset: 'Asset') -> list: + + known_cols = list(asset.known_cols) + + for idx, known_column in enumerate(known_cols): xpath, *rest = known_column.split(':') # Old `qpath` should not contain "/", but could contain "-". # If the question does not belong to a group but does contain "-", @@ -65,32 +64,29 @@ def sanitize_known_columns(asset: 'Asset'): if '-' in xpath and '/' not in xpath: xpath = qpath_to_xpath(xpath, asset) rest.insert(0, xpath) - asset.known_cols[idx] = ':'.join(rest) + known_cols[idx] = ':'.join(rest) - # TODO Should we save asset.known_cols if it has changed? + return known_cols -def sanitize_submission_extra_content( - submission_extra: 'SubmissionExtras', asset: 'Asset' -) -> Optional[dict]: +def qpath_to_xpath(qpath: str, asset: 'Asset') -> str: """ - Replace with `qpath` attribute (if it exists) with `xpath` counterpart + We have abandoned `qpath` attribute in favor of `xpath`. + Existing projects may still use it though. + We need to find the equivalent `xpath`. 
""" - content = deepcopy(submission_extra.content) - changed = False - for old_xpath, values in submission_extra.content.items(): - if '-' in old_xpath and '/' not in old_xpath: - xpath = qpath_to_xpath(old_xpath, asset) - if xpath == old_xpath: - continue + for row in asset.content['survey']: + if '$qpath' in row and '$xpath' in row and row['$qpath'] == qpath: + return row['$xpath'] - del content[old_xpath] - content[xpath] = values - changed = True + # Could not find it from the survey, let's try to detect it automatically + xpaths = asset.get_attachment_xpaths(deployed=True) + for xpath in xpaths: + dashed_xpath = xpath.replace('/', '-') + if dashed_xpath == qpath: + return xpath - if changed: - submission_extra.content = content - # TODO Should we save submission_extra? + raise KeyError(f'xpath for {qpath} not found') class WritableAdvancedFeaturesField(WritableJSONField): @@ -107,7 +103,7 @@ def __init__(self, **kwargs): def to_representation(self, value): self._model_instance.validate_advanced_features() - return value + return self._model_instance.advanced_features def get_attribute(self, instance): self._model_instance = instance diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 64301925bd..7165e8d48c 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -1,11 +1,10 @@ -# coding: utf-8 -# 😬 import copy import re from functools import reduce from operator import add from typing import Optional, Union +import jsonschema from django.conf import settings from django.contrib.auth.models import Permission from django.contrib.postgres.indexes import BTreeIndex, GinIndex @@ -20,7 +19,6 @@ from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile - from kobo.apps.reports.constants import ( SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY, @@ -29,11 +27,14 @@ advanced_feature_instances, advanced_submission_jsonschema, ) +from kobo.apps.subsequences.advanced_features_params_schema import ( + ADVANCED_FEATURES_PARAMS_SCHEMA, +) from kobo.apps.subsequences.utils.deprecation import ( - jsonschema_validate, + get_sanitized_known_columns, + get_sanitized_dict_keys, + get_sanitized_advanced_features, qpath_to_xpath, - sanitize_known_columns, - sanitize_submission_extra_content, ) from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( @@ -144,15 +145,15 @@ def add(self, *tags, **kwargs): strips leading and trailng whitespace. Behavior should match the cleanupTags function in jsapp/js/utils.ts. """ tags_out = [] - for t in tags: + for tag in tags: # Modify strings only; the superclass' add() method will then # create Tags or use existing ones as appropriate. We do not fix # existing Tag objects, which could also be passed into this # method, because a fixed name could collide with the name of # another Tag object already in the database. 
- if isinstance(t, str): - t = t.strip().replace(' ', '-') - tags_out.append(t) + if isinstance(tag, str): + tag = tag.strip().replace(' ', '-') + tags_out.append(tag) super().add(*tags_out, **kwargs) @@ -580,9 +581,14 @@ def get_advanced_feature_instances(self): return advanced_feature_instances(self.content, self.advanced_features) def get_advanced_submission_schema(self, url=None, content=False): + if len(self.advanced_features) == 0: NO_FEATURES_MSG = 'no advanced features activated for this form' return {'type': 'object', '$description': NO_FEATURES_MSG} + + if advanced_features := get_sanitized_advanced_features(self): + self.advanced_features = advanced_features + last_deployed_version = self.deployed_versions.first() if content: return advanced_submission_jsonschema( @@ -1091,8 +1097,11 @@ def update_submission_extra(self, content, user=None): .first() ) instances = self.get_advanced_feature_instances() - sanitize_submission_extra_content(sub, self) + if sub_extra_content := get_sanitized_dict_keys(sub.content, self): + sub.content = sub_extra_content + compiled_content = {**sub.content} + for instance in instances: compiled_content = instance.compile_revised_record( compiled_content, edits=content @@ -1155,15 +1164,13 @@ def validate_advanced_features(self): if self.advanced_features is None: self.advanced_features = {} - # TODO uncomment the 4 lines below… - # jsonschema_validate( - # instance=self.advanced_features, - # schema=ADVANCED_FEATURES_PARAMS_SCHEMA, - # ) + if advanced_features := get_sanitized_advanced_features(self): + self.advanced_features = advanced_features - # TODO … and delete this one when every asset is repopulated with - # `xpath` instead of `qpath`. - jsonschema_validate(self) + jsonschema.validate( + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + ) @property def version__content_hash(self): @@ -1195,8 +1202,8 @@ def version_number_and_date(self) -> str: def _get_additional_fields(self): - # TODO line below when when every asset is repopulated with `xpath` - sanitize_known_columns(self) + # TODO Remove line below when when every asset is repopulated with `xpath` + self.known_cols = get_sanitized_known_columns(self) return parse_known_cols(self.known_cols) From fd6e61a1ddc089b7c30a05d9be38152b7d1c0a7c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 17:13:53 -0400 Subject: [PATCH 087/119] Improve repop_known_cols script efficiency to update data --- .../subsequences/scripts/repop_known_cols.py | 117 +++++++++++++----- 1 file changed, 87 insertions(+), 30 deletions(-) diff --git a/kobo/apps/subsequences/scripts/repop_known_cols.py b/kobo/apps/subsequences/scripts/repop_known_cols.py index f01cc71da4..8f1b57a043 100644 --- a/kobo/apps/subsequences/scripts/repop_known_cols.py +++ b/kobo/apps/subsequences/scripts/repop_known_cols.py @@ -1,69 +1,126 @@ -# coding: utf-8 - -''' +""" Usage: python manage.py runscript repop_known_cols --script-args= -''' -import re +""" + import json -# from pprint import pprint -from kpi.models.asset import Asset -from kobo.apps.subsequences.models import SubmissionExtras +from django.core.paginator import Paginator -from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols +from kobo.apps.subsequences.models import SubmissionExtras from kobo.apps.subsequences.utils.determine_export_cols_with_values import ( determine_export_cols_with_values, ) +from kobo.apps.subsequences.utils.deprecation import ( + get_sanitized_known_columns, + get_sanitized_dict_keys, +) +from 
kpi.models.asset import Asset -def migrate_subex_content(sub_ex): +def migrate_subex_content( + sub_ex: SubmissionExtras, asset: Asset, save=True +) -> SubmissionExtras: content_string = json.dumps(sub_ex.content) - if '"translated"' in content_string: # migration - content_string = content_string.replace('"translated"', '"translation"') # migration + if '"translated"' in content_string: # migration + content_string = content_string.replace( + '"translated"', '"translation"' + ) # migration sub_ex.content = json.loads(content_string) + if content := get_sanitized_dict_keys(sub_ex.content, asset): + sub_ex.content = content print('submission_extra has old content') - sub_ex.save() + if save: + sub_ex.save() + return sub_ex -def migrate_subex_content_for_asset(asset): +def migrate_subex_content_for_asset(asset, save=True): + submission_extras = [] for sub_ex in asset.submission_extras.all(): - migrate_subex_content(sub_ex) + if updated_sub_ex := migrate_subex_content( + sub_ex, asset=asset, save=save + ): + submission_extras.append(updated_sub_ex) + return submission_extras -def repop_asset_known_cols(asset): + +def repop_asset_known_cols(asset, save=True): print(f'for_asset: {asset.uid}') print(' before:') print(' - ' + '\n - '.join(sorted(asset.known_cols))) known_cols = determine_export_cols_with_values(asset.submission_extras.all()) - asset.known_cols = known_cols - if 'translated' in asset.advanced_features: # migration - asset.advanced_features['translation'] = asset.advanced_features['translated'] # migration - del asset.advanced_features['translated'] # migration - asset.save(create_version=False) + asset.known_cols = get_sanitized_known_columns(asset) + if 'translated' in asset.advanced_features: # migration + asset.advanced_features['translation'] = asset.advanced_features['translated'] + del asset.advanced_features['translated'] + if save: + asset.save(create_version=False) print(' after:') print(' - ' + '\n - '.join(sorted(known_cols))) -def migrate_advanced_features(asset): - if 'translated' in asset.advanced_features: # migration - asset.advanced_features['translation'] = asset.advanced_features['translated'] # migration - asset.save(create_version=False) +def migrate_advanced_features(asset, save=True): + if 'translated' in asset.advanced_features: # migration + asset.advanced_features['translation'] = asset.advanced_features['translated'] + if save: + asset.save(create_version=False) def run(asset_uid=None): + if asset_uid == "!": SubmissionExtras.objects.all().delete() - for asset in Asset.objects.exclude(advanced_features__exact={}).all(): + for asset in Asset.objects.exclude(advanced_features__exact={}).iterator(): asset.advanced_features = {} asset.save(create_version=False) repop_asset_known_cols(asset) print('Note:\nRemoved all transcript+translation related data from all assets') elif asset_uid is None: - for asset in Asset.objects.exclude(advanced_features__exact={}).all(): - migrate_advanced_features(asset) - migrate_subex_content_for_asset(asset) - repop_asset_known_cols(asset) + + page_size = 2000 + paginator = Paginator( + Asset.objects.only( + 'id', + 'uid', + 'content', + 'advanced_features', + 'known_cols', + 'summary', + 'asset_type', + ) + .prefetch_related('submission_extras') + .exclude(advanced_features__exact={}) + .order_by('pk'), + page_size, + ) + + for page in paginator.page_range: + assets = paginator.page(page).object_list + updated_assets = [] + updated_submission_extras = [] + for asset in assets: + print(f'Processing asset {asset.uid}') + 
migrate_advanced_features(asset, save=False) + updated_submission_extras.extend( + migrate_subex_content_for_asset(asset, save=False) + ) + repop_asset_known_cols(asset, save=False) + asset.adjust_content_on_save() + asset.validate_advanced_features() + updated_assets.append(asset) + + if updated_assets: + Asset.objects.bulk_update( + updated_assets, + ['content', 'advanced_features', 'known_cols'], + ) + + if updated_submission_extras: + SubmissionExtras.objects.bulk_update( + updated_submission_extras, ['content'] + ) else: asset = Asset.objects.get(uid=asset_uid) migrate_subex_content_for_asset(asset) From ed43fb249a314485ca089b7d9e899f1c8db32e24 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 17:50:24 -0400 Subject: [PATCH 088/119] Fix linter errors --- kobo/settings/base.py | 112 ++++++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 47 deletions(-) diff --git a/kobo/settings/base.py b/kobo/settings/base.py index e9085252cd..973f208b70 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -1,9 +1,9 @@ # coding: utf-8 import logging -import warnings import os import string import subprocess +import warnings from datetime import datetime, timedelta from mimetypes import add_type from urllib.parse import quote_plus @@ -13,12 +13,13 @@ from celery.schedules import crontab from django.conf import global_settings from django.urls import reverse_lazy -from django.utils.translation import get_language_info, gettext_lazy as t +from django.utils.translation import get_language_info +from django.utils.translation import gettext_lazy as t from pymongo import MongoClient from kobo.apps.stripe.constants import ( - FREE_TIER_NO_THRESHOLDS, FREE_TIER_EMPTY_DISPLAY, + FREE_TIER_NO_THRESHOLDS, ) from kpi.utils.json import LazyJSONSerializable from ..static_lists import EXTRA_LANG_INFO, SECTOR_CHOICE_DEFAULTS @@ -38,7 +39,7 @@ # See: https://docs.djangoproject.com/en/1.8/ref/settings/#secure-proxy-ssl-header. # Example environment: `export SECURE_PROXY_SSL_HEADER='HTTP_X_FORWARDED_PROTO, https'`. # SECURITY WARNING: If enabled, outer web server must filter out the `X-Forwarded-Proto` header. -SECURE_PROXY_SSL_HEADER = env.tuple("SECURE_PROXY_SSL_HEADER", str, None) +SECURE_PROXY_SSL_HEADER = env.tuple('SECURE_PROXY_SSL_HEADER', str, None) public_request_scheme = env.str('PUBLIC_REQUEST_SCHEME', 'https').lower() @@ -52,7 +53,7 @@ # Make Django use NginX $host. Useful when running with ./manage.py runserver_plus # It avoids adding the debugger webserver port (i.e. `:8000`) at the end of urls. -USE_X_FORWARDED_HOST = env.bool("USE_X_FORWARDED_HOST", False) +USE_X_FORWARDED_HOST = env.bool('USE_X_FORWARDED_HOST', False) # Domain must not exclude KoBoCAT when sharing sessions SESSION_COOKIE_DOMAIN = env.str('SESSION_COOKIE_DOMAIN', None) @@ -73,7 +74,7 @@ LANGUAGE_COOKIE_AGE = SESSION_COOKIE_AGE # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = env.bool("DJANGO_DEBUG", False) +DEBUG = env.bool('DJANGO_DEBUG', False) ALLOWED_HOSTS = env.str('DJANGO_ALLOWED_HOSTS', '*').split(' ') @@ -383,7 +384,7 @@ 'metadata_fields_jsonschema' ), 'SECTOR_CHOICES': ( - '\n'.join((s[0] for s in SECTOR_CHOICE_DEFAULTS)), + '\n'.join(s[0] for s in SECTOR_CHOICE_DEFAULTS), "Options available for the 'sector' metadata field, one per line.", 'long_textfield' ), @@ -1026,11 +1027,13 @@ def __init__(self, *args, **kwargs): DEFAULT_DEPLOYMENT_BACKEND = 'mock' -''' Stripe configuration intended for kf.kobotoolbox.org only, tracks usage limit exceptions ''' +""" +Stripe configuration intended for kf.kobotoolbox.org only, +tracks usage limit exceptions +""" STRIPE_ENABLED = env.bool('STRIPE_ENABLED', False) - def dj_stripe_request_callback_method(): # This method exists because dj-stripe's documentation doesn't reflect reality. # It claims that DJSTRIPE_SUBSCRIBER_MODEL no longer needs a request callback but @@ -1046,7 +1049,9 @@ def dj_stripe_request_callback_method(): DJSTRIPE_FOREIGN_KEY_TO_FIELD = 'id' DJSTRIPE_USE_NATIVE_JSONFIELD = True STRIPE_LIVE_MODE = env.bool('STRIPE_LIVE_MODE', False) -STRIPE_TEST_PUBLIC_KEY = env.str('STRIPE_TEST_PUBLIC_KEY', 'pk_test_qliDXQRyVGPWmsYR69tB1NPx00ndTrJfVM') +STRIPE_TEST_PUBLIC_KEY = env.str( + 'STRIPE_TEST_PUBLIC_KEY', 'pk_test_qliDXQRyVGPWmsYR69tB1NPx00ndTrJfVM' +) STRIPE_LIVE_PUBLIC_KEY = 'pk_live_7JRQ5elvhnmz4YuWdlSRNmMj00lhvqZz8P' if STRIPE_ENABLED: INSTALLED_APPS += ('djstripe', 'kobo.apps.stripe') @@ -1056,14 +1061,16 @@ def dj_stripe_request_callback_method(): DJSTRIPE_WEBHOOK_VALIDATION = env.str('DJSTRIPE_WEBHOOK_VALIDATION', 'verify_signature') STRIPE_PUBLIC_KEY = STRIPE_LIVE_PUBLIC_KEY if STRIPE_LIVE_MODE else STRIPE_TEST_PUBLIC_KEY -'''Organizations settings''' +"""Organizations settings""" # necessary to prevent calls to `/organizations/{ORG_ID}/service_usage/` (and any other # queries that may need to aggregate data for all organization users) from slowing down db ORGANIZATION_USER_LIMIT = env.str('ORGANIZATION_USER_LIMIT', 400) -''' Enketo configuration ''' -ENKETO_URL = os.environ.get('ENKETO_URL') or os.environ.get('ENKETO_SERVER', 'https://change-me.invalid') +""" Enketo configuration """ +ENKETO_URL = os.environ.get('ENKETO_URL') or os.environ.get( + 'ENKETO_SERVER', 'https://change-me.invalid' +) ENKETO_URL = ENKETO_URL.rstrip('/') # Remove any trailing slashes ENKETO_VERSION = os.environ.get('ENKETO_VERSION', 'Legacy').lower() ENKETO_INTERNAL_URL = os.environ.get('ENKETO_INTERNAL_URL', ENKETO_URL) @@ -1082,7 +1089,7 @@ def dj_stripe_request_callback_method(): # Content Security Policy (CSP) # CSP should "just work" by allowing any possible configuration # however CSP_EXTRA_DEFAULT_SRC is provided to allow for custom additions -if env.bool("ENABLE_CSP", False): +if env.bool('ENABLE_CSP', False): MIDDLEWARE.append('csp.middleware.CSPMiddleware') local_unsafe_allows = [ "'unsafe-eval'", @@ -1090,8 +1097,12 @@ def dj_stripe_request_callback_method(): 'http://kf.kobo.local:3000', 'ws://kf.kobo.local:3000' ] -CSP_DEFAULT_SRC = env.list('CSP_EXTRA_DEFAULT_SRC', str, []) + ["'self'", KOBOCAT_URL, ENKETO_URL] -if env.str("FRONTEND_DEV_MODE", None) == "host": +CSP_DEFAULT_SRC = env.list('CSP_EXTRA_DEFAULT_SRC', str, []) + [ + "'self'", + KOBOCAT_URL, + ENKETO_URL, +] +if env.str('FRONTEND_DEV_MODE', None) == 'host': CSP_DEFAULT_SRC += local_unsafe_allows CSP_CONNECT_SRC = CSP_DEFAULT_SRC CSP_SCRIPT_SRC = CSP_DEFAULT_SRC @@ -1108,29 +1119,37 @@ def dj_stripe_request_callback_method(): if 
GOOGLE_ANALYTICS_TOKEN: # Taken from https://developers.google.com/tag-platform/tag-manager/csp#google_analytics_4_google_analytics CSP_SCRIPT_SRC.append('https://*.googletagmanager.com') - CSP_CONNECT_SRC.extend(['https://*.google-analytics.com', 'https://*.analytics.google.com', 'https://*.googletagmanager.com']) - CSP_IMG_SRC.extend(['https://*.google-analytics.com', 'https://*.googletagmanager.com']) + CSP_CONNECT_SRC.extend( + [ + 'https://*.google-analytics.com', + 'https://*.analytics.google.com', + 'https://*.googletagmanager.com', + ] + ) + CSP_IMG_SRC.extend( + ['https://*.google-analytics.com', 'https://*.googletagmanager.com'] + ) if SENTRY_JS_DSN_URL and SENTRY_JS_DSN_URL.scheme: sentry_js_url = SENTRY_JS_DSN_URL.scheme + '://' + SENTRY_JS_DSN_URL.hostname CSP_SCRIPT_SRC.append(sentry_js_url) CSP_CONNECT_SRC.append(sentry_js_url) if STRIPE_ENABLED: - stripe_domain = "https://js.stripe.com" + stripe_domain = 'https://js.stripe.com' CSP_SCRIPT_SRC.append(stripe_domain) CSP_FRAME_SRC.append(stripe_domain) csp_report_uri = env.url('CSP_REPORT_URI', None) if csp_report_uri: # Let environ validate uri, but set as string CSP_REPORT_URI = csp_report_uri.geturl() -CSP_REPORT_ONLY = env.bool("CSP_REPORT_ONLY", False) +CSP_REPORT_ONLY = env.bool('CSP_REPORT_ONLY', False) -''' Celery configuration ''' +""" Celery configuration """ # Celery 4.0 New lowercase settings. # Uppercase settings can be used when using a PREFIX # http://docs.celeryproject.org/en/latest/userguide/configuration.html#new-lowercase-settings # http://docs.celeryproject.org/en/4.0/whatsnew-4.0.html#step-2-update-your-configuration-with-the-new-setting-names -CELERY_TIMEZONE = "UTC" +CELERY_TIMEZONE = 'UTC' # helpful for certain debugging CELERY_TASK_ALWAYS_EAGER = env.bool('SKIP_CELERY', False) @@ -1205,15 +1224,15 @@ def dj_stripe_request_callback_method(): CELERY_BROKER_TRANSPORT_OPTIONS = { - "fanout_patterns": True, - "fanout_prefix": True, + 'fanout_patterns': True, + 'fanout_prefix': True, # http://docs.celeryproject.org/en/latest/getting-started/brokers/redis.html#redis-visibility-timeout # TODO figure out how to pass `Constance.HOOK_MAX_RETRIES` or `HookLog.get_remaining_seconds() # Otherwise hardcode `HOOK_MAX_RETRIES` in Settings - "visibility_timeout": 60 * (10 ** 2) # Longest ETA for RestService (seconds) + 'visibility_timeout': 60 * (10**2), # Longest ETA for RestService (seconds) } -CELERY_TASK_DEFAULT_QUEUE = "kpi_queue" +CELERY_TASK_DEFAULT_QUEUE = 'kpi_queue' if 'KOBOCAT_URL' in os.environ: SYNC_KOBOCAT_PERMISSIONS = ( @@ -1221,7 +1240,7 @@ def dj_stripe_request_callback_method(): CELERY_BROKER_URL = os.environ.get( 'CELERY_BROKER_URL', - os.environ.get('KPI_BROKER_URL', 'redis://change-me.invalid:6379/1') + os.environ.get('KPI_BROKER_URL', 'redis://change-me.invalid:6379/1'), ) if 'KPI_BROKER_URL' in os.environ: warnings.warn( @@ -1241,7 +1260,7 @@ def dj_stripe_request_callback_method(): os.environ.get('CELERY_LONG_RUNNING_TASK_SOFT_TIME_LIMIT', 4200) # seconds ) -''' Django allauth configuration ''' +""" Django allauth configuration """ # User.email should continue to be used instead of the EmailAddress model ACCOUNT_ADAPTER = 'kobo.apps.accounts.adapter.AccountAdapter' ACCOUNT_USERNAME_VALIDATORS = 'kobo.apps.accounts.validators.username_validators' @@ -1263,7 +1282,7 @@ def dj_stripe_request_callback_method(): # For SSO, the signup form is prepopulated with the account email # If set True, the email field in the SSO signup form will be readonly UNSAFE_SSO_REGISTRATION_EMAIL_DISABLE = env.bool( - 
"UNSAFE_SSO_REGISTRATION_EMAIL_DISABLE", False + 'UNSAFE_SSO_REGISTRATION_EMAIL_DISABLE', False ) WEBPACK_LOADER = { @@ -1275,7 +1294,7 @@ def dj_stripe_request_callback_method(): } -''' Email configuration ''' +""" Email configuration """ # This setting sets the prefix in the subject line of the account activation email # The default is the URL of the server. Set to blank to fit the email requirements ACCOUNT_EMAIL_SUBJECT_PREFIX = '' @@ -1304,7 +1323,7 @@ def dj_stripe_request_callback_method(): EMAIL_USE_TLS = os.environ.get('EMAIL_USE_TLS') -''' AWS configuration (email and storage) ''' +""" AWS configuration (email and storage) """ if env.str('AWS_ACCESS_KEY_ID', False): AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID') AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY') @@ -1384,7 +1403,7 @@ def dj_stripe_request_callback_method(): GS_BUCKET_NAME = env.str('GS_BUCKET_NAME', None) -''' Django error logging configuration ''' +""" Django error logging configuration """ LOGGING = { 'version': 1, 'disable_existing_loggers': False, @@ -1458,7 +1477,7 @@ def dj_stripe_request_callback_method(): ) -''' Try to identify the running codebase for informational purposes ''' +""" Try to identify the running codebase for informational purposes """ # Based upon https://github.com/tblobaum/git-rev/blob/master/index.js GIT_REV = {} for git_rev_key, git_command in ( @@ -1476,14 +1495,14 @@ def dj_stripe_request_callback_method(): GIT_REV['branch'] = False -''' +""" Since this project handles user creation, we must handle the model-level permission assignment that would've been done by KoBoCAT's user post_save signal handler. Here we record the content types of the models listed in KC's set_api_permissions_for_user(). Verify that this list still matches that function if you experience permission-related problems. See https://github.com/kobotoolbox/kobocat/blob/master/onadata/libs/utils/user_auth.py. -''' +""" KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES = [ # Each tuple must be (app_label, model_name) ('main', 'userprofile'), @@ -1495,7 +1514,7 @@ def dj_stripe_request_callback_method(): TESTING = False -''' Auxiliary database configuration ''' +""" Auxiliary database configuration """ if not (MONGO_DB_URL := env.str('MONGO_DB_URL', False)): # ToDo Remove all this block by the end of 2022. 
# Update kobo-install accordingly @@ -1515,16 +1534,15 @@ def dj_stripe_request_callback_method(): } if MONGO_DATABASE.get('USER') and MONGO_DATABASE.get('PASSWORD'): - MONGO_DB_URL = "mongodb://{user}:{password}@{host}:{port}/{db_name}".\ - format( - user=MONGO_DATABASE['USER'], - password=quote_plus(MONGO_DATABASE['PASSWORD']), - host=MONGO_DATABASE['HOST'], - port=MONGO_DATABASE['PORT'], - db_name=MONGO_DATABASE['NAME'] - ) + MONGO_DB_URL = 'mongodb://{user}:{password}@{host}:{port}/{db_name}'.format( + user=MONGO_DATABASE['USER'], + password=quote_plus(MONGO_DATABASE['PASSWORD']), + host=MONGO_DATABASE['HOST'], + port=MONGO_DATABASE['PORT'], + db_name=MONGO_DATABASE['NAME'], + ) else: - MONGO_DB_URL = "mongodb://%(HOST)s:%(PORT)s/%(NAME)s" % MONGO_DATABASE + MONGO_DB_URL = 'mongodb://%(HOST)s:%(PORT)s/%(NAME)s' % MONGO_DATABASE mongo_db_name = MONGO_DATABASE['NAME'] else: # Attempt to get collection name from the connection string @@ -1684,10 +1702,10 @@ def dj_stripe_request_callback_method(): } USE_THOUSAND_SEPARATOR = True -DIGEST_NONCE_BACKEND = 'kobo.apps.openrosa.apps.django_digest_backends.cache.RedisCacheNonceStorage' +DIGEST_NONCE_BACKEND = 'kobo.apps.openrosa.apps.django_digest_backends.cache.RedisCacheNonceStorage' # noqa # Needed to get ANONYMOUS_USER = -1 -GUARDIAN_GET_INIT_ANONYMOUS_USER = 'kobo.apps.openrosa.apps.main.models.user_profile.get_anonymous_user_instance' +GUARDIAN_GET_INIT_ANONYMOUS_USER = 'kobo.apps.openrosa.apps.main.models.user_profile.get_anonymous_user_instance' # noqa KPI_HOOK_ENDPOINT_PATTERN = '/api/v2/assets/{asset_uid}/hook-signal/' From c178d7af845820d46d5ba38b4b1b7ebce82adf60 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 19 Sep 2024 18:09:16 -0400 Subject: [PATCH 089/119] fix bug when supplementalDetails is empty if qpath is not present --- kobo/apps/subsequences/api_view.py | 2 +- kobo/apps/subsequences/utils/__init__.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index d5e806c66b..e744cd7102 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -102,7 +102,7 @@ def get_submission_processing(asset, s_uuid): try: submission_extra = asset.submission_extras.get(submission_uuid=s_uuid) - # TODO delete two lines below when every asset is repopulated with + # TODO delete "if" statement below when every asset is repopulated with # `xpath` instead of `qpath`. 
if content := get_sanitized_dict_keys( submission_extra.content, asset diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index ce5f223909..3be21a587d 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -212,7 +212,13 @@ def stream_with_extras(submission_stream, asset): val_expanded = val_expanded[0] qual_response['val'] = val_expanded qual_response.update(qual_q) - submission[SUPPLEMENTAL_DETAILS_KEY] = get_sanitized_dict_keys( + + # Remove `qpath` if present + if sanitized_suppl_details := get_sanitized_dict_keys( all_supplemental_details, asset - ) + ): + all_supplemental_details = sanitized_suppl_details + + submission[SUPPLEMENTAL_DETAILS_KEY] = all_supplemental_details + yield submission From bf2069066c823b24d191f84366e539591830beb2 Mon Sep 17 00:00:00 2001 From: Rebecca Graber Date: Fri, 20 Sep 2024 12:55:14 -0400 Subject: [PATCH 090/119] Log out user from all devices (#5106) --- kobo/settings/base.py | 2 ++ kpi/tests/api/v2/test_api_logout_all.py | 47 +++++++++++++++++++++++++ kpi/urls/__init__.py | 2 ++ kpi/views/v2/logout.py | 32 +++++++++++++++++ 4 files changed, 83 insertions(+) create mode 100644 kpi/tests/api/v2/test_api_logout_all.py create mode 100644 kpi/views/v2/logout.py diff --git a/kobo/settings/base.py b/kobo/settings/base.py index e207737429..2b942448af 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -105,6 +105,7 @@ 'allauth.socialaccount', 'allauth.socialaccount.providers.microsoft', 'allauth.socialaccount.providers.openid_connect', + 'allauth.usersessions', 'hub.HubAppConfig', 'loginas', 'webpack_loader', @@ -154,6 +155,7 @@ 'django.contrib.sessions.middleware.SessionMiddleware', 'hub.middleware.LocaleMiddleware', 'allauth.account.middleware.AccountMiddleware', + 'allauth.usersessions.middleware.UserSessionsMiddleware', 'django.middleware.common.CommonMiddleware', # Still needed really? 
'kobo.apps.openrosa.libs.utils.middleware.LocaleMiddlewareWithTweaks', diff --git a/kpi/tests/api/v2/test_api_logout_all.py b/kpi/tests/api/v2/test_api_logout_all.py new file mode 100644 index 0000000000..c4c04ee2ea --- /dev/null +++ b/kpi/tests/api/v2/test_api_logout_all.py @@ -0,0 +1,47 @@ +from allauth.usersessions.models import UserSession +from django.urls import reverse + +from kobo.apps.kobo_auth.shortcuts import User +from kpi.tests.base_test_case import BaseTestCase + + +class TestLogoutAll(BaseTestCase): + + fixtures = ['test_data'] + + def test_logout_all_sessions(self): + # create 2 user sessions + user = User.objects.get(username='someuser') + UserSession.objects.create(user=user, session_key='12345', ip='1.2.3.4') + UserSession.objects.create(user=user, session_key='56789', ip='5.6.7.8') + count = UserSession.objects.filter(user=user).count() + self.assertEqual(count, 2) + self.client.force_login(user) + url = self._get_endpoint('logout_all') + self.client.post(reverse(url)) + + # ensure both sessions have been deleted + count = UserSession.objects.filter(user=user).count() + self.assertEqual(count, 0) + + def test_logout_all_sessions_does_not_affect_other_users(self): + user1 = User.objects.get(username='someuser') + user2 = User.objects.get(username='anotheruser') + # create sessions for user1 + UserSession.objects.create( + user=user1, session_key='12345', ip='1.2.3.4' + ) + UserSession.objects.create( + user=user1, session_key='56789', ip='5.6.7.8' + ) + count = UserSession.objects.count() + self.assertEqual(count, 2) + + # login user2 + self.client.force_login(user2) + url = self._get_endpoint('logout_all') + self.client.post(reverse(url)) + + # ensure no sessions have been deleted + count = UserSession.objects.filter().count() + self.assertEqual(count, 2) diff --git a/kpi/urls/__init__.py b/kpi/urls/__init__.py index 786df65e38..9cf2ce732b 100644 --- a/kpi/urls/__init__.py +++ b/kpi/urls/__init__.py @@ -13,6 +13,7 @@ from .router_api_v1 import router_api_v1 from .router_api_v2 import router_api_v2, URL_NAMESPACE +from ..views.v2.logout import logout_from_all_devices # TODO: Give other apps their own `urls.py` files instead of importing their # views directly! See @@ -50,6 +51,7 @@ re_path(r'^private-media/', include(private_storage.urls)), # Statistics for superusers re_path(r'^superuser_stats/', include(('kobo.apps.superuser_stats.urls', 'superuser_stats'))), + path('logout-all/', logout_from_all_devices, name='logout_all') ] diff --git a/kpi/views/v2/logout.py b/kpi/views/v2/logout.py new file mode 100644 index 0000000000..959f6a8330 --- /dev/null +++ b/kpi/views/v2/logout.py @@ -0,0 +1,32 @@ +from allauth.usersessions.adapter import get_adapter +from allauth.usersessions.models import UserSession +from rest_framework.decorators import api_view, permission_classes +from rest_framework.response import Response + +from kpi.permissions import IsAuthenticated + + +@api_view(['POST']) +@permission_classes((IsAuthenticated,)) +def logout_from_all_devices(request): + """ + Log calling user out from all devices + +
+    POST /logout-all/
+    
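An equivalent call from Python, sketched here with the `requests` library purely for illustration (the host and token are placeholders, not values taken from this patch):

    import requests

    # Placeholder host and token; substitute your own deployment's values.
    response = requests.post(
        'https://kf.example.org/logout-all/',
        headers={'Authorization': 'Token 1a2b3c4d'},
    )
    assert response.status_code == 200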
+ + > Example + > + > curl -H 'Authorization Token 12345' -X POST https://[kpi-url]/logout-all + + > Response 200 + + > { "Logged out of all sessions" } + + """ + user = request.user + all_user_sessions = UserSession.objects.purge_and_list(user) + adapter = get_adapter() + adapter.end_sessions(all_user_sessions) + return Response('Logged out of all sessions') From bb54380a2469a24da51fa03d86a30ee099974f49 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 08:26:04 -0400 Subject: [PATCH 091/119] Add freezegun to dev requirements for testing --- dependencies/pip/dev_requirements.in | 1 + dependencies/pip/dev_requirements.txt | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/dependencies/pip/dev_requirements.in b/dependencies/pip/dev_requirements.in index cb497a7e1d..61020e2124 100644 --- a/dependencies/pip/dev_requirements.in +++ b/dependencies/pip/dev_requirements.in @@ -13,6 +13,7 @@ pytest-cov pytest-django pytest-env pytest-xdist +freezegun # KoboCAT httmock diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index b77f0951c9..d18d3a447d 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -267,6 +267,8 @@ flake8-quotes==3.4.0 # via -r dependencies/pip/dev_requirements.in flower==2.0.1 # via -r dependencies/pip/requirements.in +freezegun==1.5.1 + # via -r dependencies/pip/dev_requirements.in frozenlist==1.4.1 # via # aiohttp @@ -521,6 +523,7 @@ python-dateutil==2.9.0.post0 # -r dependencies/pip/requirements.in # botocore # celery + # freezegun # pandas # python-crontab python3-openid==3.2.0 @@ -685,4 +688,3 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools -backports-zoneinfo==0.2.1; python_version < '3.9' From 561d5ab1036f2b77b362f42e50c533e6b4388cb2 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 08:27:04 -0400 Subject: [PATCH 092/119] Ensure billing cycle start date matches cancelation date in 31-day months --- kobo/apps/organizations/utils.py | 6 +++-- .../stripe/tests/test_organization_usage.py | 25 +++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 99e4147413..8f354f00a6 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -30,10 +30,12 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): ): return first_of_this_month, first_of_next_month - period_end = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + canceled_subscription_anchor = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + period_end = canceled_subscription_anchor while period_end < now: period_end += relativedelta(months=1) - period_start = period_end - relativedelta(months=1) + # Avoid pushing billing cycle back to before cancelation date + period_start = max(period_end - relativedelta(months=1), canceled_subscription_anchor) return period_start, period_end if not billing_details.get('billing_cycle_anchor'): diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 9de13ff2a9..cec207ed04 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -10,6 +10,7 @@ from django.urls import reverse from django.utils import timezone from djstripe.models import Customer +from freezegun import freeze_time from model_bakery import baker 
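To make the new clamping behaviour concrete, a small worked example of the arithmetic (dates invented for illustration; it mirrors the `max()` guard added above):

    from datetime import datetime, timezone
    from dateutil.relativedelta import relativedelta

    anchor = datetime(2024, 8, 31, tzinfo=timezone.utc)   # canceled on the 31st
    now = datetime(2024, 9, 1, tzinfo=timezone.utc)

    period_end = anchor
    while period_end < now:
        period_end += relativedelta(months=1)             # 2024-09-30
    naive_start = period_end - relativedelta(months=1)    # 2024-08-30, before the cancelation
    period_start = max(naive_start, anchor)               # clamped to 2024-08-31

Without the `max()` guard, the reported cycle would start one day before the subscription was actually canceled.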
from kobo.apps.kobo_auth.shortcuts import User @@ -305,6 +306,30 @@ def test_plan_canceled_last_month(self): == current_billing_period_end.isoformat() ) + def test_plan_canceled_edge_date(self): + """ + If a plan is canceled on the last day of a 31-day month, we want the subsequent + billing cycle to end on the last day of the next month, but we also need to make + sure the cycle starts on the cancelation date + """ + cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) + with freeze_time(cancel_date.replace(day=1)): + subscription = generate_plan_subscription(self.organization, age_days=1095) + + subscription.status = 'canceled' + subscription.ended_at = cancel_date + subscription.save() + + with freeze_time(cancel_date.replace(month=9, day=1)): + response = self.client.get(self.detail_url) + current_month_start = datetime.fromisoformat(response.data['current_month_start']) + current_month_end = datetime.fromisoformat(response.data['current_month_end']) + + assert current_month_start.month == cancel_date.month + assert current_month_start.day == cancel_date.day + assert current_month_end.month == 9 + assert current_month_end.day == 30 + def test_multiple_canceled_plans(self): """ If a user has multiple canceled plans, their default billing cycle From f710e52b57edd328719a3d8bd522a1d77c80b30c Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 08:38:25 -0400 Subject: [PATCH 093/119] Test cleanup --- kobo/apps/stripe/tests/test_organization_usage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index cec207ed04..bd32292620 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -314,7 +314,7 @@ def test_plan_canceled_edge_date(self): """ cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) with freeze_time(cancel_date.replace(day=1)): - subscription = generate_plan_subscription(self.organization, age_days=1095) + subscription = generate_plan_subscription(self.organization) subscription.status = 'canceled' subscription.ended_at = cancel_date From ec01d3f2dc42f07ad51ec6901833a2c2d5fe56ef Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 09:01:40 -0400 Subject: [PATCH 094/119] Linting --- kobo/apps/organizations/utils.py | 9 +++++++-- kobo/apps/stripe/tests/test_organization_usage.py | 10 +++++++--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 8f354f00a6..5328343c24 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -30,12 +30,17 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): ): return first_of_this_month, first_of_next_month - canceled_subscription_anchor = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + canceled_subscription_anchor = canceled_subscription_anchor.replace( + tzinfo=pytz.UTC + ) period_end = canceled_subscription_anchor while period_end < now: period_end += relativedelta(months=1) # Avoid pushing billing cycle back to before cancelation date - period_start = max(period_end - relativedelta(months=1), canceled_subscription_anchor) + period_start = max( + period_end - relativedelta(months=1), + canceled_subscription_anchor, + ) return period_start, period_end if not billing_details.get('billing_cycle_anchor'): diff --git a/kobo/apps/stripe/tests/test_organization_usage.py 
b/kobo/apps/stripe/tests/test_organization_usage.py index bd32292620..58f9f1cfb3 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -309,7 +309,7 @@ def test_plan_canceled_last_month(self): def test_plan_canceled_edge_date(self): """ If a plan is canceled on the last day of a 31-day month, we want the subsequent - billing cycle to end on the last day of the next month, but we also need to make + billing cycle to end on the last day of the next month, but we also need to make sure the cycle starts on the cancelation date """ cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) @@ -322,8 +322,12 @@ def test_plan_canceled_edge_date(self): with freeze_time(cancel_date.replace(month=9, day=1)): response = self.client.get(self.detail_url) - current_month_start = datetime.fromisoformat(response.data['current_month_start']) - current_month_end = datetime.fromisoformat(response.data['current_month_end']) + current_month_start = datetime.fromisoformat( + response.data['current_month_start'] + ) + current_month_end = datetime.fromisoformat( + response.data['current_month_end'] + ) assert current_month_start.month == cancel_date.month assert current_month_start.day == cancel_date.day From 4d96d9c4604b2146188fca557a12039953fa8124 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 10:15:01 -0400 Subject: [PATCH 095/119] Add back zoneinfo backports to dev dependencies.txt and remove remaining uses of pytz --- dependencies/pip/dev_requirements.txt | 1 + kobo/apps/organizations/utils.py | 30 +++++++++++-------- .../stripe/tests/test_organization_usage.py | 10 +++++-- kobo/apps/subsequences/actions/base.py | 8 +++-- 4 files changed, 32 insertions(+), 17 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index d18d3a447d..e94950f4f5 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -688,3 +688,4 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools +backports-zoneinfo==0.2.1; python_version < '3.9' \ No newline at end of file diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 5328343c24..e28042ffa6 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -1,8 +1,12 @@ from typing import Union -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.utils import timezone from kobo.apps.organizations.models import Organization @@ -11,8 +15,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's monthly billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = ( first_of_this_month + relativedelta(months=1) @@ -31,7 +35,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): return first_of_this_month, first_of_next_month canceled_subscription_anchor = canceled_subscription_anchor.replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = canceled_subscription_anchor while period_end < now: @@ -49,15 
+53,15 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): # Subscription is billed monthly, use the current billing period dates if billing_details.get('recurring_interval') == 'month': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end # Subscription is billed yearly - count backwards from the end of the current billing year - period_start = billing_details.get('current_period_end').replace(tzinfo=pytz.UTC) + period_start = billing_details.get('current_period_end').replace(tzinfo=ZoneInfo('UTC')) while period_start > now: period_start -= relativedelta(months=1) period_end = period_start + relativedelta(months=1) @@ -66,8 +70,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): def get_yearly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's annual billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_year = datetime(now.year, 1, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_year = datetime(now.year, 1, 1, tzinfo=ZoneInfo('UTC')) first_of_next_year = first_of_this_year + relativedelta(years=1) if not organization: @@ -80,15 +84,17 @@ def get_yearly_billing_dates(organization: Union[Organization, None]): # Subscription is billed yearly, use the dates from the subscription if billing_details.get('recurring_interval') == 'year': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end # Subscription is monthly, calculate this year's start based on anchor date - period_start = anchor_date.replace(tzinfo=pytz.UTC) + relativedelta(years=1) + period_start = anchor_date.replace( + tzinfo=ZoneInfo('UTC')) + relativedelta(years=1 + ) while period_start < now: anchor_date += relativedelta(years=1) period_end = period_start + relativedelta(years=1) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 58f9f1cfb3..0d152b7186 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -2,9 +2,13 @@ import itertools import pytest -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.core.cache import cache from django.test import override_settings from django.urls import reverse @@ -196,7 +200,7 @@ def test_default_plan_period(self): response = self.client.get(self.detail_url) now = timezone.now() - first_of_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + first_of_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = first_of_month + relativedelta(months=1) assert response.data['total_submission_count']['current_month'] == num_submissions @@ -312,7 +316,7 @@ def test_plan_canceled_edge_date(self): billing cycle to end on the last day of the next month, but we also need to make sure the cycle starts on the cancelation date """ - cancel_date = datetime(year=2024, month=8, day=31, tzinfo=pytz.UTC) + cancel_date = 
datetime(year=2024, month=8, day=31, tzinfo=ZoneInfo('UTC')) with freeze_time(cancel_date.replace(day=1)): subscription = generate_plan_subscription(self.organization) diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index c7993c4756..69e798f61a 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -1,5 +1,9 @@ import datetime -import pytz +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.utils import timezone from kobo.apps.subsequences.constants import (GOOGLETS, GOOGLETX) @@ -19,7 +23,7 @@ def __init__(self, params): self.load_params(params) def cur_time(self): - return datetime.datetime.now(tz=pytz.UTC).strftime('%Y-%m-%dT%H:%M:%SZ') + return datetime.datetime.now(tz=ZoneInfo('UTC')).strftime('%Y-%m-%dT%H:%M:%SZ') def load_params(self, params): raise NotImplementedError('subclass must define a load_params method') From da73987ca7da58da73bbe42cda47f4f7a54a53f3 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 24 Sep 2024 10:15:48 -0400 Subject: [PATCH 096/119] Replace pytz with ZoneInfo --- kobo/apps/organizations/utils.py | 40 +++++++++++-------- .../stripe/tests/test_organization_usage.py | 13 ++++-- kobo/apps/subsequences/actions/base.py | 9 ++++- 3 files changed, 39 insertions(+), 23 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 99e4147413..5cb3e5eb99 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -1,6 +1,9 @@ from typing import Union +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta from django.utils import timezone @@ -11,8 +14,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's monthly billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = ( first_of_this_month + relativedelta(months=1) @@ -21,36 +24,39 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): # If no organization, just use the calendar month if not organization: return first_of_this_month, first_of_next_month - - # If no active subscription, check for canceled subscription + + # If no active subscription, check for canceled subscription if not (billing_details := organization.active_subscription_billing_details()): if not ( canceled_subscription_anchor := organization.canceled_subscription_billing_cycle_anchor() ): return first_of_this_month, first_of_next_month - - period_end = canceled_subscription_anchor.replace(tzinfo=pytz.UTC) + + period_end = canceled_subscription_anchor.replace(tzinfo=ZoneInfo('UTC')) while period_end < now: period_end += relativedelta(months=1) period_start = period_end - relativedelta(months=1) return period_start, period_end - + if not billing_details.get('billing_cycle_anchor'): return first_of_this_month, first_of_next_month # Subscription is billed monthly, use the current billing period dates if billing_details.get('recurring_interval') == 'month': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) 
period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end - # Subscription is billed yearly - count backwards from the end of the current billing year - period_start = billing_details.get('current_period_end').replace(tzinfo=pytz.UTC) + # Subscription is billed yearly - count backwards from the end of the + # current billing year + period_start = billing_details.get('current_period_end').replace( + tzinfo=ZoneInfo('UTC') + ) while period_start > now: period_start -= relativedelta(months=1) period_end = period_start + relativedelta(months=1) @@ -59,8 +65,8 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): def get_yearly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's annual billing cycle""" - now = timezone.now().replace(tzinfo=pytz.UTC) - first_of_this_year = datetime(now.year, 1, 1, tzinfo=pytz.UTC) + now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) + first_of_this_year = datetime(now.year, 1, 1, tzinfo=ZoneInfo('UTC')) first_of_next_year = first_of_this_year + relativedelta(years=1) if not organization: @@ -73,15 +79,15 @@ def get_yearly_billing_dates(organization: Union[Organization, None]): # Subscription is billed yearly, use the dates from the subscription if billing_details.get('recurring_interval') == 'year': period_start = billing_details.get('current_period_start').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) period_end = billing_details.get('current_period_end').replace( - tzinfo=pytz.UTC + tzinfo=ZoneInfo('UTC') ) return period_start, period_end # Subscription is monthly, calculate this year's start based on anchor date - period_start = anchor_date.replace(tzinfo=pytz.UTC) + relativedelta(years=1) + period_start = anchor_date.replace(tzinfo=ZoneInfo('UTC')) + relativedelta(years=1) while period_start < now: anchor_date += relativedelta(years=1) period_end = period_start + relativedelta(years=1) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 9de13ff2a9..43b5e8b9d0 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -1,8 +1,11 @@ import timeit import itertools +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo import pytest -import pytz from datetime import datetime from dateutil.relativedelta import relativedelta from django.core.cache import cache @@ -11,6 +14,7 @@ from django.utils import timezone from djstripe.models import Customer from model_bakery import baker +from rest_framework import status from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization, OrganizationUser @@ -24,7 +28,6 @@ ) from kpi.tests.test_usage_calculator import BaseServiceUsageTestCase from kpi.tests.api.v2.test_api_asset_usage import AssetUsageAPITestCase -from rest_framework import status class OrganizationServiceUsageAPIMultiUserTestCase(BaseServiceUsageTestCase): @@ -46,7 +49,9 @@ def setUpTestData(cls): super().setUpTestData() cls.now = timezone.now() - cls.organization = baker.make(Organization, id=cls.org_id, name='test organization') + cls.organization = baker.make( + Organization, id=cls.org_id, name='test organization' + ) cls.organization.add_user(cls.anotheruser, is_admin=True) assets = create_mock_assets([cls.anotheruser], cls.assets_per_user) @@ -195,7 +200,7 @@ def 
test_default_plan_period(self): response = self.client.get(self.detail_url) now = timezone.now() - first_of_month = datetime(now.year, now.month, 1, tzinfo=pytz.UTC) + first_of_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) first_of_next_month = first_of_month + relativedelta(months=1) assert response.data['total_submission_count']['current_month'] == num_submissions diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index c7993c4756..8b01b9b14a 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -1,5 +1,9 @@ import datetime -import pytz +try: + from zoneinfo import ZoneInfo +except ImportError: + from backports.zoneinfo import ZoneInfo + from django.utils import timezone from kobo.apps.subsequences.constants import (GOOGLETS, GOOGLETX) @@ -7,6 +11,7 @@ ACTION_NEEDED = 'ACTION_NEEDED' PASSES = 'PASSES' + class BaseAction: ID = None _destination_field = '_supplementalDetails' @@ -19,7 +24,7 @@ def __init__(self, params): self.load_params(params) def cur_time(self): - return datetime.datetime.now(tz=pytz.UTC).strftime('%Y-%m-%dT%H:%M:%SZ') + return datetime.datetime.now(tz=ZoneInfo('UTC')).strftime('%Y-%m-%dT%H:%M:%SZ') def load_params(self, params): raise NotImplementedError('subclass must define a load_params method') From fd312c083e69ad5739bdc9095de543816c56ccf8 Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 10:27:37 -0400 Subject: [PATCH 097/119] Linting --- kobo/apps/organizations/utils.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index e28042ffa6..a5786f55d7 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -16,7 +16,9 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): """Returns start and end dates of an organization's monthly billing cycle""" now = timezone.now().replace(tzinfo=ZoneInfo('UTC')) - first_of_this_month = datetime(now.year, now.month, 1, tzinfo=ZoneInfo('UTC')) + first_of_this_month = datetime( + now.year, now.month, 1, tzinfo=ZoneInfo('UTC') + ) first_of_next_month = ( first_of_this_month + relativedelta(months=1) @@ -25,7 +27,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): # If no organization, just use the calendar month if not organization: return first_of_this_month, first_of_next_month - + # If no active subscription, check for canceled subscription if not (billing_details := organization.active_subscription_billing_details()): if not ( @@ -33,7 +35,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): := organization.canceled_subscription_billing_cycle_anchor() ): return first_of_this_month, first_of_next_month - + canceled_subscription_anchor = canceled_subscription_anchor.replace( tzinfo=ZoneInfo('UTC') ) @@ -46,7 +48,7 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): canceled_subscription_anchor, ) return period_start, period_end - + if not billing_details.get('billing_cycle_anchor'): return first_of_this_month, first_of_next_month @@ -61,7 +63,9 @@ def get_monthly_billing_dates(organization: Union[Organization, None]): return period_start, period_end # Subscription is billed yearly - count backwards from the end of the current billing year - period_start = billing_details.get('current_period_end').replace(tzinfo=ZoneInfo('UTC')) + period_start = 
billing_details.get('current_period_end').replace( + tzinfo=ZoneInfo('UTC') + ) while period_start > now: period_start -= relativedelta(months=1) period_end = period_start + relativedelta(months=1) @@ -93,8 +97,8 @@ def get_yearly_billing_dates(organization: Union[Organization, None]): # Subscription is monthly, calculate this year's start based on anchor date period_start = anchor_date.replace( - tzinfo=ZoneInfo('UTC')) + relativedelta(years=1 - ) + tzinfo=ZoneInfo('UTC') + ) + relativedelta(years=1) while period_start < now: anchor_date += relativedelta(years=1) period_end = period_start + relativedelta(years=1) From 5b059bf4a7e0037e1402e89cbb4447738e9bb53c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 24 Sep 2024 10:16:18 -0400 Subject: [PATCH 098/119] Remove backport.zoneinfo dependency --- dependencies/pip/dev_requirements.txt | 2 +- dependencies/pip/requirements.in | 2 +- dependencies/pip/requirements.txt | 2 +- pip-compile.sh | 9 +++++---- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index b77f0951c9..271d23c4a3 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -685,4 +685,4 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools -backports-zoneinfo==0.2.1; python_version < '3.9' +# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index c59f7921b7..ed01f1c4ce 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -111,4 +111,4 @@ djangorestframework-jsonp pandas # Python 3.8 support -backports.zoneinfo; python_version < '3.9' +# backports.zoneinfo; python_version < '3.9' diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index a4810f845f..dd585330f3 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -530,4 +530,4 @@ yarl==1.9.4 # via aiohttp yubico-client==1.13.0 # via django-trench -backports-zoneinfo==0.2.1; python_version < '3.9' +# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/pip-compile.sh b/pip-compile.sh index d3c554b9e0..68ac90525a 100755 --- a/pip-compile.sh +++ b/pip-compile.sh @@ -12,7 +12,8 @@ do pip-compile "$@" "$in_file" || exit $? 
done for out_file in dependencies/pip/*.txt -do - # Workaround for https://github.com/jazzband/pip-tools/issues/1326 - echo "backports-zoneinfo==0.2.1; python_version < '3.9'" >> "$out_file" -done + +#do +# # Workaround for https://github.com/jazzband/pip-tools/issues/1326 +# echo "backports-zoneinfo==0.2.1; python_version < '3.9'" >> "$out_file" +#done From c02d8a50e1a6a32ae828514e8847478d284fb96e Mon Sep 17 00:00:00 2001 From: James Kiger Date: Tue, 24 Sep 2024 11:57:54 -0400 Subject: [PATCH 099/119] Remove duplicate import from merge --- kobo/apps/organizations/utils.py | 4 ---- kobo/apps/stripe/tests/test_organization_usage.py | 4 ---- 2 files changed, 8 deletions(-) diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index c356171e54..0d17572af4 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -6,10 +6,6 @@ from datetime import datetime from dateutil.relativedelta import relativedelta -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo from django.utils import timezone diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 5a7a662ad3..c85cc057d2 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -8,10 +8,6 @@ import pytest from datetime import datetime from dateutil.relativedelta import relativedelta -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo from django.core.cache import cache from django.test import override_settings From 7dc0997b04a743cbed1632b290518e419d84a78e Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 24 Sep 2024 16:03:25 -0400 Subject: [PATCH 100/119] Remove useless comments --- dependencies/pip/dev_requirements.txt | 1 - dependencies/pip/requirements.in | 3 --- dependencies/pip/requirements.txt | 1 - kpi/views/v2/asset.py | 2 +- pip-compile.sh | 5 ----- 5 files changed, 1 insertion(+), 11 deletions(-) diff --git a/dependencies/pip/dev_requirements.txt b/dependencies/pip/dev_requirements.txt index 271d23c4a3..5b3215fec2 100644 --- a/dependencies/pip/dev_requirements.txt +++ b/dependencies/pip/dev_requirements.txt @@ -685,4 +685,3 @@ yubico-client==1.13.0 # The following packages are considered to be unsafe in a requirements file: # setuptools -# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/dependencies/pip/requirements.in b/dependencies/pip/requirements.in index ed01f1c4ce..cd7bd9ea7e 100644 --- a/dependencies/pip/requirements.in +++ b/dependencies/pip/requirements.in @@ -109,6 +109,3 @@ modilabs-python-utils djangorestframework-csv djangorestframework-jsonp pandas - -# Python 3.8 support -# backports.zoneinfo; python_version < '3.9' diff --git a/dependencies/pip/requirements.txt b/dependencies/pip/requirements.txt index dd585330f3..8d4cf907dd 100644 --- a/dependencies/pip/requirements.txt +++ b/dependencies/pip/requirements.txt @@ -530,4 +530,3 @@ yarl==1.9.4 # via aiohttp yubico-client==1.13.0 # via django-trench -# backports-zoneinfo==0.2.1; python_version < '3.9' diff --git a/kpi/views/v2/asset.py b/kpi/views/v2/asset.py index 973dc3275b..b44de7640e 100644 --- a/kpi/views/v2/asset.py +++ b/kpi/views/v2/asset.py @@ -667,7 +667,7 @@ def get_serializer_context(self): # the issue here: https://github.com/kobotoolbox/kpi/issues/2576 queryset = self.__filtered_queryset - # 1) Retrieve all asset IDs of current list + # 1) Retrieve all asset IDs 
of the current list asset_ids = AssetPagination.get_all_asset_ids_from_queryset( queryset ) diff --git a/pip-compile.sh b/pip-compile.sh index 68ac90525a..b4f7b2da70 100755 --- a/pip-compile.sh +++ b/pip-compile.sh @@ -12,8 +12,3 @@ do pip-compile "$@" "$in_file" || exit $? done for out_file in dependencies/pip/*.txt - -#do -# # Workaround for https://github.com/jazzband/pip-tools/issues/1326 -# echo "backports-zoneinfo==0.2.1; python_version < '3.9'" >> "$out_file" -#done From af4e29007cfb47901879885b09e8fb67fae5386a Mon Sep 17 00:00:00 2001 From: James Kiger Date: Wed, 25 Sep 2024 07:13:42 -0400 Subject: [PATCH 101/119] Clearer unit test variables --- kobo/apps/stripe/tests/test_organization_usage.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index c85cc057d2..8b0670504b 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -318,15 +318,22 @@ def test_plan_canceled_edge_date(self): billing cycle to end on the last day of the next month, but we also need to make sure the cycle starts on the cancelation date """ - cancel_date = datetime(year=2024, month=8, day=31, tzinfo=ZoneInfo('UTC')) - with freeze_time(cancel_date.replace(day=1)): + frozen_datetime_now = datetime( + year=2024, + month=9, + day=1, + tzinfo=ZoneInfo('UTC'), + ) + subscribe_date = frozen_datetime_now.replace(month=8, day=1) + cancel_date = frozen_datetime_now.replace(month=8, day=31) + with freeze_time(subscribe_date): subscription = generate_plan_subscription(self.organization) subscription.status = 'canceled' subscription.ended_at = cancel_date subscription.save() - with freeze_time(cancel_date.replace(month=9, day=1)): + with freeze_time(frozen_datetime_now): response = self.client.get(self.detail_url) current_month_start = datetime.fromisoformat( response.data['current_month_start'] From 658825c5fa21fd69500c82636f9a1a9fbcbc7e31 Mon Sep 17 00:00:00 2001 From: Rebecca Graber Date: Mon, 30 Sep 2024 10:29:22 -0400 Subject: [PATCH 102/119] Change access log endpoint names (#5132) --- kobo/apps/audit_log/urls.py | 4 ++-- kobo/apps/audit_log/views.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/kobo/apps/audit_log/urls.py b/kobo/apps/audit_log/urls.py index 7820bca243..fac5f9ad15 100644 --- a/kobo/apps/audit_log/urls.py +++ b/kobo/apps/audit_log/urls.py @@ -5,9 +5,9 @@ router = DefaultRouter() router.register(r'audit-logs', AuditLogViewSet, basename='audit-log') -router.register(r'access-logs', AccessLogViewSet, basename='access-log') +router.register(r'access-logs', AllAccessLogViewSet, basename='all-access-logs') router.register( - r'access-logs/all', AllAccessLogViewSet, basename='all-access-logs' + r'access-logs/me', AccessLogViewSet, basename='access-log' ) urlpatterns = [] diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index 4d2ab951e0..5a81b54765 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -93,12 +93,12 @@ class AllAccessLogViewSet(AuditLogViewSet): Lists all access logs for all users. Only available to superusers.
-    GET /api/v2/access-logs/all
+    GET /api/v2/access-logs/
     
     > Example
     >
-    > curl -X GET https://[kpi-url]/access-logs/all
+    > curl -X GET https://[kpi-url]/access-logs/
     > Response 200
@@ -144,12 +144,12 @@ class AccessLogViewSet(AuditLogViewSet):
     Lists all access logs for the authenticated user
-    GET /api/v2/access-logs/
+    GET /api/v2/access-logs/me
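The same request issued from Python with the `requests` library, assuming token authentication and the default paginated response shape (host and token are placeholders, not part of this change):

    import requests

    # Placeholder host and token for illustration only.
    response = requests.get(
        'https://kf.example.org/api/v2/access-logs/me',
        headers={'Authorization': 'Token 1a2b3c4d'},
    )
    for log in response.json().get('results', []):
        print(log)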
     
> Example > - > curl -X GET https://[kpi-url]/access-logs/ + > curl -X GET https://[kpi-url]/access-logs/me > Response 200 From 02d8138c0582ceffd4fd30b2395231498e69548b Mon Sep 17 00:00:00 2001 From: Paulo Amorim Date: Fri, 27 Sep 2024 18:31:22 -0300 Subject: [PATCH 103/119] improve error readability --- jsapp/scss/stylesheets/partials/_registration.scss | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/jsapp/scss/stylesheets/partials/_registration.scss b/jsapp/scss/stylesheets/partials/_registration.scss index d192b8a735..efe806dcea 100644 --- a/jsapp/scss/stylesheets/partials/_registration.scss +++ b/jsapp/scss/stylesheets/partials/_registration.scss @@ -31,7 +31,7 @@ margin: 0 auto; position: relative; max-width: 400px; - background: rgba(colors.$kobo-gray-800, 0.8); + background: rgba(colors.$kobo-gray-800, 0.9); padding: 20px 30px; border-radius: 10px; @@ -305,7 +305,7 @@ ul.errorlist { padding: 0px; - color: colors.$kobo-red; + color: colors.$kobo-mid-red; margin: 8px 0px; } } From 796fb11f429794b2b8fbf1e8f96c6d45c6459c6c Mon Sep 17 00:00:00 2001 From: Paulo Amorim Date: Fri, 27 Sep 2024 18:12:46 -0300 Subject: [PATCH 104/119] replace renderJSXMessage implementation --- jsapp/js/alertify.ts | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/jsapp/js/alertify.ts b/jsapp/js/alertify.ts index 02e8138362..e48374cb16 100644 --- a/jsapp/js/alertify.ts +++ b/jsapp/js/alertify.ts @@ -4,8 +4,7 @@ import {KeyNames} from 'js/constants'; import type {IconName} from 'jsapp/fonts/k-icons'; import {escapeHtml} from 'js/utils'; import type {ReactElement} from 'react'; -import {createRoot} from 'react-dom/client'; - +import ReactDOMServer from 'react-dom/server'; interface MultiConfirmButton { label: string; @@ -193,10 +192,5 @@ export function destroyConfirm( * strings). 
*/ export function renderJSXMessage(jsx: ReactElement) { - const domNode = document.createElement('div'); - const root = createRoot(domNode); - root.render(jsx); - const str = domNode.outerHTML; - root.unmount(); - return str; + return ReactDOMServer.renderToStaticMarkup(jsx); } From 94a0705caa8c87b521c378338e31718f52e2cdea Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 30 Sep 2024 18:25:23 -0400 Subject: [PATCH 105/119] Trigger Celery tasks after transaction validation --- kobo/apps/project_ownership/exceptions.py | 4 +++ .../apps/project_ownership/models/transfer.py | 33 ++++++++++++------- kobo/apps/project_ownership/tasks.py | 11 +++++-- 3 files changed, 34 insertions(+), 14 deletions(-) diff --git a/kobo/apps/project_ownership/exceptions.py b/kobo/apps/project_ownership/exceptions.py index 7efd66be5a..8eb122962d 100644 --- a/kobo/apps/project_ownership/exceptions.py +++ b/kobo/apps/project_ownership/exceptions.py @@ -4,3 +4,7 @@ class AsyncTaskException(Exception): class TransferAlreadyProcessedException(Exception): pass + + +class TransferStillPendingException(Exception): + pass diff --git a/kobo/apps/project_ownership/models/transfer.py b/kobo/apps/project_ownership/models/transfer.py index 3e45cebb8e..6c97ddfd2f 100644 --- a/kobo/apps/project_ownership/models/transfer.py +++ b/kobo/apps/project_ownership/models/transfer.py @@ -1,5 +1,6 @@ from __future__ import annotations +import time from datetime import timedelta from typing import Optional, Union @@ -87,6 +88,7 @@ def status(self, value: Union[str, tuple[str]]): global_status.status = value global_status.save() + self.date_modified = timezone.now() self.save(update_fields=['date_modified']) self._update_invite_status() @@ -113,6 +115,7 @@ def transfer_project(self): status=TransferStatusChoices.SUCCESS ) else: + _rewrite_mongo = False with transaction.atomic(): with kc_transaction_atomic(): deployment = self.asset.deployment @@ -128,19 +131,11 @@ def transfer_project(self): deployment.rename_enketo_id_key(previous_owner_username) self._sent_in_app_messages() + _rewrite_mongo = True - # Move submissions, media files and attachments in background - # tasks because it can take a while to complete on big projects - - # 1) Rewrite `_userform_id` in MongoDB - async_task.delay( - self.pk, TransferStatusTypeChoices.SUBMISSIONS - ) - - # 2) Move media files to new owner's home directory - async_task.delay( - self.pk, TransferStatusTypeChoices.MEDIA_FILES - ) + # Do not delegate anything to Celery before the transaction has + # been validated. Otherwise, Celery could fetch outdated data. 
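The underlying Django pattern being applied here, shown as a standalone sketch (the model instance and Celery task are placeholders, not names from this patch):

    from django.db import transaction

    def save_then_enqueue(obj, task):
        # `obj` is any model instance and `task` any Celery task (placeholders).
        with transaction.atomic():
            obj.save()
            # The callback fires only after the surrounding transaction commits,
            # so a worker that picks the task up immediately still reads the
            # committed row rather than stale, pre-commit data.
            transaction.on_commit(lambda: task.delay(obj.pk))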
+ transaction.on_commit(lambda: self._start_async_jobs(_rewrite_mongo)) success = True finally: @@ -265,6 +260,20 @@ def _sent_in_app_messages(self): ] ) + def _start_async_jobs(self, rewrite_mongo: bool = True): + # Move submissions, media files and attachments in background + # tasks because it can take a while to complete on big projects + if rewrite_mongo: + # 1) Rewrite `_userform_id` in MongoDB + async_task.delay( + self.pk, TransferStatusTypeChoices.SUBMISSIONS + ) + + # 2) Move media files to new owner's home directory + async_task.delay( + self.pk, TransferStatusTypeChoices.MEDIA_FILES + ) + def _update_invite_status(self): """ Update the status of the invite based on the status of each transfer diff --git a/kobo/apps/project_ownership/tasks.py b/kobo/apps/project_ownership/tasks.py index 87b37d5964..ac2d194fa0 100644 --- a/kobo/apps/project_ownership/tasks.py +++ b/kobo/apps/project_ownership/tasks.py @@ -11,7 +11,7 @@ from kobo.celery import celery_app from kpi.utils.mailer import EmailMessage, Mailer -from .exceptions import AsyncTaskException +from .exceptions import AsyncTaskException, TransferStillPendingException from .models.choices import ( InviteStatusChoices, TransferStatusChoices, @@ -28,6 +28,7 @@ autoretry_for=( SoftTimeLimitExceeded, TimeLimitExceeded, + TransferStillPendingException, ), max_retry=5, retry_backoff=60, @@ -43,8 +44,14 @@ def async_task(transfer_id: int, async_task_type: str): transfer = Transfer.objects.get(pk=transfer_id) + if transfer.status == TransferStatusChoices.PENDING: + # Sometimes, a race condition occurs: the Celery task starts, but + # `transfer.status` has not been updated fast enough. + # Raise an exception which allows retry. + raise TransferStillPendingException + if transfer.status != TransferStatusChoices.IN_PROGRESS: - raise AsyncTaskException(f'`{transfer}` is not in progress') + raise AsyncTaskException(f'`{transfer}` is not in progress: {transfer.status}') TransferStatus.update_status( transfer_id=transfer_id, From 1bb667e572a9d3820ca00d48db9452006f54792b Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 30 Sep 2024 18:39:26 -0400 Subject: [PATCH 106/119] Fix error: Attachment.media_file has no attribute 'move' --- kobo/apps/openrosa/apps/logger/models/attachment.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kobo/apps/openrosa/apps/logger/models/attachment.py b/kobo/apps/openrosa/apps/logger/models/attachment.py index 024ee06b11..037bbe73f7 100644 --- a/kobo/apps/openrosa/apps/logger/models/attachment.py +++ b/kobo/apps/openrosa/apps/logger/models/attachment.py @@ -1,4 +1,3 @@ -# coding: utf-8 import mimetypes import os @@ -10,6 +9,7 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) +from kpi.fields.file import ExtendedFileField from .instance import Instance @@ -41,7 +41,7 @@ class Attachment(models.Model): instance = models.ForeignKey( Instance, related_name='attachments', on_delete=models.CASCADE ) - media_file = models.FileField( + media_file = ExtendedFileField( storage=default_storage, upload_to=upload_to, max_length=380, From d65e57807a3061c0de08ecf01278e7f9793b3cd5 Mon Sep 17 00:00:00 2001 From: Leszek Date: Tue, 1 Oct 2024 14:59:59 +0200 Subject: [PATCH 107/119] fix typo in message --- .../permissions/transferProjects/transferProjects.api.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jsapp/js/components/permissions/transferProjects/transferProjects.api.ts 
b/jsapp/js/components/permissions/transferProjects/transferProjects.api.ts index b953e114f1..ea2c5200ee 100644 --- a/jsapp/js/components/permissions/transferProjects/transferProjects.api.ts +++ b/jsapp/js/components/permissions/transferProjects/transferProjects.api.ts @@ -83,7 +83,7 @@ export async function cancelInvite(inviteUrl: string) { handleApiFail( error as FailResponse, t( - 'Failed to cancel transfer. The transfer may be declined or accpeted already. Please check your email.' + 'Failed to cancel transfer. The transfer may be declined or accepted already. Please check your email.' ) ); } From 0fb90bf3281c023a5d879d3194bda4c99473d172 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 1 Oct 2024 12:52:20 -0400 Subject: [PATCH 108/119] Add management command to resume broken project ownership transfers --- .../project_ownership/management/__init__.py | 0 .../management/commands/__init__.py | 0 .../resume_failed_transfers_2_024_25_fix.py | 97 +++++++++++++++++++ 3 files changed, 97 insertions(+) create mode 100644 kobo/apps/project_ownership/management/__init__.py create mode 100644 kobo/apps/project_ownership/management/commands/__init__.py create mode 100644 kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py diff --git a/kobo/apps/project_ownership/management/__init__.py b/kobo/apps/project_ownership/management/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/kobo/apps/project_ownership/management/commands/__init__.py b/kobo/apps/project_ownership/management/commands/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py new file mode 100644 index 0000000000..34e43a49f9 --- /dev/null +++ b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py @@ -0,0 +1,97 @@ +from django.core.management import call_command +from django.core.management.base import BaseCommand + +from ...models import ( + Transfer, + TransferStatus, + TransferStatusChoices, + TransferStatusTypeChoices, +) +from ...utils import ( + move_media_files, + move_attachments, + rewrite_mongo_userform_id, +) + + +class Command(BaseCommand): + help = ( + 'Resume project ownership transfers done under `2.024.25` which failed ' + 'with error: "Project A : previous_owner -> new_owner is not in progress"' + ) + + def handle(self, *args, **options): + + usernames = set() + verbosity = options['verbosity'] + + for transfer_status in TransferStatus.objects.filter( + status=TransferStatusChoices.FAILED, + status_type=TransferStatusTypeChoices.GLOBAL, + error__icontains='is not in progress', + ).iterator(): + transfer = transfer_status.transfer + if transfer.asset.pending_delete: + if verbosity: + self.stdout.write( + f'Project `{transfer.asset}` is in trash bin, skip it!' + ) + continue + + if not self._validate_whether_transfer_can_be_fixed(transfer): + if verbosity: + self.stdout.write( + f'Project `{transfer.asset}` transfer cannot be fixed automatically' + ) + continue + + if not transfer.asset.has_deployment: + continue + + if verbosity: + self.stdout.write( + f'Resuming `{transfer.asset}` transfer…' + ) + self._move_data(transfer) + move_attachments(transfer) + move_media_files(transfer) + if verbosity: + self.stdout.write( + f'\tDone!' 
+ ) + usernames.add(transfer.invite.recipient.username) + + # Update attachment storage bytes counters + for username in usernames: + call_command( + 'update_attachment_storage_bytes', + verbosity=verbosity, + force=True, + username=username, + ) + + def _move_data(self, transfer: Transfer): + + # Sanity check + asset = transfer.asset + rewrite_mongo_userform_id(transfer) + number_of_submissions = asset.deployment.xform.num_of_submissions + submission_ids = [ + s['_id'] + for s in asset.deployment.get_submissions(asset.owner, fields=['_id']) + ] + + if number_of_submissions == (mongo_document_count := len(submission_ids)): + self.stdout.write(f'\tSuccess: {number_of_submissions} submissions moved!') + else: + missing_count = number_of_submissions - mongo_document_count + self.stdout.write( + f'\t⚠️ Only {mongo_document_count} submissions moved, ' + f'{missing_count} are missing!' + ) + + def _validate_whether_transfer_can_be_fixed(self, transfer: Transfer) -> bool: + original_new_owner_id = transfer.invite.recipient_id + current_owner_id = transfer.asset.owner_id + + return current_owner_id == original_new_owner_id From 041aeb99d3d512c3594069d4154f12903afdbb22 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Tue, 1 Oct 2024 14:27:16 -0400 Subject: [PATCH 109/119] Fix unit tests: patch "on_commit" --- .../resume_failed_transfers_2_024_25_fix.py | 7 +++--- .../apps/project_ownership/models/transfer.py | 12 ++++----- .../tests/api/v2/test_api.py | 23 ++++++++--------- .../tests/test_transfer_status.py | 7 ++++-- kpi/tests/utils/transaction.py | 25 +++++++++++++++++++ 5 files changed, 49 insertions(+), 25 deletions(-) create mode 100644 kpi/tests/utils/transaction.py diff --git a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py index 34e43a49f9..6be2719b24 100644 --- a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py +++ b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py @@ -41,7 +41,8 @@ def handle(self, *args, **options): if not self._validate_whether_transfer_can_be_fixed(transfer): if verbosity: self.stdout.write( - f'Project `{transfer.asset}` transfer cannot be fixed automatically' + f'Project `{transfer.asset}` transfer cannot be fixed' + f' automatically' ) continue @@ -56,9 +57,7 @@ def handle(self, *args, **options): move_attachments(transfer) move_media_files(transfer) if verbosity: - self.stdout.write( - f'\tDone!' 
- ) + self.stdout.write('\tDone!') usernames.add(transfer.invite.recipient.username) # Update attachment storage bytes counters diff --git a/kobo/apps/project_ownership/models/transfer.py b/kobo/apps/project_ownership/models/transfer.py index 6c97ddfd2f..23c90b0098 100644 --- a/kobo/apps/project_ownership/models/transfer.py +++ b/kobo/apps/project_ownership/models/transfer.py @@ -1,6 +1,5 @@ from __future__ import annotations -import time from datetime import timedelta from typing import Optional, Union @@ -102,6 +101,7 @@ def transfer_project(self): success = False try: if not self.asset.has_deployment: + _rewrite_mongo = False with transaction.atomic(): self._reassign_project_permissions(update_deployment=False) self._sent_in_app_messages() @@ -115,7 +115,7 @@ def transfer_project(self): status=TransferStatusChoices.SUCCESS ) else: - _rewrite_mongo = False + _rewrite_mongo = True with transaction.atomic(): with kc_transaction_atomic(): deployment = self.asset.deployment @@ -131,12 +131,10 @@ def transfer_project(self): deployment.rename_enketo_id_key(previous_owner_username) self._sent_in_app_messages() - _rewrite_mongo = True - - # Do not delegate anything to Celery before the transaction has - # been validated. Otherwise, Celery could fetch outdated data. - transaction.on_commit(lambda: self._start_async_jobs(_rewrite_mongo)) + # Do not delegate anything to Celery before the transaction has + # been validated. Otherwise, Celery could fetch outdated data. + transaction.on_commit(lambda: self._start_async_jobs(_rewrite_mongo)) success = True finally: if not success: diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 58f708f1de..f8a4027772 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -1,15 +1,12 @@ import uuid from constance.test import override_config -from datetime import timedelta -from dateutil.parser import isoparse from django.conf import settings from django.contrib.auth import get_user_model from django.utils import timezone from mock import patch, MagicMock from rest_framework import status from rest_framework.reverse import reverse -from unittest.mock import ANY from kobo.apps.project_ownership.models import ( Invite, @@ -18,11 +15,11 @@ ) from kobo.apps.project_ownership.tests.utils import MockServiceUsageSerializer from kobo.apps.trackers.utils import update_nlp_counter - from kpi.constants import PERM_VIEW_ASSET from kpi.models import Asset from kpi.tests.base_test_case import BaseAssetTestCase from kpi.tests.kpi_test_case import KpiTestCase +from kpi.tests.utils.transaction import immediate_on_commit from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE @@ -432,7 +429,7 @@ def test_account_usage_transferred_to_new_user(self): response = self.client.get(service_usage_url) assert response.data == expected_empty_data - # Transfer project from someuser to anotheruser + # Transfer the project from someuser to anotheruser self.client.login(username='someuser', password='someuser') payload = { 'recipient': self.absolute_reverse( @@ -445,9 +442,10 @@ def test_account_usage_transferred_to_new_user(self): 'kpi.deployment_backends.backends.MockDeploymentBackend.xform', MagicMock(), ): - response = self.client.post( - self.invite_url, data=payload, format='json' - ) + with immediate_on_commit(): + response = self.client.post( + self.invite_url, data=payload, format='json' + ) assert response.status_code == 
status.HTTP_201_CREATED # someuser should have no usage reported anymore @@ -495,7 +493,7 @@ def test_data_accessible_to_new_user(self): ) == 0 ) - # Transfer project from someuser to anotheruser + # Transfer the project from someuser to anotheruser self.client.login(username='someuser', password='someuser') payload = { 'recipient': self.absolute_reverse( @@ -508,9 +506,10 @@ def test_data_accessible_to_new_user(self): 'kpi.deployment_backends.backends.MockDeploymentBackend.xform', MagicMock(), ): - response = self.client.post( - self.invite_url, data=payload, format='json' - ) + with immediate_on_commit(): + response = self.client.post( + self.invite_url, data=payload, format='json' + ) assert response.status_code == status.HTTP_201_CREATED # anotheruser is the owner and should see the project diff --git a/kobo/apps/project_ownership/tests/test_transfer_status.py b/kobo/apps/project_ownership/tests/test_transfer_status.py index 11985bdc2c..2853083e66 100644 --- a/kobo/apps/project_ownership/tests/test_transfer_status.py +++ b/kobo/apps/project_ownership/tests/test_transfer_status.py @@ -2,6 +2,7 @@ from django.test import TestCase from kpi.models import Asset +from kpi.tests.utils.transaction import immediate_on_commit from ..models import ( Invite, InviteStatusChoices, @@ -105,9 +106,11 @@ def test_calculated_failed_transfer_status(self): assert self.invite.status == InviteStatusChoices.FAILED def test_draft_project_transfer(self): - # when project is a draft, there are no celery tasks called to move + # When a project is a draft, there are no celery tasks called to move # submissions (and related attachments). - self.transfer.transfer_project() + with immediate_on_commit(): + self.transfer.transfer_project() + assert self.transfer.status == TransferStatusChoices.SUCCESS # However, the status of each async task should still be updated to diff --git a/kpi/tests/utils/transaction.py b/kpi/tests/utils/transaction.py new file mode 100644 index 0000000000..7d30838425 --- /dev/null +++ b/kpi/tests/utils/transaction.py @@ -0,0 +1,25 @@ +from contextlib import contextmanager +from unittest import mock + +from django.contrib.auth.management import DEFAULT_DB_ALIAS + + +@contextmanager +def immediate_on_commit(using=None): + """ + Context manager executing transaction.on_commit() hooks immediately as + if the connection was in auto-commit mode. This is required when + using a subclass of django.test.TestCase as all tests are wrapped in + a transaction that never gets committed. 
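Typical usage inside a Django `TestCase`, sketched for illustration (the endpoint is a placeholder):

    from django.test import TestCase

    from kpi.tests.utils.transaction import immediate_on_commit

    class ExampleTestCase(TestCase):
        def test_on_commit_hooks_run(self):
            with immediate_on_commit():
                # Any transaction.on_commit() callbacks registered by this
                # request run immediately instead of never firing inside the
                # test's wrapping transaction.
                self.client.post('/placeholder-endpoint/', data={})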
+ + Source: https://code.djangoproject.com/ticket/30457#comment:1 + """ + immediate_using = DEFAULT_DB_ALIAS if using is None else using + + def on_commit(func, using=None): + using = DEFAULT_DB_ALIAS if using is None else using + if using == immediate_using: + func() + + with mock.patch('django.db.transaction.on_commit', side_effect=on_commit) as patch: + yield patch From 65d81b6ce6d7431909da9c66490fd47d618fa4ab Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 3 Oct 2024 10:55:57 -0400 Subject: [PATCH 110/119] Apply requested changes --- .../openrosa/apps/logger/models/instance.py | 9 +- .../stripe/tests/test_organization_usage.py | 3 +- .../tests/test_submission_stream.py | 3 - kobo/apps/trackers/submission_utils.py | 8 +- kobo/settings/base.py | 1 + kpi/deployment_backends/mock_backend.py | 18 +- kpi/deployment_backends/openrosa_backend.py | 14 +- kpi/tests/api/v1/test_api_submissions.py | 4 +- .../api/v2/test_api_asset_export_settings.py | 15 +- kpi/tests/api/v2/test_api_assets.py | 13 +- kpi/tests/api/v2/test_api_submissions.py | 164 +++++++++++------- kpi/tests/test_mongo_helper.py | 3 +- kpi/tests/test_utils.py | 141 ++++++++++++++- kpi/tests/utils/dicts.py | 92 +++++++--- kpi/views/v2/data.py | 2 +- 15 files changed, 362 insertions(+), 128 deletions(-) diff --git a/kobo/apps/openrosa/apps/logger/models/instance.py b/kobo/apps/openrosa/apps/logger/models/instance.py index 79a99d72b9..31a889856f 100644 --- a/kobo/apps/openrosa/apps/logger/models/instance.py +++ b/kobo/apps/openrosa/apps/logger/models/instance.py @@ -134,13 +134,10 @@ def check_active(self, force): if self.xform and not self.xform.downloadable: raise FormInactiveError() - # FIXME Access `self.xform.user.profile` directly could raise a - # `RelatedObjectDoesNotExist` error if profile does not exist even if - # wrapped in try/except UserProfile = apps.get_model('main', 'UserProfile') # noqa - Avoid circular imports - if profile := UserProfile.objects.filter(user=self.xform.user).first(): - if profile.metadata.get('submissions_suspended', False): - raise TemporarilyUnavailableError() + profile, created = UserProfile.objects.get_or_create(user=self.xform.user) + if not created and profile.metadata.get('submissions_suspended', False): + raise TemporarilyUnavailableError() return def _set_geom(self): diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 5cc13d1e52..9ee9326048 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -1,5 +1,4 @@ import timeit -import itertools import pytest from django.core.cache import cache @@ -49,7 +48,7 @@ def setUpTestData(cls): users = baker.make( User, - username=itertools.cycle(cls.names), + username=iter(cls.names), _quantity=cls.user_count - 1, _bulk_create=True, ) diff --git a/kobo/apps/subsequences/tests/test_submission_stream.py b/kobo/apps/subsequences/tests/test_submission_stream.py index dc6f770873..e915f79e72 100644 --- a/kobo/apps/subsequences/tests/test_submission_stream.py +++ b/kobo/apps/subsequences/tests/test_submission_stream.py @@ -281,6 +281,3 @@ def test_stream_with_extras_handles_duplicated_submission_uuids(self): for v in qual_response['val']: assert isinstance(v['uuid'], str) - - ## Clear all mocked submissions to avoid duplicate submission errors - #self.asset.deployment.mock_submissions([]) diff --git a/kobo/apps/trackers/submission_utils.py b/kobo/apps/trackers/submission_utils.py index 0294318d96..b388deebe1 100644 --- 
a/kobo/apps/trackers/submission_utils.py
+++ b/kobo/apps/trackers/submission_utils.py
@@ -4,13 +4,8 @@
 import uuid
 
 from django.conf import settings
-from django.utils import timezone
 from model_bakery import baker
 
-from kobo.apps.openrosa.apps.logger.models import (
-    DailyXFormSubmissionCounter,
-    XForm,
-)
 from kpi.models import Asset
 from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE
 
@@ -48,7 +43,7 @@ def _get_uid(count):
         owner=user,
         asset_type='survey',
         name='test',
-        uid=itertools.cycle(_get_uid(assets_per_user)),
+        uid=iter(_get_uid(assets_per_user)),
         _quantity=assets_per_user,
     )
 
@@ -105,6 +100,5 @@ def add_mock_submissions(assets: list, submissions_per_asset: int = 1):
         asset.deployment.mock_submissions(asset_submissions)
         all_submissions = all_submissions + asset_submissions
 
-    # update_xform_counters(asset, submissions=submissions_per_asset)
 
     return all_submissions
diff --git a/kobo/settings/base.py b/kobo/settings/base.py
index 812c5de440..8683ffaad5 100644
--- a/kobo/settings/base.py
+++ b/kobo/settings/base.py
@@ -1722,6 +1722,7 @@ def dj_stripe_request_callback_method():
     'video/webm',
     'audio/aac',
     'audio/aacp',
+    'audio/3gpp',
     'audio/flac',
     'audio/mp3',
     'audio/mp4',
diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py
index 4dfa0b8e28..139d11ca57 100644
--- a/kpi/deployment_backends/mock_backend.py
+++ b/kpi/deployment_backends/mock_backend.py
@@ -15,7 +15,7 @@
     safe_create_instance,
 )
 from kpi.constants import PERM_ADD_SUBMISSIONS, SUBMISSION_FORMAT_TYPE_JSON
-from kpi.tests.utils.dicts import nested_dict_from_keys
+from kpi.tests.utils.dicts import convert_hierarchical_keys_to_nested_dict
 
 from .openrosa_backend import OpenRosaDeploymentBackend
 from ..utils.files import ExtendedContentFile
@@ -56,6 +56,20 @@ def mock_submissions(
         Read test data and convert it to proper XML to be saved as a real
         Instance object.
+
+        1. Each item in the iterable submissions must be a dictionary following
+           the format of the JSON returned by the data API.
+        2. The submissions are mutated to include submission and attachment
+           PKs (respectively `_id` and `_attachments[index]['id']`) after being
+           saved in the database.
+        3. If `_submitted_by` is present in a submission, the submission is made
+           by the user identified there, even if that user must (temporarily) be
+           granted permission to submit to `self.asset`.
+        4. `meta/instanceID` is added to any submission where it's missing if
+           `create_uuids` is `True`.
+        5. If `_submission_time` is present in the submission, it is preserved by
+           overriding the normal logic that populates this field with the current
+           timestamp at the moment of submission.
         """
 
         class FakeRequest:
@@ -65,7 +79,7 @@ class FakeRequest:
         owner_username = self.asset.owner.username
 
         for submission in submissions:
-            sub_copy = nested_dict_from_keys(submission)
+            sub_copy = convert_hierarchical_keys_to_nested_dict(submission)
 
             if create_uuids:
                 if 'formhub/uuid' not in submission:
diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py
index 59faeb2662..11d16852f0 100644
--- a/kpi/deployment_backends/openrosa_backend.py
+++ b/kpi/deployment_backends/openrosa_backend.py
@@ -277,12 +277,11 @@ def duplicate_submission(
     ) -> dict:
         """
         Duplicates a single submission.
The submission with the given - `submission_id` is duplicated and the `start`, `end` and + `submission_id` is duplicated, and the `start`, `end` and `instanceID` parameters of the submission are reset before being - saving the instance. + saved to the instance. - Returns a dict with uuid of created - submission if successful + Returns the duplicated submission (if successful) """ user = request.user @@ -329,6 +328,8 @@ def duplicate_submission( ) # TODO Handle errors returned by safe_create_instance + # (safe_)create_instance uses `username` argument to identify the XForm object + # (when nothing else worked). `_submitted_by` is populated by `request.user` error, instance = safe_create_instance( username=self.asset.owner.username, xml_file=ContentFile(xml_tostring(xml_parsed)), @@ -337,7 +338,6 @@ def duplicate_submission( request=request, ) - # Cast to list to help unit tests to pass. return self._rewrite_json_attachment_urls( self.get_submission(user=user, submission_id=instance.pk), request ) @@ -408,6 +408,8 @@ def edit_submission( ) # TODO Handle errors returned by safe_create_instance + # (safe_)create_instance uses `username` argument to identify the XForm object + # (when nothing else worked). `_submitted_by` is populated by `request.user` safe_create_instance( username=user.username, xml_file=xml_submission_file, @@ -1140,6 +1142,8 @@ def store_submission( media_file for media_file in attachments.values() ) + # (safe_)create_instance uses `username` argument to identify the XForm object + # (when nothing else worked). `_submitted_by` is populated by `request.user` return safe_create_instance( username=self.asset.owner.username, xml_file=ContentFile(xml_submission), diff --git a/kpi/tests/api/v1/test_api_submissions.py b/kpi/tests/api/v1/test_api_submissions.py index 509ed0365b..875d827981 100644 --- a/kpi/tests/api/v1/test_api_submissions.py +++ b/kpi/tests/api/v1/test_api_submissions.py @@ -34,7 +34,7 @@ def test_list_submissions_as_owner(self): def test_list_submissions_shared_as_anotheruser(self): self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_200_OK) expected_ids = [s['_id'] for s in self.submissions] @@ -115,7 +115,7 @@ def test_delete_submission_as_owner(self): def test_delete_submission_shared_as_anotheruser(self): self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) submission = self.submissions_submitted_by_someuser[0] url = reverse( self._get_endpoint('submission-detail'), diff --git a/kpi/tests/api/v2/test_api_asset_export_settings.py b/kpi/tests/api/v2/test_api_asset_export_settings.py index 54a32fb20e..d673d925cd 100644 --- a/kpi/tests/api/v2/test_api_asset_export_settings.py +++ b/kpi/tests/api/v2/test_api_asset_export_settings.py @@ -53,13 +53,6 @@ def setUp(self): 'type': 'csv', } - def _log_in_as_another_user(self): - """ - Helper to switch user from `someuser` to `anotheruser`. 
- """ - self.client.logout() - self.client.login(username='anotheruser', password='anotheruser') - def _create_foo_export_settings(self, name=None): if name is None: name = self.name @@ -225,14 +218,14 @@ def test_api_list_asset_export_settings_without_perms(self): # assign `view_asset` to anotheruser self.asset.assign_perm(self.anotheruser, PERM_VIEW_ASSET) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.export_settings_list_url) assert response.status_code == status.HTTP_404_NOT_FOUND def test_api_list_asset_export_settings_with_perms(self): self._create_foo_export_settings() - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.export_settings_list_url) assert response.status_code == status.HTTP_404_NOT_FOUND @@ -247,14 +240,14 @@ def test_api_detail_asset_export_settings_without_perms(self): export_settings = self._create_foo_export_settings() url = self._get_detail_url(export_settings.uid) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(url) assert response.status_code == status.HTTP_404_NOT_FOUND def test_api_detail_asset_export_settings_shared_with_manage_asset_perms(self): export_settings = self._create_foo_export_settings() url = self._get_detail_url(export_settings.uid) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) # assign `view_asset` to anotheruser so that they can see the asset but # not the export settings diff --git a/kpi/tests/api/v2/test_api_assets.py b/kpi/tests/api/v2/test_api_assets.py index 230d354fa9..5e3302b9de 100644 --- a/kpi/tests/api/v2/test_api_assets.py +++ b/kpi/tests/api/v2/test_api_assets.py @@ -300,6 +300,7 @@ class AssetProjectViewListApiTests(BaseAssetTestCase): def setUp(self): self.client.login(username='someuser', password='someuser') + self.anotheruser = User.objects.get(username='anotheruser') self.asset_list_url = reverse(self._get_endpoint('asset-list')) self.region_views_url = reverse(self._get_endpoint('projectview-list')) asset_country_settings = [ @@ -379,7 +380,7 @@ def test_regional_views_list(self): ['Overview', 'Test view 1'] ) - self._login_as_anotheruser() + self.client.force_login(self.anotheruser) res = self.client.get(self.region_views_url) data = res.json() # anotheruser should only see view 1 and 2 @@ -413,7 +414,7 @@ def test_project_views_for_someuser(self): assert asset_countries & region_for_view def test_project_views_anotheruser_submission_count(self): - self._login_as_anotheruser() + self.client.force_login(self.anotheruser) for asset in Asset.objects.all(): if asset.has_deployment: submissions = [ @@ -443,7 +444,7 @@ def test_project_views_anotheruser_submission_count(self): assert asset_detail_response.data['deployment__submission_count'] == 1 def test_project_views_for_anotheruser(self): - self._login_as_anotheruser() + self.client.force_login(self.anotheruser) res = self.client.get(self.region_views_url) data = res.json() results = data['results'] @@ -485,7 +486,7 @@ def test_project_views_for_someuser_can_view_submissions(self): assert data_res.status_code == status.HTTP_200_OK def test_project_views_for_anotheruser_can_view_asset_detail(self): - self._login_as_anotheruser() + self.client.force_login(self.anotheruser) user = User.objects.get(username='anotheruser') res = self.client.get(self.region_views_url) data = res.json() @@ -511,7 +512,7 @@ def 
test_project_views_for_anotheruser_can_view_all_asset_permission_assignments self, ): # get the first asset from the first project view - self._login_as_anotheruser() + self.client.force_login(self.anotheruser) anotheruser = User.objects.get(username='anotheruser') proj_view_list = self.client.get(self.region_views_url).data['results'] first_proj_view = proj_view_list[0] @@ -593,7 +594,7 @@ def test_project_views_for_anotheruser_can_preview_form(self): assert snap_response.status_code == status.HTTP_200_OK def test_project_views_for_anotheruser_can_change_metadata(self): - self._login_as_anotheruser() + self.client.force_login(self.anotheruser) res = self.client.get(self.region_views_url) data = res.json() results = data['results'] diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index c4eb694d2b..7101c92ac3 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -20,6 +20,7 @@ from rest_framework import status from kobo.apps.audit_log.models import AuditLog +from kobo.apps.openrosa.apps.logger.models.instance import Instance from kobo.apps.openrosa.apps.main.models.user_profile import UserProfile from kobo.apps.openrosa.libs.utils.logger_tools import dict2xform from kobo.apps.kobo_auth.shortcuts import User @@ -123,13 +124,6 @@ def _add_submissions(self, other_fields: dict = None): self.submissions_submitted_by_anotheruser = submissions[4:6] self.submissions = submissions - def _log_in_as_another_user(self): - """ - Helper to switch user from `someuser` to `anotheruser`. - """ - self.client.logout() - self.client.login(username='anotheruser', password='anotheruser') - class BulkDeleteSubmissionsApiTests(BaseSubmissionTestCase): @@ -211,7 +205,7 @@ def test_delete_not_shared_submissions_as_anotheruser(self): anotheruser cannot view someuser's data, therefore cannot delete it. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = {'payload': {'confirm': True}} response = self.client.delete(self.submission_bulk_url, data=data, @@ -226,7 +220,7 @@ def test_delete_shared_submissions_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_DELETE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = {'payload': {'confirm': True}} response = self.client.delete(self.submission_bulk_url, @@ -245,7 +239,7 @@ def test_delete_all_allowed_submissions_with_partial_perms_as_anotheruser(self): Test that anotheruser can delete all their data at once and if they do, only delete their data. 
""" - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_DELETE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -297,7 +291,7 @@ def test_delete_some_allowed_submissions_with_partial_perms_as_anotheruser(self) Test that anotheruser can delete part of their data """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_DELETE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -344,7 +338,7 @@ def test_cannot_delete_view_only_submissions_with_partial_perms_as_anotheruser(s Test that anotheruser cannot delete someuser's data """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'someuser'}], PERM_DELETE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] # view_submission is implied @@ -417,7 +411,7 @@ def test_cannot_create_submission(self): # Shared self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.post(self.submission_list_url, data=submission) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -506,7 +500,7 @@ def test_list_submissions_not_shared_as_anotheruser(self): anotheruser cannot view someuser's data. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -516,7 +510,7 @@ def test_list_submissions_shared_as_anotheruser(self): anotheruser has view access on someuser's data. They can view all """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.submission_list_url, {"format": "json"}) self.assertEqual(response.status_code, status.HTTP_200_OK) response_ids = [r['_id'] for r in response.data.get('results')] @@ -529,7 +523,7 @@ def test_list_submissions_with_partial_permissions_as_anotheruser(self): anotheruser has partial view access on someuser's project. They can view only the data they submitted to someuser's project. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -582,7 +576,7 @@ def test_list_submissions_asset_publicly_shared_as_authenticated_user(self): """ anonymous_user = get_anonymous_user() - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) # Give the user who will access the public data--without any explicit # permission assignment--their own asset. This is needed to expose a @@ -604,7 +598,7 @@ def test_list_submissions_asset_publicly_shared_and_shared_with_user_as_anotheru unable to view submission data. 
""" - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) anonymous_user = get_anonymous_user() assert not self.asset.has_perm(self.anotheruser, PERM_VIEW_ASSET) @@ -651,14 +645,17 @@ def test_list_query_elem_match(self): question = 'q3' submission[group] = [ { - f'{question}': 'whap.gif', + f'{group}/{question}': 'whap.gif', }, + { + f'{group}/{question}': 'whop.gif', + } ] + self.asset.deployment.mock_submissions([submission]) - # FIXME with attachments data = { - 'query': f'{{"{group}/{question}":{{"$exists":true}}}}', + 'query': f'{{"{group}":{{"$elemMatch":{{"{group}/{question}":{{"$exists":true}}}}}}}}', 'format': 'json', } response = self.client.get(self.submission_list_url, data) @@ -710,7 +707,7 @@ def test_retrieve_submission_not_shared_as_anotheruser(self): anotheruser has no access to someuser's data someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), @@ -728,7 +725,7 @@ def test_retrieve_submission_shared_as_anotheruser(self): anotheruser has view access to someuser's data. """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), @@ -747,7 +744,7 @@ def test_retrieve_submission_with_partial_permissions_as_anotheruser(self): anotheruser has partial view access to someuser's data. They can only see their own data. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -866,7 +863,7 @@ def test_delete_submission_not_shared_as_anotheruser(self): anotheruser cannot view someuser's data, therefore they cannot delete it. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), @@ -886,7 +883,7 @@ def test_delete_submission_shared_as_anotheruser(self): anotheruser can view someuser's data but they cannot delete it. """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) submission = self.submissions_submitted_by_unknownuser[0] url = reverse( self._get_endpoint('submission-detail'), @@ -917,7 +914,7 @@ def test_delete_submission_with_partial_perms_as_anotheruser(self): anotheruser has partial access to someuser's data. anotheruser can only view/delete their data. 
""" - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_DELETE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -1081,7 +1078,7 @@ def test_attachments_rewrite(self): asset.deployment.mock_submissions([submission]) asset.deployment.set_namespace(self.URL_NAMESPACE) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) url = reverse( self._get_endpoint('submission-detail'), kwargs={ @@ -1100,16 +1097,22 @@ def test_attachments_rewrite(self): 'group_ec9yq67/group_dq8as25[1]/group_xt0za80[2]/my_attachment', 'group_ec9yq67/group_dq8as25[2]/group_xt0za80[1]/my_attachment' ] + + submission_id = submission['_id'] + attachment_0_id = submission['_attachments'][0]['id'] + attachment_1_id = submission['_attachments'][1]['id'] + attachment_2_id = submission['_attachments'][2]['id'] + expected_new_download_urls = [ 'http://testserver/api/v2/assets/' + asset.uid - + '/data/1000/attachments/1/?format=json', + + f"/data/{submission_id}/attachments/{attachment_0_id}/?format=json", 'http://testserver/api/v2/assets/' + asset.uid - + '/data/1000/attachments/2/?format=json', + + f"/data/{submission_id}/attachments/{attachment_1_id}/?format=json", 'http://testserver/api/v2/assets/' + asset.uid - + '/data/1000/attachments/3/?format=json', + + f"/data/{submission_id}/attachments/{attachment_2_id}/?format=json", ] for idx, attachment in enumerate(attachments): @@ -1202,7 +1205,7 @@ def test_get_edit_link_submission_not_shared_as_anotheruser(self): anotheruser cannot view the project, therefore cannot edit data. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.submission_url, {'format': 'json'}) assert response.status_code == status.HTTP_404_NOT_FOUND @@ -1213,7 +1216,7 @@ def test_cannot_get_edit_link_submission_shared_with_view_as_anotheruser(self): someuser's data existence should not be revealed. """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.submission_url, {'format': 'json'}) # FIXME if anotheruser has view permissions, they should receive a 403 @@ -1227,7 +1230,7 @@ def test_get_edit_link_submission_shared_with_edit_as_anotheruser(self): anotheruser can retrieve enketo edit link """ self.asset.assign_perm(self.anotheruser, PERM_CHANGE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) ee_url = ( f'{settings.ENKETO_URL}/{settings.ENKETO_EDIT_INSTANCE_ENDPOINT}' @@ -1250,7 +1253,7 @@ def test_get_edit_link_with_partial_perms_as_anotheruser(self): anotheruser has partial permissions on someuser's data anotheruser can only view/edit their own data """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_CHANGE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -1689,7 +1692,7 @@ def test_cannot_get_view_link_submission_not_shared_as_anotheruser(self): anotheruser cannot view the project, therefore cannot retrieve enketo link. someuser's data existence should not be revealed. 
""" - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.submission_view_link_url, {'format': 'json'}) assert response.status_code == status.HTTP_404_NOT_FOUND @@ -1701,7 +1704,7 @@ def test_get_view_link_submission_shared_with_view_only_as_anotheruser(self): anotheruser can retrieve enketo view link. """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) ee_url = ( f'{settings.ENKETO_URL}/{settings.ENKETO_VIEW_INSTANCE_ENDPOINT}' @@ -1723,7 +1726,7 @@ def test_get_view_link_with_partial_perms_as_anotheruser(self): anotheruser has partial view permissions on someuser's data anotheruser can only view their own data """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -1849,6 +1852,7 @@ def test_duplicate_submission_as_owner_allowed(self): someuser is the owner of the project. someuser is allowed to duplicate their own data """ + print('URL :', self.submission_url, flush=True) response = self.client.post(self.submission_url, {'format': 'json'}) assert response.status_code == status.HTTP_201_CREATED self._check_duplicate(response) @@ -1862,6 +1866,23 @@ def test_duplicate_submission_with_xml_encoding(self): assert submission_xml.startswith( '' ) + breakpoint() + self.test_duplicate_submission_as_owner_allowed() + + def test_duplicate_submission_without_xml_encoding(self): + submission_xml = self.asset.deployment.get_submissions( + user=self.asset.owner, + format_type=SUBMISSION_FORMAT_TYPE_XML, + submission_ids=[self.submission['_id']], + )[0] + assert submission_xml.startswith( + '' + ) + Instance.objects.filter(pk=self.submission['_id']).update( + xml=submission_xml.replace( + '', '' + ) + ) self.test_duplicate_submission_as_owner_allowed() def test_duplicate_submission_as_anotheruser_not_allowed(self): @@ -1871,7 +1892,7 @@ def test_duplicate_submission_as_anotheruser_not_allowed(self): anotheruser has no access to someuser's data and someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.post(self.submission_url, {'format': 'json'}) assert response.status_code == status.HTTP_404_NOT_FOUND @@ -1894,7 +1915,7 @@ def test_cannot_duplicate_submission_as_anotheruser_with_view_perm(self): edit/duplicate someuser's data. """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.post(self.submission_url, {'format': 'json'}) assert response.status_code == status.HTTP_403_FORBIDDEN @@ -1906,7 +1927,7 @@ def test_duplicate_submission_as_anotheruser_with_change_perm_allowed(self): someuser's data. 
""" self.asset.assign_perm(self.anotheruser, PERM_CHANGE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.post(self.submission_url, {'format': 'json'}) assert response.status_code == status.HTTP_201_CREATED self._check_duplicate(response) @@ -1921,7 +1942,7 @@ def test_cannot_duplicate_submission_as_anotheruser_with_view_add_perms(self): """ for perm in [PERM_VIEW_SUBMISSIONS, PERM_ADD_SUBMISSIONS]: self.asset.assign_perm(self.anotheruser, perm) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.post(self.submission_url, {'format': 'json'}) assert response.status_code == status.HTTP_403_FORBIDDEN @@ -1932,7 +1953,7 @@ def test_duplicate_submission_as_anotheruser_with_partial_perms(self): anotheruser has partial change submissions permissions. They can edit/duplicate their own data only. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_CHANGE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] @@ -2063,7 +2084,7 @@ def test_cannot_bulk_update_submissions_as_anotheruser(self): anotheruser cannot access someuser's data. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.patch( self.submission_url, data=self.submitted_payload, format='json' ) @@ -2090,7 +2111,7 @@ def test_cannot_bulk_update_submissions_as_anotheruser_with_view_perm(self): update someuser's data """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.patch( self.submission_url, data=self.submitted_payload, format='json' ) @@ -2103,7 +2124,7 @@ def test_bulk_update_submissions_as_anotheruser_with_change_perm(self): anotheruser can edit view someuser's data """ self.asset.assign_perm(self.anotheruser, PERM_CHANGE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.patch( self.submission_url, data=self.submitted_payload, format='json' ) @@ -2116,7 +2137,7 @@ def test_bulk_update_submissions_as_anotheruser_with_partial_perms(self): The project is partially shared with anotheruser anotheruser can only edit their own data. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) # Allow anotheruser to update their own data partial_perms = { @@ -2178,7 +2199,7 @@ def test_cannot_retrieve_status_of_not_shared_submission_as_anotheruser(self): anotheruser has no access to someuser's data. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -2190,7 +2211,7 @@ def test_retrieve_status_of_shared_submission_as_anotheruser(self): anotheruser can view validation status of submissions. """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.get(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, {}) @@ -2227,7 +2248,7 @@ def test_cannot_delete_status_of_not_shared_submission_as_anotheruser(self): validation status. someuser's data existence should not be revealed. 
""" - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.delete(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -2239,7 +2260,7 @@ def test_delete_status_of_shared_submission_as_anotheruser(self): anotheruser can delete validation status of the project. """ self.asset.assign_perm(self.anotheruser, PERM_VALIDATE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.delete(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -2284,7 +2305,7 @@ def test_cannot_edit_status_of_not_shared_submission_as_anotheruser(self): validate them. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) response = self.client.patch(self.validation_status_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -2296,7 +2317,7 @@ def test_edit_status_of_shared_submission_as_anotheruser(self): anotheruser can edit validation status of the project. """ self.asset.assign_perm(self.anotheruser, PERM_VALIDATE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = { 'validation_status.uid': 'validation_status_not_approved' } @@ -2327,7 +2348,7 @@ def test_edit_status_with_partial_perms_as_anotheruser(self): anotheruser has partial access to someuser's data. anotheruser can only view and validate their data. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VALIDATE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } @@ -2386,6 +2407,17 @@ def setUp(self): kwargs={'parent_lookup_asset': self.asset.uid, 'format': 'json'}, ) + # Ensure all submissions have no validation status + response = self.client.get( + self.submission_list_url, format='json' + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + emptied = [ + not s['_validation_status'] + for s in response.data['results'] + ] + self.assertTrue(all(emptied)) + # Make the owner change validation status of all submissions data = { 'payload': { @@ -2398,6 +2430,18 @@ def setUp(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) + def test_all_validation_statuses_applied(self): + # ensure all submissions are not approved + response = self.client.get( + self.submission_list_url, format='json' + ) + self.assertEqual(response.status_code, status.HTTP_200_OK) + applied = [ + s['_validation_status']['uid'] == 'validation_status_not_approved' + for s in response.data['results'] + ] + self.assertTrue(all(applied)) + def test_delete_all_status_as_owner(self): """ someuser is the owner of the project. @@ -2469,7 +2513,7 @@ def test_delete_status_of_not_shared_submissions_as_anotheruser(self): bulk delete the validation status of them. someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = { 'payload': { 'validation_status.uid': None, @@ -2491,7 +2535,7 @@ def test_delete_status_of_shared_submissions_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_VALIDATE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = { 'payload': { 'validation_status.uid': None, @@ -2616,7 +2660,7 @@ def test_cannot_edit_submission_validation_statuses_not_shared_as_anotheruser(se bulk edit the validation status of them. 
someuser's data existence should not be revealed. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = { 'payload': { 'validation_status.uid': 'validation_status_approved', @@ -2638,7 +2682,7 @@ def test_edit_submission_validation_statuses_as_anotheruser(self): at once. """ self.asset.assign_perm(self.anotheruser, PERM_VALIDATE_SUBMISSIONS) - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) data = { 'payload': { 'validation_status.uid': 'validation_status_approved', @@ -2693,7 +2737,7 @@ def test_edit_all_submission_validation_statuses_with_partial_perms_as_anotherus `confirm=true` must be sent when the request alters all their submissions at once. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VALIDATE_SUBMISSIONS: [ {'_submitted_by': 'anotheruser'}] @@ -2747,7 +2791,7 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru The project is partially shared with anotheruser. anotheruser can only validate their own data. """ - self._log_in_as_another_user() + self.client.force_login(self.anotheruser) partial_perms = { PERM_VALIDATE_SUBMISSIONS: [ {'_submitted_by': 'anotheruser'}] diff --git a/kpi/tests/test_mongo_helper.py b/kpi/tests/test_mongo_helper.py index 48a1192b9a..538cfaf095 100644 --- a/kpi/tests/test_mongo_helper.py +++ b/kpi/tests/test_mongo_helper.py @@ -1,7 +1,6 @@ from __future__ import annotations import copy -import itertools from django.conf import settings from django.test import TestCase @@ -27,7 +26,7 @@ def test_get_instances(self): names = ('bob', 'alice') users = baker.make( settings.AUTH_USER_MODEL, - username=itertools.cycle(names), + username=iter(names), _quantity=2, ) assets = [] diff --git a/kpi/tests/test_utils.py b/kpi/tests/test_utils.py index 0ce45b8403..65a1092eb4 100644 --- a/kpi/tests/test_utils.py +++ b/kpi/tests/test_utils.py @@ -1,4 +1,3 @@ -# coding: utf-8 import os import re from copy import deepcopy @@ -12,6 +11,7 @@ SearchQueryTooShortException, QueryParserNotSupportedFieldLookup, ) +from kpi.tests.utils.dicts import convert_hierarchical_keys_to_nested_dict from kpi.utils.autoname import autoname_fields, autoname_fields_to_field from kpi.utils.autoname import autovalue_choices_in_place from kpi.utils.pyxform_compatibility import allow_choice_duplicates @@ -26,6 +26,145 @@ ) +class ConvertHierarchicalKeysToNestedDictTestCase(TestCase): + + def test_regular_group(self): + dict_ = { + 'group_lx4sf58/question_1': 'answer_1', + 'group_lx4sf58/question_2': 'answer_2' + } + + expected = { + 'group_lx4sf58': { + 'question_1': 'answer_1', + 'question_2': 'answer_2' + } + } + + assert convert_hierarchical_keys_to_nested_dict(dict_) == expected + + def test_nested_groups(self): + dict_ = { + 'parent_group/middle_group/inner_group/question_1': 'answer_1' + } + + expected = { + 'parent_group': { + 'middle_group': { + 'inner_group': { + 'question_1': 'answer_1' + } + } + } + } + + assert convert_hierarchical_keys_to_nested_dict(dict_) == expected + + def test_nested_repeated_groups(self): + dict_ = { + 'formhub/uuid': '61b5029a4d2e42b49a12b9a18c22449f', + 'group_lq3wx73': [ + { + 'group_lq3wx73/middle_group': [ + { + 'group_lq3wx73/middle_group/middle_q': 'middle 1.1.1.1', + 'group_lq3wx73/middle_group/inner_group': [ + { + 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.1.1' + }, + { + 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.1.2' + }, + ], + }, + { + 
'group_lq3wx73/middle_group/middle_q': 'middle 1.1.2.1', + 'group_lq3wx73/middle_group/inner_group': [ + { + 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.2.1' + }, + { + 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.2.1' + }, + ], + }, + ] + }, + { + 'group_lq3wx73/middle_group': [ + { + 'group_lq3wx73/middle_group/middle_q': 'middle 1.2.1.1', + 'group_lq3wx73/middle_group/inner_group': [ + { + 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner_q 1.2.1.1' + } + ], + } + ] + }, + ], + } + + expected = { + 'formhub': {'uuid': '61b5029a4d2e42b49a12b9a18c22449f'}, + 'group_lq3wx73': [ + { + 'middle_group': [ + { + 'middle_q': 'middle 1.1.1.1', + 'inner_group': [ + {'inner_q': 'inner 1.1.1.1'}, + {'inner_q': 'inner 1.1.1.2'}, + ], + }, + { + 'middle_q': 'middle 1.1.2.1', + 'inner_group': [ + {'inner_q': 'inner 1.1.2.1'}, + {'inner_q': 'inner 1.1.2.1'}, + ], + }, + ] + }, + { + 'middle_group': [ + { + 'middle_q': 'middle 1.2.1.1', + 'inner_group': [ + {'inner_q': 'inner_q 1.2.1.1'} + ], + } + ] + }, + ], + } + assert convert_hierarchical_keys_to_nested_dict(dict_) == expected + + def test_nested_repeated_groups_in_group(self): + dict_ = { + 'people/person': [ + { + 'people/person/name': 'Julius Caesar', + 'people/person/age': 55, + }, + { + 'people/person/name': 'Augustus', + 'people/person/age': 75, + }, + ], + } + + expected = { + 'people': { + 'person': [ + {'name': 'Julius Caesar', 'age': 55}, + {'name': 'Augustus', 'age': 75} + ] + } + } + assert convert_hierarchical_keys_to_nested_dict(dict_) == expected + + class UtilsTestCase(TestCase): def test_sluggify(self): diff --git a/kpi/tests/utils/dicts.py b/kpi/tests/utils/dicts.py index 3e25ddfe3c..d1e5790405 100644 --- a/kpi/tests/utils/dicts.py +++ b/kpi/tests/utils/dicts.py @@ -1,35 +1,87 @@ from __future__ import annotations -def nested_dict_from_keys(dict_: dict) -> dict: - """ - Transforms a dictionary with keys containing slashes into a nested - dictionary structure. +def convert_hierarchical_keys_to_nested_dict(dict_: dict) -> dict: """ + Converts a dictionary with flat keys containing slashes into a nested dictionary. + This function takes a dictionary where keys represent a hierarchical path, + separated by slashes (e.g., "level1/level2/level3"), and converts it into + a nested dictionary structure. Each part of the key becomes a level in the + resulting dictionary. + """ result = {} for key, value in dict_.items(): + # Split the key to get each level of hierarchy keys = key.split('/') sub_dict = result - for sub_key in keys[:-1]: - if sub_key not in sub_dict: - sub_dict[sub_key] = {} - sub_dict = sub_dict[sub_key] + # Traverse each part of the key except the last one to build the nested structure + # + # Example: + # In keys = ['a', 'b', 'c'], the sub-keys 'a' and 'b' represent intermediate + # levels in the nested dictionary structure, while 'c' is the last part, + # which corresponds to the point where we will actually assign the value + # and the appropriate depth we want. 
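+        #
+        # A short illustration with assumed values (not taken from the test
+        # fixtures): {'a/b/c': 1} becomes {'a': {'b': {'c': 1}}}; the loop
+        # below creates the 'a' and 'b' levels, and the assignment after the
+        # loop stores 1 under 'c'.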
+        for part in keys[:-1]:
+            if part not in sub_dict:
+                # Create an empty dictionary if the part does not exist
+                sub_dict[part] = {}
+            # Move deeper into the current level of the dictionary
+            sub_dict = sub_dict[part]
+
+        # Handle the final part of the key
         if isinstance(value, list):
-            sub_dict[keys[-1]] = [
-                {
-                    sub_key.split('/')[-1]: sub_val
-                    for sub_key, sub_val in item.items()
-                }
-                for item in value if item
-            ]
+            # If the value is a list, make sure the corresponding key exists as a list
+            if keys[-1] not in sub_dict:
+                sub_dict[keys[-1]] = []
+
+            # Iterate over each item in the list
+            for item in value:
+                if isinstance(item, dict):
+                    # Clean the dictionary item and append it to the list
+                    sub_dict[keys[-1]].append(_clean_keys(item))
+                else:
+                    # Append non-dictionary items directly to the list
+                    sub_dict[keys[-1]].append(item)
         else:
-            sub_dict[keys[-1]] = (
-                nested_dict_from_keys(value)
-                if isinstance(value, dict)
-                else value
-            )
+            # Assign the value directly for non-list items
+            sub_dict[keys[-1]] = value
 
     return result
+
+
+def _clean_keys(dict_: dict) -> dict:
+    """
+    Removes the redundant parent segments from keys in a dictionary,
+    keeping only relevant parts for hierarchical nesting.
+    """
+
+    cleaned_dict = {}
+
+    for key, value in dict_.items():
+        # Get the last segment of the key after the last slash (see example
+        # in `convert_hierarchical_keys_to_nested_dict` for more details).
+        cleaned_key = key.split('/')[-1]
+
+        # Handle lists of dictionaries recursively
+        if isinstance(value, list):
+            cleaned_list = []
+            for item in value:
+                if isinstance(item, dict):
+                    # Recursively clean each dictionary in the list
+                    cleaned_list.append(_clean_keys(item))
+                else:
+                    # Append non-dictionary items directly to the cleaned list
+                    cleaned_list.append(item)
+            # Store the cleaned list under the cleaned key
+            cleaned_dict[cleaned_key] = cleaned_list
+        # Handle nested dictionaries recursively
+        elif isinstance(value, dict):
+            cleaned_dict[cleaned_key] = _clean_keys(value)
+        else:
+            # Assign the value directly if it is not a dictionary or list
+            cleaned_dict[cleaned_key] = value
+
+    return cleaned_dict
diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py
index 6ab9300769..982fec71a3 100644
--- a/kpi/views/v2/data.py
+++ b/kpi/views/v2/data.py
@@ -545,7 +545,7 @@ def duplicate(self, request, pk, *args, **kwargs):
         Creates a duplicate of the submission with a given `pk`
         """
         deployment = self._get_deployment()
-        # Coerce to int because back end only finds matches with same type
+        # Coerce to int because the back end only finds matches with the same type
         submission_id = positive_int(pk)
         duplicate_response = deployment.duplicate_submission(
             submission_id=submission_id, request=request

From 14d57a27c14dfd4b2b48941b528f42675b3bb1db Mon Sep 17 00:00:00 2001
From: Olivier Leger
Date: Thu, 3 Oct 2024 16:57:31 -0400
Subject: [PATCH 111/119] Remove useless imports

---
 .../openrosa/apps/logger/tests/test_publish_xls.py |  1 +
 .../apps/project_ownership/tests/api/v2/test_api.py |  1 -
 .../tests/test_submission_extras_api_post.py |  1 -
 kobo/apps/trackers/submission_utils.py |  1 -
 kpi/permissions.py | 13 -------------
 kpi/tests/api/v2/test_api_submissions.py |  1 -
 6 files changed, 1 insertion(+), 17 deletions(-)

diff --git a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py
index d5f315fb79..30647fdef6 100644
--- a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py
+++ b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py
@@ -13,6 +13,7
@@ from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.libs.utils.logger_tools import report_exception + class TestPublishXLS(TestBase): def test_publish_xls(self): diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 3420843798..7ae35e988e 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -13,7 +13,6 @@ InviteStatusChoices, Transfer, ) -from kobo.apps.project_ownership.tests.utils import MockServiceUsageSerializer from kobo.apps.trackers.utils import update_nlp_counter from kpi.constants import PERM_VIEW_ASSET diff --git a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py index 1502fcccf5..fcd0c2079d 100644 --- a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py +++ b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py @@ -1,4 +1,3 @@ -import uuid from copy import deepcopy from unittest.mock import patch diff --git a/kobo/apps/trackers/submission_utils.py b/kobo/apps/trackers/submission_utils.py index b388deebe1..f66a09474c 100644 --- a/kobo/apps/trackers/submission_utils.py +++ b/kobo/apps/trackers/submission_utils.py @@ -1,4 +1,3 @@ -import itertools import os import time import uuid diff --git a/kpi/permissions.py b/kpi/permissions.py index 2652b52bbc..4639cebc78 100644 --- a/kpi/permissions.py +++ b/kpi/permissions.py @@ -256,19 +256,6 @@ class AssetEditorSubmissionViewerPermission(AssetNestedObjectPermission): } -class AssetExportSettingsPermission(AssetNestedObjectPermission): - perms_map = { - 'GET': ['%(app_label)s.view_submissions'], - 'POST': ['%(app_label)s.manage_asset'], - } - - perms_map['OPTIONS'] = perms_map['GET'] - perms_map['HEAD'] = perms_map['GET'] - perms_map['PUT'] = perms_map['POST'] - perms_map['PATCH'] = perms_map['POST'] - perms_map['DELETE'] = perms_map['POST'] - - class AssetPermissionAssignmentPermission(AssetNestedObjectPermission): perms_map = AssetNestedObjectPermission.perms_map.copy() diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 7101c92ac3..ab48e988a8 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -1866,7 +1866,6 @@ def test_duplicate_submission_with_xml_encoding(self): assert submission_xml.startswith( '' ) - breakpoint() self.test_duplicate_submission_as_owner_allowed() def test_duplicate_submission_without_xml_encoding(self): From 41c6b739cf50cdf4fb15372114a0bcc02a0352c6 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 12 Sep 2024 11:13:45 -0400 Subject: [PATCH 112/119] Apply requested changes --- kobo/apps/openrosa/apps/api/permissions.py | 4 ++-- .../api/tests/viewsets/test_abstract_viewset.py | 4 +++- kobo/apps/openrosa/apps/api/tools.py | 9 ++++----- .../openrosa/apps/api/viewsets/xform_list_api.py | 2 +- .../openrosa/apps/api/viewsets/xform_viewset.py | 3 --- .../openrosa/apps/main/tests/test_past_bugs.py | 7 +++++++ kpi/views/v2/paired_data.py | 14 +++++++++++++- 7 files changed, 30 insertions(+), 13 deletions(-) diff --git a/kobo/apps/openrosa/apps/api/permissions.py b/kobo/apps/openrosa/apps/api/permissions.py index e7ea0771aa..845ae86603 100644 --- a/kobo/apps/openrosa/apps/api/permissions.py +++ b/kobo/apps/openrosa/apps/api/permissions.py @@ -172,13 +172,13 @@ def has_object_permission(self, request, view, obj): except KeyError: pass 
else: - # Deleting submissions is not allowed anymore with KoboCAT API + # Deleting submissions is not allowed with KoboCAT API if view.action == 'bulk_delete': raise LegacyAPIException return user.has_perms(required_perms, obj) - # Deleting submissions in not allowed anymore with KoboCAT API + # Deleting submissions is not allowed with KoboCAT API if view.action == 'destroy': raise LegacyAPIException diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py index 6632ae1a9d..e6f8cce8da 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py @@ -61,7 +61,9 @@ def publish_xls_form( # during deployment. Thus, this method will create the XForm object directly # without an API call except if `use_api` is True. - # In unit tests, if we need to test the result of the (KoboCAT API), + # Some unit tests still need to test the result of API `v1` + # (i.e.: KoboCAT API). For example, to ensure project creation is + # not allowed anymore. if not data: data = { 'owner': self.user.username, diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py index bab408a532..52dd0b9116 100644 --- a/kobo/apps/openrosa/apps/api/tools.py +++ b/kobo/apps/openrosa/apps/api/tools.py @@ -34,7 +34,7 @@ default_kobocat_storage as default_storage, ) from kpi.views.v2.paired_data import ( - PairedDataViewset, + OpenRosaDynamicDataAttachmentViewset, SubmissionXMLRenderer, XMLExternalDataPermission, ) @@ -170,11 +170,10 @@ def get_media_file_response( args = resolver_match.args kwargs = resolver_match.kwargs - paired_data_viewset = PairedDataViewset.as_view({'get': 'external'}) + paired_data_viewset = OpenRosaDynamicDataAttachmentViewset.as_view( + {'get': 'external'} + ) django_http_request = request._request - paired_data_viewset.cls.permission_classes = [XMLExternalDataPermission] - paired_data_viewset.cls.renderer_classes = [SubmissionXMLRenderer] - paired_data_viewset.cls.filter_backends = [] return paired_data_viewset(request=django_http_request, *args, **kwargs) diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py index 789cfa4a83..5f90eb1735 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py @@ -203,7 +203,7 @@ def _is_metadata_expired(obj: MetaData, request: Request) -> bool: timedelta = timezone.now() - obj.date_modified if timedelta.total_seconds() > settings.PAIRED_DATA_EXPIRATION: - # Force external xml regeneration + # Force external XML regeneration get_media_file_response(obj, request) # We update the modification time here to avoid requesting that KPI diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py index 60e06d9cd7..3cdd4b8827 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py @@ -575,9 +575,6 @@ def create(self, request, *args, **kwargs): return Response(survey, status=status.HTTP_400_BAD_REQUEST) - def get_queryset(self): - return super().get_queryset() - def update(self, request, pk, *args, **kwargs): if 'xls_file' in request.FILES: # A new XLSForm has been uploaded and will replace the existing diff --git a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py 
b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py index 5589e19a13..4cd6fd49b9 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py +++ b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py @@ -41,6 +41,13 @@ def test_buggy_files(self): ) assert message == str(e) + def test_erics_files(self): + for name in [ + 'battery_life.xls', + 'Enumerator_Training_Practice_Survey.xls', + ]: + self._publish_xls_file(os.path.join('fixtures', 'bug_fixes', name)) + class TestSubmissionBugs(TestBase): diff --git a/kpi/views/v2/paired_data.py b/kpi/views/v2/paired_data.py index 8331a4ebd1..f73e948b48 100644 --- a/kpi/views/v2/paired_data.py +++ b/kpi/views/v2/paired_data.py @@ -227,7 +227,7 @@ def external(self, request, paired_data_uid, **kwargs): file_type=AssetFile.PAIRED_DATA, user=self.asset.owner, ) - # When asset file is new, we consider its content as expired to + # When the asset file is new, we consider its content as expired to # force its creation below has_expired = True else: @@ -336,3 +336,15 @@ def get_serializer_context(self): source__names[record['uid']] = record['name'] context_['source__names'] = source__names return context_ + + +class OpenRosaDynamicDataAttachmentViewset(PairedDataViewset): + """ + Only specific to OpenRosa manifest when projects are linked with DDA. + Enforce permission and renderer classes at the class level instead to be + sure they are taken into account while calling `viewset.as_view()` + """ + + permission_classes = [XMLExternalDataPermission] + renderer_classes = [SubmissionXMLRenderer] + filter_backends = [] From 76656564674336892538b04502d2f634288cc78c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 3 Oct 2024 17:50:13 -0400 Subject: [PATCH 113/119] Removed redundant call of get_queryset() --- kobo/apps/openrosa/apps/api/viewsets/data_viewset.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index be8402cc8f..01ce97a136 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -404,9 +404,6 @@ class DataViewSet(AnonymousUserPublicFormsMixin, OpenRosaModelViewSet): extra_lookup_fields = None queryset = XForm.objects.all() - def get_queryset(self): - return super().get_queryset() - def bulk_delete(self, request, *args, **kwargs): """ Bulk delete instances From 5408a75f7550bcb7905147cbcae01984ce33948c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Thu, 3 Oct 2024 18:05:09 -0400 Subject: [PATCH 114/119] Make MCH test pass - PyXForm is case sensitive --- .../main/tests/fixtures/bug_fixes/MCH_v1.xls | Bin 14848 -> 14848 bytes .../apps/main/tests/test_past_bugs.py | 12 +++++------- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/kobo/apps/openrosa/apps/main/tests/fixtures/bug_fixes/MCH_v1.xls b/kobo/apps/openrosa/apps/main/tests/fixtures/bug_fixes/MCH_v1.xls index 1fea56130ca945168d07af9fe6aeae154010ce1d..7bd979ec038b401a1c204e81d893877907be49d5 100644 GIT binary patch delta 1259 zcmZXUO=uHQ5Xa~3ZnnuLF$-;^kamsrgBW8>vrXKjiH-GO1aGC#V+;16lv>(A5hF>h zpvS`N$-85WtZ96+4=q748y+HZnPWEKj`Ea z{ir`Zy12Z!y0Bzv5qJy(09U%PyFCz4h(;fUZm0?ZOF4PyE-5F|oi5x9160Q65PIhuf+APf3sv~tEmVq73hiMYdwyg&>|#4AqN z0x>KR`<$2%h!Kf!(yWhyKqMt%ofAcY7?X&XX{(ih3j%S{Py7V~(+b33182t~2Yn3I z@u@=sBX7Q^`=t(--aC|*e9}@^+QN?jseWyVYd0~;%9hc4Wy?a@bicIBrS}h|lRoLB zAkCmOj9L6Q`7c3#o@Fzw2uRkyI?ZJmQ5A@sMBL)UMS&QXh-aLb5{SG+{Kzb0m6|}< 
z645-z`nV(z6B6;76Vn1wkci)$z#Z^^ixmBYTgtv8bT3+zqQ;6hMk4BtP*62Xn!)~pC+#Pdkopc%-a%+POs*kv}hde~#UQJq9_O(A9 zX!*lIfQ05j{0zfdIkdaj&(k0O&$S(L`^sFT)G`;1+Bag&fN9ckyd81YfLjG`aQP`F z==213Bs?BZn3Pi(A7kL|>eljj4Ai7T4KWISP+Ja_SEyk|!4GQ3p$ZChkWui1+H1H+%rJ_&;M!^qiIV%HK6>5r6@PpcNs6~aEW)%FOb{y)ILKPVWe^%%uEiR5#X| z>znbFYt6OwCh8a@k98iZK$JfA&AL*E`#P(urRR@63oBtn?QWr@iy!u%Vq1ux_N~%= F@eg2lqrw0H diff --git a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py index 4cd6fd49b9..afe5c0989b 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py +++ b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py @@ -26,14 +26,12 @@ def test_uniqueness_of_group_names_enforced(self): assert XForm.objects.count() == pre_count - def test_buggy_files(self): - message = "Unknown question type 'Select one from source'" - with pytest.raises(PyXFormError) as e: - self._publish_xls_file( - os.path.join('fixtures/bug_fixes/MCH_v1.xls') - ) - assert message == str(e) + def test_mch(self): + self._publish_xls_file( + os.path.join('fixtures/bug_fixes/MCH_v1.xls') + ) + def test_buggy_files(self): message = 'Duplicate column header: label' with pytest.raises(PyXFormError) as e: self._publish_xls_file( From d3d3ba67b1d161695d94db7092e19de38df7738c Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 7 Oct 2024 10:40:46 -0400 Subject: [PATCH 115/119] Restore missing code from kobotoolbox/kpi#5003 --- kobo/apps/stripe/tests/test_organization_usage.py | 1 - kobo/apps/trackers/tests/submission_utils.py | 1 + kpi/views/v2/data.py | 9 ++++++++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 004c468d8a..3d59479ebd 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -7,7 +7,6 @@ import pytest from datetime import datetime from dateutil.relativedelta import relativedelta - from django.core.cache import cache from django.test import override_settings from django.urls import reverse diff --git a/kobo/apps/trackers/tests/submission_utils.py b/kobo/apps/trackers/tests/submission_utils.py index 49dacd06e1..5c761f2823 100644 --- a/kobo/apps/trackers/tests/submission_utils.py +++ b/kobo/apps/trackers/tests/submission_utils.py @@ -4,6 +4,7 @@ from dateutil.relativedelta import relativedelta from django.conf import settings +from django.utils import timezone from model_bakery import baker from kpi.models import Asset diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 5936790bd3..780e76f5f1 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -402,6 +402,9 @@ def duplicate(self, request, pk, *args, **kwargs): deployment = self._get_deployment() # Coerce to int because the back end only finds matches with the same type submission_id = positive_int(pk) + original_submission = deployment.get_submission( + submission_id=submission_id, user=request.user, fields=['_id', '_uuid'] + ) with http_open_rosa_error_handler( lambda: deployment.duplicate_submission( @@ -416,8 +419,12 @@ def duplicate(self, request, pk, *args, **kwargs): 'status': handler.status_code, } else: + duplicate_submission = handler.func_return + deployment.copy_submission_extras( + original_submission['_uuid'], duplicate_submission['_uuid'] + ) response = { - 'data': handler.func_return, + 'data': duplicate_submission, 'content_type': 'application/json', 'status': status.HTTP_201_CREATED, } From 
5f39557e4dcd2860b32571c96325578efa62851b Mon Sep 17 00:00:00 2001
From: Olivier Leger
Date: Mon, 7 Oct 2024 10:56:54 -0400
Subject: [PATCH 116/119] linting: Remove unused imports and variables

---
 kobo/apps/audit_log/tests/test_one_time_auth.py | 6 +++---
 kobo/apps/kobo_auth/__init__.py | 2 +-
 .../apps/api/tests/viewsets/test_abstract_viewset.py | 3 +--
 kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py | 2 --
 .../apps/api/tests/viewsets/test_xform_submission_api.py | 8 +++-----
 kobo/apps/openrosa/apps/api/tools.py | 2 --
 ...move_xform_has_kpi_hooks_and_instance_posted_to_kpi.py | 6 +-----
 kobo/apps/openrosa/apps/main/models/meta_data.py | 2 --
 kobo/apps/openrosa/apps/main/models/user_profile.py | 2 --
 kpi/deployment_backends/kc_access/utils.py | 1 -
 kpi/deployment_backends/mock_backend.py | 6 +++---
 kpi/deployment_backends/openrosa_backend.py | 4 ++--
 kpi/tests/api/v2/test_api_paired_data.py | 6 +-----
 kpi/tests/test_mongo_helper.py | 2 +-
 kpi/tests/test_usage_calculator.py | 5 -----
 kpi/utils/monkey_patching.py | 1 -
 kpi/views/v2/data.py | 2 +-
 kpi/views/v2/user.py | 2 --
 18 files changed, 17 insertions(+), 45 deletions(-)

diff --git a/kobo/apps/audit_log/tests/test_one_time_auth.py b/kobo/apps/audit_log/tests/test_one_time_auth.py
index 205709c2e3..fa1e473e93 100644
--- a/kobo/apps/audit_log/tests/test_one_time_auth.py
+++ b/kobo/apps/audit_log/tests/test_one_time_auth.py
@@ -1,9 +1,9 @@
-from unittest import TestCase, mock
+from unittest import mock
 from unittest.mock import patch
 
 from ddt import data, ddt, unpack
 from django.http import HttpResponse
-from django.urls import resolve, reverse
+from django.urls import reverse
 from rest_framework.authtoken.models import Token
 from trench.utils import get_mfa_model
 
@@ -159,7 +159,7 @@ def test_authorized_application_auth_creates_log(self):
         app: AuthorizedApplication = AuthorizedApplication(name='Auth app')
         app.save()
         header = {'HTTP_AUTHORIZATION': f'Token {app.key}'}
-        response = self.client.post(
+        self.client.post(
             reverse('authenticate_user'),
             **header,
             data={'username': 'test', 'password': 'test'},
diff --git a/kobo/apps/kobo_auth/__init__.py b/kobo/apps/kobo_auth/__init__.py
index 4b7550c50b..f20d4bb027 100644
--- a/kobo/apps/kobo_auth/__init__.py
+++ b/kobo/apps/kobo_auth/__init__.py
@@ -6,5 +6,5 @@ class KoboAuthAppConfig(AppConfig):
     verbose_name = 'Authentication and authorization'
 
     def ready(self):
-        from . import signals
+        from . import signals  # noqa
         super().ready()
diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py
index e6f8cce8da..3a61038693 100644
--- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py
+++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py
@@ -1,4 +1,3 @@
-# coding: utf-8
 import os
 from typing import Union
 
@@ -107,7 +106,7 @@ def publish_xls_form(
             self.form_data = response.data
         else:
             with open(path, 'rb') as f:
-                xls_file = ContentFile(f.read(), name=f'transportation.xls')
+                xls_file = ContentFile(f.read(), name='transportation.xls')
             self.xform = logger_tools.publish_xls_form(xls_file, self.user)
 
         response = self.client.get(
diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py
index b408b289bf..2c0d6e5b31 100644
--- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py
+++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py
@@ -1,9 +1,7 @@
-# coding: utf-8
 import base64
 import os
 
 import pytest
-from django.conf import settings
 from django.urls.exceptions import NoReverseMatch
 from rest_framework import status
 from rest_framework.reverse import reverse
diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py
index 989bdc3967..34dee502fb 100644
--- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py
+++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py
@@ -1,17 +1,15 @@
-# coding: utf-8
 import os
-import uuid
 
 import simplejson as json
-from django.conf import settings
 from django.contrib.auth.models import AnonymousUser
 from django.core.files.uploadedfile import InMemoryUploadedFile
 from django_digest.test import DigestAuth
 from kobo.apps.openrosa.libs.utils.guardian import assign_perm
 from rest_framework import status
 
-from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import \
-    TestAbstractViewSet
+from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import (
+    TestAbstractViewSet,
+)
 from kobo.apps.openrosa.apps.api.viewsets.xform_submission_api import XFormSubmissionApi
 from kobo.apps.openrosa.apps.logger.models import Attachment
 from kobo.apps.openrosa.libs.constants import (
diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py
index 52dd0b9116..5996db6d74 100644
--- a/kobo/apps/openrosa/apps/api/tools.py
+++ b/kobo/apps/openrosa/apps/api/tools.py
@@ -35,8 +35,6 @@
 )
 from kpi.views.v2.paired_data import (
     OpenRosaDynamicDataAttachmentViewset,
-    SubmissionXMLRenderer,
-    XMLExternalDataPermission,
 )
 
 DECIMAL_PRECISION = 2
diff --git a/kobo/apps/openrosa/apps/logger/migrations/0037_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py b/kobo/apps/openrosa/apps/logger/migrations/0037_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py
index 850fafc846..c6f30f2a5c 100644
--- a/kobo/apps/openrosa/apps/logger/migrations/0037_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py
+++ b/kobo/apps/openrosa/apps/logger/migrations/0037_remove_xform_has_kpi_hooks_and_instance_posted_to_kpi.py
@@ -1,11 +1,7 @@
 # Generated by Django 4.2.11 on 2024-07-31 15:59
 
 from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import kobo.apps.openrosa.apps.logger.models.attachment
-import 
kobo.apps.openrosa.apps.logger.models.xform -import kpi.deployment_backends.kc_access.storage +from django.db import migrations class Migration(migrations.Migration): diff --git a/kobo/apps/openrosa/apps/main/models/meta_data.py b/kobo/apps/openrosa/apps/main/models/meta_data.py index 834648dc5f..a853492bf3 100644 --- a/kobo/apps/openrosa/apps/main/models/meta_data.py +++ b/kobo/apps/openrosa/apps/main/models/meta_data.py @@ -1,4 +1,3 @@ -# coding: utf-8 import mimetypes import os import requests @@ -11,7 +10,6 @@ from django.core.validators import URLValidator from django.db import models from django.conf import settings -from django.utils import timezone from requests.exceptions import RequestException from kobo.apps.openrosa.apps.logger.models import XForm diff --git a/kobo/apps/openrosa/apps/main/models/user_profile.py b/kobo/apps/openrosa/apps/main/models/user_profile.py index fa0034a825..e7b9f6475f 100644 --- a/kobo/apps/openrosa/apps/main/models/user_profile.py +++ b/kobo/apps/openrosa/apps/main/models/user_profile.py @@ -1,10 +1,8 @@ -# coding: utf-8 import json from django.conf import settings from django.db import models from guardian.conf import settings as guardian_settings -from rest_framework.authtoken.models import Token from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.fields import LazyDefaultBooleanField diff --git a/kpi/deployment_backends/kc_access/utils.py b/kpi/deployment_backends/kc_access/utils.py index c47e2afede..55b3aed6ae 100644 --- a/kpi/deployment_backends/kc_access/utils.py +++ b/kpi/deployment_backends/kc_access/utils.py @@ -1,4 +1,3 @@ -import logging from contextlib import ContextDecorator from typing import Union diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 63dad11268..394cdf3755 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -178,9 +178,9 @@ def _get_media_files(self, submission): ) if not os.path.isfile(file_): raise Exception( - f'File `filename` does not exist! Use `path/to/image.png` if' - f' you need a fake attachment, or use one of file names ' - f'inside `kpi/fixtures/attachments for real attachment' + 'File `filename` does not exist! 
Use `path/to/image.png` if' + ' you need a fake attachment, or use one of file names ' + 'inside `kpi/fixtures/attachments for real attachment' ) with open(file_, 'rb') as f: diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 85603d4729..3e3c9cfd30 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -1340,11 +1340,11 @@ def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'): UserProfile.objects.filter(user_id=self.asset.owner.pk).update( attachment_storage_bytes=F('attachment_storage_bytes') - - self.xform.attachment_storage_bytes + - self.xform.attachment_storage_bytes ) UserProfile.objects.filter(user_id=self.asset.owner.pk).update( attachment_storage_bytes=F('attachment_storage_bytes') - + self.xform.attachment_storage_bytes + + self.xform.attachment_storage_bytes ) @property diff --git a/kpi/tests/api/v2/test_api_paired_data.py b/kpi/tests/api/v2/test_api_paired_data.py index bf4fc5821f..ca49db2443 100644 --- a/kpi/tests/api/v2/test_api_paired_data.py +++ b/kpi/tests/api/v2/test_api_paired_data.py @@ -1,6 +1,4 @@ -# coding: utf-8 import unittest -from mock import patch from django.urls import reverse from rest_framework import status @@ -98,9 +96,7 @@ def toggle_source_sharing( if not source_url: source_url = self.source_asset_detail_url - response = self.client.patch(source_url, - data=payload, - format='json') + response = self.client.patch(source_url, data=payload, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) return response diff --git a/kpi/tests/test_mongo_helper.py b/kpi/tests/test_mongo_helper.py index 538cfaf095..81e85b7382 100644 --- a/kpi/tests/test_mongo_helper.py +++ b/kpi/tests/test_mongo_helper.py @@ -66,7 +66,7 @@ def test_get_instances(self): def test_get_instances_permission_filters(self): bob = baker.make(settings.AUTH_USER_MODEL, username='bob') - alice = baker.make(settings.AUTH_USER_MODEL, username='alice') + baker.make(settings.AUTH_USER_MODEL, username='alice') asset = baker.make('kpi.Asset', owner=bob, uid='assetUid') asset.deploy(backend='mock', active=True) userform_id = asset.deployment.mongo_userform_id diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py index 921ab99c96..ef85d70ee5 100644 --- a/kpi/tests/test_usage_calculator.py +++ b/kpi/tests/test_usage_calculator.py @@ -9,10 +9,6 @@ from model_bakery import baker from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.models import ( - XForm, - DailyXFormSubmissionCounter, -) from kobo.apps.organizations.models import Organization from kobo.apps.stripe.tests.utils import generate_enterprise_subscription from kobo.apps.trackers.models import NLPUsageCounter @@ -75,7 +71,6 @@ def _create_asset(self, user=None): ) self._deployment = self.asset.deployment - def add_nlp_trackers(self): """ Add nlp data to an asset diff --git a/kpi/utils/monkey_patching.py b/kpi/utils/monkey_patching.py index c244b43b6f..d0bc364fff 100644 --- a/kpi/utils/monkey_patching.py +++ b/kpi/utils/monkey_patching.py @@ -1,6 +1,5 @@ import django.contrib.auth.management import django.db.models.deletion -from django.apps import apps from django.db import router from django.conf import settings from django.contrib.auth.management import ( diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 780e76f5f1..38f473cf56 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -672,7 +672,7 @@ def _bulk_update(self, 
request: Request) -> dict: ) except (MissingXFormException, InvalidXFormException): return { - 'data': {'detail': f'Could not updated submissions'}, + 'data': {'detail': 'Could not updated submissions'}, 'content_type': 'application/json', 'status': status.HTTP_400_BAD_REQUEST, } diff --git a/kpi/views/v2/user.py b/kpi/views/v2/user.py index b791b41909..933daf49a7 100644 --- a/kpi/views/v2/user.py +++ b/kpi/views/v2/user.py @@ -1,4 +1,3 @@ -# coding: utf-8 from rest_framework import exceptions, mixins, renderers, status, viewsets from rest_framework.decorators import action from rest_framework.response import Response @@ -7,7 +6,6 @@ from kobo.apps.kobo_auth.shortcuts import User from kpi.filters import SearchFilter -from kpi.models.authorized_application import ApplicationTokenAuthentication from kpi.permissions import IsAuthenticated from kpi.serializers.v2.user import UserSerializer, UserListSerializer from kpi.tasks import sync_kobocat_xforms From 80484b8a3a287f204ac242cedf58dd6f0dc05cd1 Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 7 Oct 2024 11:46:53 -0400 Subject: [PATCH 117/119] linting: Replace double quotes with single quotes --- hub/admin/extend_user.py | 13 +- hub/models/extra_user_detail.py | 2 +- kobo/apps/__init__.py | 2 +- kobo/apps/accounts/mfa/models.py | 2 + kobo/apps/audit_log/signals.py | 1 + kobo/apps/audit_log/urls.py | 1 - kobo/apps/audit_log/views.py | 1 + kobo/apps/form_disclaimer/models.py | 1 - kobo/apps/hook/constants.py | 2 +- kobo/apps/hook/models/hook_log.py | 15 +- .../models/service_definition_interface.py | 9 +- kobo/apps/hook/tasks.py | 9 +- kobo/apps/hook/tests/hook_test_case.py | 7 +- kobo/apps/hook/tests/test_api_hook.py | 110 ++++++----- kobo/apps/hook/tests/test_email.py | 45 ++--- kobo/apps/hook/tests/test_parser.py | 1 + kobo/apps/hook/tests/test_ssrf.py | 11 +- kobo/apps/hook/tests/test_utils.py | 7 +- kobo/apps/hook/views/v2/hook.py | 14 +- kobo/apps/hook/views/v2/hook_log.py | 14 +- kobo/apps/kobo_auth/models.py | 4 +- ...0006_delete_digest_and_auth_token_table.py | 3 +- kobo/apps/openrosa/apps/api/permissions.py | 8 +- .../tests/viewsets/test_abstract_viewset.py | 12 +- .../tests/viewsets/test_attachment_viewset.py | 4 +- .../api/tests/viewsets/test_data_viewset.py | 15 +- .../tests/viewsets/test_metadata_viewset.py | 15 +- .../apps/api/tests/viewsets/test_user.py | 1 + .../viewsets/test_xform_submission_api.py | 24 ++- .../api/tests/viewsets/test_xform_viewset.py | 53 +++--- kobo/apps/openrosa/apps/api/tools.py | 14 +- .../apps/api/viewsets/connect_viewset.py | 3 +- .../apps/api/viewsets/data_viewset.py | 51 +++--- .../apps/api/viewsets/xform_list_api.py | 18 +- .../apps/api/viewsets/xform_submission_api.py | 18 +- .../apps/api/viewsets/xform_viewset.py | 18 +- kobo/apps/openrosa/apps/logger/app.py | 1 - .../0030_backfill_lost_monthly_counters.py | 6 +- ...36_alter_instance_date_created_and_more.py | 4 +- .../openrosa/apps/logger/models/__init__.py | 12 +- .../openrosa/apps/logger/models/attachment.py | 7 +- .../openrosa/apps/logger/models/instance.py | 6 +- .../apps/openrosa/apps/logger/models/xform.py | 32 ++-- kobo/apps/openrosa/apps/logger/signals.py | 11 +- .../logger/tests/models/test_attachment.py | 8 +- .../apps/logger/tests/test_parsing.py | 79 ++++---- .../apps/logger/tests/test_publish_xls.py | 32 ++-- .../logger/tests/test_simple_submission.py | 9 +- .../openrosa/apps/logger/utils/__init__.py | 4 +- .../apps/logger/utils/database_query.py | 1 + .../openrosa/apps/logger/utils/instance.py | 7 +- 
.../apps/logger/xform_instance_parser.py | 46 ++--- .../migrations/0011_drop_old_kpi_tables.py | 3 +- .../0014_drop_old_formdisclaimer_tables.py | 3 +- .../0016_drop_old_restservice_tables.py | 3 +- .../openrosa/apps/main/models/meta_data.py | 4 +- .../openrosa/apps/main/models/user_profile.py | 4 +- .../openrosa/apps/main/tests/test_base.py | 22 ++- .../apps/main/tests/test_past_bugs.py | 3 +- .../openrosa/apps/main/tests/test_process.py | 172 +++++++++--------- .../test_user_id_string_unique_together.py | 1 + .../apps/viewer/models/parsed_instance.py | 36 ++-- kobo/apps/openrosa/libs/filters.py | 2 +- .../libs/serializers/xform_serializer.py | 6 +- .../tests/mixins/make_submission_mixin.py | 6 +- kobo/apps/openrosa/libs/utils/image_tools.py | 1 - kobo/apps/openrosa/libs/utils/logger_tools.py | 99 +++++----- kobo/apps/openrosa/libs/utils/middleware.py | 5 +- kobo/apps/organizations/utils.py | 3 +- .../resume_failed_transfers_2_024_25_fix.py | 2 +- .../apps/project_ownership/models/transfer.py | 9 +- kobo/apps/project_ownership/tasks.py | 3 +- .../tests/api/v2/test_api.py | 2 +- .../tests/test_transfer_status.py | 1 + kobo/apps/project_ownership/utils.py | 3 +- .../stripe/tests/test_organization_usage.py | 14 +- .../actions/automatic_transcription.py | 7 +- kobo/apps/subsequences/actions/base.py | 7 +- .../subsequences/actions/keyword_search.py | 7 +- kobo/apps/subsequences/actions/qual.py | 6 +- kobo/apps/subsequences/actions/translation.py | 4 +- .../subsequences/integrations/google/base.py | 13 +- .../integrations/google/google_transcribe.py | 13 +- .../integrations/google/google_translate.py | 14 +- kobo/apps/subsequences/models.py | 5 +- ...add_qual_to_last_question_of_last_asset.py | 4 +- .../subsequences/scripts/repop_known_cols.py | 10 +- .../tests/test_submission_extras_api_post.py | 7 +- kobo/apps/subsequences/utils/__init__.py | 7 +- .../determine_export_cols_with_values.py | 4 +- kobo/apps/superuser_stats/models.py | 4 +- kobo/apps/superuser_stats/tasks.py | 15 +- kobo/apps/trackers/tests/test_trackers.py | 1 - kobo/apps/trash_bin/models/project.py | 1 + kobo/apps/trash_bin/utils.py | 5 +- kobo/settings/base.py | 1 + kpi/deployment_backends/base_backend.py | 12 +- kpi/deployment_backends/mock_backend.py | 5 +- kpi/deployment_backends/openrosa_backend.py | 30 +-- kpi/management/commands/sync_kobocat_perms.py | 12 +- .../commands/sync_kobocat_xforms.py | 18 +- .../0011_explode_asset_deployments.py | 9 +- .../0012_onetimeauthenticationkey.py | 36 +++- kpi/mixins/formpack_xlsform_utils.py | 25 ++- kpi/models/asset.py | 39 ++-- kpi/models/authorized_application.py | 10 +- kpi/models/import_export_task.py | 30 +-- kpi/permissions.py | 4 +- kpi/serializers/v2/asset.py | 21 +-- kpi/signals.py | 2 +- kpi/tasks.py | 1 - kpi/tests/api/v1/test_api_assets.py | 42 ++--- kpi/tests/api/v1/test_api_submissions.py | 4 +- kpi/tests/api/v2/test_api_asset_counts.py | 2 +- kpi/tests/api/v2/test_api_asset_usage.py | 2 +- kpi/tests/api/v2/test_api_assets.py | 44 ++--- kpi/tests/api/v2/test_api_attachments.py | 2 +- kpi/tests/api/v2/test_api_paired_data.py | 4 +- kpi/tests/api/v2/test_api_submissions.py | 54 +++--- kpi/tests/kpi_test_case.py | 7 +- kpi/tests/test_asset_content.py | 6 +- kpi/tests/test_asset_versions.py | 7 +- kpi/tests/test_mock_data.py | 96 +++++----- ...t_mock_data_conflicting_version_exports.py | 4 +- kpi/tests/test_mock_data_exports.py | 10 +- kpi/tests/test_usage_calculator.py | 2 +- kpi/tests/test_utils.py | 41 +++-- kpi/tests/utils/xml.py | 1 + kpi/urls/__init__.py | 16 +- 
kpi/urls/router_api_v1.py | 11 +- kpi/utils/files.py | 4 +- kpi/utils/monkey_patching.py | 6 +- kpi/utils/project_view_exports.py | 2 +- kpi/utils/usage_calculator.py | 2 +- kpi/views/environment.py | 8 +- kpi/views/v2/asset.py | 29 ++- kpi/views/v2/asset_snapshot.py | 4 +- kpi/views/v2/attachment.py | 2 +- kpi/views/v2/data.py | 8 +- kpi/views/v2/paired_data.py | 4 +- kpi/views/v2/user.py | 4 +- 141 files changed, 1044 insertions(+), 973 deletions(-) diff --git a/hub/admin/extend_user.py b/hub/admin/extend_user.py index 0ee48c965a..5552e67887 100644 --- a/hub/admin/extend_user.py +++ b/hub/admin/extend_user.py @@ -5,9 +5,11 @@ from django.contrib import admin, messages from django.contrib.auth.admin import UserAdmin from django.contrib.auth.forms import ( - UserCreationForm as DjangoUserCreationForm, UserChangeForm as DjangoUserChangeForm, ) +from django.contrib.auth.forms import ( + UserCreationForm as DjangoUserCreationForm, +) from django.core.exceptions import ValidationError from django.db.models import Count, Sum from django.forms import CharField @@ -17,8 +19,8 @@ from kobo.apps.accounts.mfa.models import MfaMethod from kobo.apps.accounts.validators import ( - USERNAME_MAX_LENGTH, USERNAME_INVALID_MESSAGE, + USERNAME_MAX_LENGTH, username_validators, ) from kobo.apps.openrosa.apps.logger.models import MonthlyXFormSubmissionCounter @@ -26,9 +28,8 @@ from kobo.apps.trash_bin.exceptions import TrashIntegrityError from kobo.apps.trash_bin.models.account import AccountTrash from kobo.apps.trash_bin.utils import move_to_trash - - from kpi.models.asset import AssetDeploymentStatus + from .filters import UserAdvancedSearchFilter from .mixins import AdvancedSearchMixin @@ -67,7 +68,7 @@ def clean(self): )) if cleaned_data.get('is_superuser', False) and not validate_superuser_auth(self.instance): raise ValidationError( - "Superusers with a usable password must enable MFA." + 'Superusers with a usable password must enable MFA.' 
) return cleaned_data @@ -99,7 +100,7 @@ class OrgInline(admin.StackedInline): def active_subscription_status(self, obj): if settings.STRIPE_ENABLED: - return obj.active_subscription_status if obj.active_subscription_status else "None" + return obj.active_subscription_status if obj.active_subscription_status else 'None' def has_add_permission(self, request, obj=OrganizationUser): return False diff --git a/hub/models/extra_user_detail.py b/hub/models/extra_user_detail.py index 990f091910..f96f65a233 100644 --- a/hub/models/extra_user_detail.py +++ b/hub/models/extra_user_detail.py @@ -21,7 +21,7 @@ class ExtraUserDetail(StandardizeSearchableFieldMixin, models.Model): validated_password = models.BooleanField(default=True) def __str__(self): - return '{}\'s data: {}'.format(self.user.__str__(), repr(self.data)) + return "{}'s data: {}".format(self.user.__str__(), repr(self.data)) def save( self, diff --git a/kobo/apps/__init__.py b/kobo/apps/__init__.py index cf07273c96..9c7365cede 100644 --- a/kobo/apps/__init__.py +++ b/kobo/apps/__init__.py @@ -1,6 +1,6 @@ # coding: utf-8 from django.apps import AppConfig -from django.core.checks import register, Tags +from django.core.checks import Tags, register import kpi.utils.monkey_patching # noqa from kpi.utils.two_database_configuration_checker import ( diff --git a/kobo/apps/accounts/mfa/models.py b/kobo/apps/accounts/mfa/models.py index ab58f7b66a..d590a026ab 100644 --- a/kobo/apps/accounts/mfa/models.py +++ b/kobo/apps/accounts/mfa/models.py @@ -5,6 +5,8 @@ from django.utils.timezone import now from trench.admin import ( MFAMethod as TrenchMFAMethod, +) +from trench.admin import ( MFAMethodAdmin as TrenchMFAMethodAdmin, ) diff --git a/kobo/apps/audit_log/signals.py b/kobo/apps/audit_log/signals.py index 046ea518eb..0d128dea27 100644 --- a/kobo/apps/audit_log/signals.py +++ b/kobo/apps/audit_log/signals.py @@ -2,6 +2,7 @@ from django.dispatch import receiver from kpi.utils.log import logging + from .models import AccessLog diff --git a/kobo/apps/audit_log/urls.py b/kobo/apps/audit_log/urls.py index fac5f9ad15..91780fbef8 100644 --- a/kobo/apps/audit_log/urls.py +++ b/kobo/apps/audit_log/urls.py @@ -1,4 +1,3 @@ -from django.urls import include, path from rest_framework.routers import DefaultRouter from .views import AccessLogViewSet, AllAccessLogViewSet, AuditLogViewSet diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index 5a81b54765..a7173bd906 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -3,6 +3,7 @@ from kpi.filters import SearchFilter from kpi.permissions import IsAuthenticated + from .filters import AccessLogPermissionsFilter from .models import AccessLog, AuditAction, AuditLog from .permissions import SuperUserPermission diff --git a/kobo/apps/form_disclaimer/models.py b/kobo/apps/form_disclaimer/models.py index ce2fe53e8e..f144850a9b 100644 --- a/kobo/apps/form_disclaimer/models.py +++ b/kobo/apps/form_disclaimer/models.py @@ -1,4 +1,3 @@ -from django.conf import settings from django.db import models from django.db.models import Q from django.db.models.constraints import UniqueConstraint diff --git a/kobo/apps/hook/constants.py b/kobo/apps/hook/constants.py index 7b84fa03bb..6d103511f3 100644 --- a/kobo/apps/hook/constants.py +++ b/kobo/apps/hook/constants.py @@ -1,7 +1,7 @@ # coding: utf-8 from enum import Enum -from rest_framework import status +from rest_framework import status HOOK_LOG_FAILED = 0 HOOK_LOG_PENDING = 1 diff --git a/kobo/apps/hook/models/hook_log.py 
b/kobo/apps/hook/models/hook_log.py index 449b30178f..1cd4057d98 100644 --- a/kobo/apps/hook/models/hook_log.py +++ b/kobo/apps/hook/models/hook_log.py @@ -7,20 +7,21 @@ from kpi.fields import KpiUidField from kpi.models.abstract_models import AbstractTimeStampedModel from kpi.utils.log import logging + from ..constants import ( - HookLogStatus, - HOOK_LOG_PENDING, HOOK_LOG_FAILED, - KOBO_INTERNAL_ERROR_STATUS_CODE + HOOK_LOG_PENDING, + KOBO_INTERNAL_ERROR_STATUS_CODE, + HookLogStatus, ) class HookLog(AbstractTimeStampedModel): hook = models.ForeignKey( - "Hook", related_name="logs", on_delete=models.CASCADE + 'Hook', related_name='logs', on_delete=models.CASCADE ) - uid = KpiUidField(uid_prefix="hl") + uid = KpiUidField(uid_prefix='hl') submission_id = models.IntegerField( # `KoboCAT.logger.Instance.id` default=0, db_index=True ) @@ -32,7 +33,7 @@ class HookLog(AbstractTimeStampedModel): status_code = models.IntegerField( default=KOBO_INTERNAL_ERROR_STATUS_CODE, null=True, blank=True ) - message = models.TextField(default="") + message = models.TextField(default='') class Meta: ordering = ['-date_created'] @@ -94,4 +95,4 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def __str__(self): - return "".format(uid=self.uid) + return ''.format(uid=self.uid) diff --git a/kobo/apps/hook/models/service_definition_interface.py b/kobo/apps/hook/models/service_definition_interface.py index 9b2bd1a095..a1b8e76e6c 100644 --- a/kobo/apps/hook/models/service_definition_interface.py +++ b/kobo/apps/hook/models/service_definition_interface.py @@ -9,15 +9,16 @@ from ssrf_protect.ssrf_protect import SSRFProtect, SSRFProtectException from kpi.utils.log import logging -from .hook import Hook -from .hook_log import HookLog + from ..constants import ( - HOOK_LOG_SUCCESS, HOOK_LOG_FAILED, + HOOK_LOG_SUCCESS, KOBO_INTERNAL_ERROR_STATUS_CODE, RETRIABLE_STATUS_CODES, ) from ..exceptions import HookRemoteServerDownError +from .hook import Hook +from .hook_log import HookLog class ServiceDefinitionInterface(metaclass=ABCMeta): @@ -230,7 +231,7 @@ def save_log( try: json.loads(message) except ValueError: - message = re.sub(r"<[^>]*>", " ", message).strip() + message = re.sub(r'<[^>]*>', ' ', message).strip() log.message = message diff --git a/kobo/apps/hook/tasks.py b/kobo/apps/hook/tasks.py index c87cd21076..cf7c4bdbc9 100644 --- a/kobo/apps/hook/tasks.py +++ b/kobo/apps/hook/tasks.py @@ -6,11 +6,12 @@ from django.conf import settings from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import get_template -from django.utils import translation, timezone +from django.utils import timezone, translation from django_celery_beat.models import PeriodicTask from kobo.celery import celery_app from kpi.utils.log import logging + from .constants import HOOK_LOG_FAILED from .exceptions import HookRemoteServerDownError from .models import Hook, HookLog @@ -54,12 +55,12 @@ def failures_reports(): Notifies owners' assets by email of hooks failures. 
:return: bool """ - beat_schedule = settings.CELERY_BEAT_SCHEDULE.get("send-hooks-failures-reports") + beat_schedule = settings.CELERY_BEAT_SCHEDULE.get('send-hooks-failures-reports') # Use `.first()` instead of `.get()`, because task can be duplicated in admin section failures_reports_period_task = PeriodicTask.objects.filter( enabled=True, - task=beat_schedule.get('task')).order_by("-last_run_at").first() + task=beat_schedule.get('task')).order_by('-last_run_at').first() if failures_reports_period_task: @@ -138,7 +139,7 @@ def failures_reports(): 'kpi_base_url': settings.KOBOFORM_URL } # Localize templates - translation.activate(record.get("language")) + translation.activate(record.get('language')) text_content = plain_text_template.render(variables) html_content = html_template.render(variables) diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py index 7a8a90e5d2..0ec5d7045b 100644 --- a/kobo/apps/hook/tests/hook_test_case.py +++ b/kobo/apps/hook/tests/hook_test_case.py @@ -11,17 +11,18 @@ from kpi.constants import SUBMISSION_FORMAT_TYPE_JSON, SUBMISSION_FORMAT_TYPE_XML from kpi.exceptions import BadFormatException from kpi.tests.kpi_test_case import KpiTestCase + from ..constants import HOOK_LOG_FAILED from ..exceptions import HookRemoteServerDownError -from ..models import HookLog, Hook +from ..models import Hook, HookLog class HookTestCase(KpiTestCase): def setUp(self): - self.client.login(username="someuser", password="someuser") + self.client.login(username='someuser', password='someuser') self.asset = self.create_asset( - "some_asset", + 'some_asset', content=json.dumps({'survey': [ {'type': 'text', 'label': 'q1', 'name': 'q1'}, {'type': 'begin_group', 'label': 'group1', 'name': 'group1'}, diff --git a/kobo/apps/hook/tests/test_api_hook.py b/kobo/apps/hook/tests/test_api_hook.py index c6368f568d..f747bcaa11 100644 --- a/kobo/apps/hook/tests/test_api_hook.py +++ b/kobo/apps/hook/tests/test_api_hook.py @@ -1,15 +1,14 @@ # coding: utf-8 import json +from ipaddress import ip_address +from unittest.mock import MagicMock, patch import pytest import responses from constance.test import override_config from django.urls import reverse -from ipaddress import ip_address -from mock import patch, MagicMock from rest_framework import status - from kobo.apps.hook.constants import ( HOOK_LOG_FAILED, HOOK_LOG_PENDING, @@ -18,14 +17,15 @@ ) from kobo.apps.hook.models.hook import Hook from kobo.apps.kobo_auth.shortcuts import User -from kpi.constants import SUBMISSION_FORMAT_TYPE_JSON from kpi.constants import ( + PERM_CHANGE_ASSET, PERM_VIEW_SUBMISSIONS, - PERM_CHANGE_ASSET + SUBMISSION_FORMAT_TYPE_JSON, ) from kpi.utils.datetime import several_minutes_from_now -from .hook_test_case import HookTestCase + from ..exceptions import HookRemoteServerDownError +from .hook_test_case import HookTestCase class ApiHookTestCase(HookTestCase): @@ -34,24 +34,24 @@ def test_anonymous_access(self): hook = self._create_hook() self.client.logout() - list_url = reverse("hook-list", kwargs={ - "parent_lookup_asset": self.asset.uid + list_url = reverse('hook-list', kwargs={ + 'parent_lookup_asset': self.asset.uid }) response = self.client.get(list_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - detail_url = reverse("hook-detail", kwargs={ - "parent_lookup_asset": self.asset.uid, - "uid": hook.uid, + detail_url = reverse('hook-detail', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'uid': hook.uid, }) response = self.client.get(detail_url) 
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - log_list_url = reverse("hook-log-list", kwargs={ - "parent_lookup_asset": self.asset.uid, - "parent_lookup_hook": hook.uid, + log_list_url = reverse('hook-log-list', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': hook.uid, }) response = self.client.get(log_list_url) @@ -118,26 +118,26 @@ def test_editor_create(self): def test_non_owner_cannot_access(self): hook = self._create_hook() self.client.logout() - self.client.login(username="anotheruser", password="anotheruser") + self.client.login(username='anotheruser', password='anotheruser') - list_url = reverse("hook-list", kwargs={ - "parent_lookup_asset": self.asset.uid + list_url = reverse('hook-list', kwargs={ + 'parent_lookup_asset': self.asset.uid }) response = self.client.get(list_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - detail_url = reverse("hook-detail", kwargs={ - "parent_lookup_asset": self.asset.uid, - "uid": hook.uid, + detail_url = reverse('hook-detail', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'uid': hook.uid, }) response = self.client.get(detail_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - log_list_url = reverse("hook-log-list", kwargs={ - "parent_lookup_asset": self.asset.uid, - "parent_lookup_hook": hook.uid, + log_list_url = reverse('hook-log-list', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': hook.uid, }) response = self.client.get(log_list_url) @@ -145,7 +145,7 @@ def test_non_owner_cannot_access(self): def test_non_owner_cannot_create(self): self.client.logout() - self.client.login(username="anotheruser", password="anotheruser") + self.client.login(username='anotheruser', password='anotheruser') response = self._create_hook(return_response_only=True, name="Hook for asset I don't own") self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -153,25 +153,25 @@ def test_non_owner_cannot_create(self): def test_anonymous_cannot_create(self): self.client.logout() response = self._create_hook(return_response_only=True, - name="Hook for asset from anonymous") + name='Hook for asset from anonymous') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_partial_update_hook(self): hook = self._create_hook() - url = reverse("hook-detail", kwargs={ - "parent_lookup_asset": self.asset.uid, - "uid": hook.uid + url = reverse('hook-detail', kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'uid': hook.uid }) data = { - "name": "some disabled external service", - "active": False + 'name': 'some disabled external service', + 'active': False } response = self.client.patch(url, data, format=SUBMISSION_FORMAT_TYPE_JSON) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) hook.refresh_from_db() self.assertFalse(hook.active) - self.assertEqual(hook.name, "some disabled external service") + self.assertEqual(hook.name, 'some disabled external service') @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', @@ -287,7 +287,7 @@ def test_unsecured_endpoint_validation(self): response = self._create_hook(return_response_only=True) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - expected_response = {"endpoint": ["Unsecured endpoint is not allowed"]} + expected_response = {'endpoint': ['Unsecured endpoint is not allowed']} self.assertEqual(response.data, expected_response) def test_payload_template_validation(self): @@ -326,12 +326,17 @@ def 
test_payload_template_validation(self): @responses.activate def test_hook_log_filter_success(self): # Create success hook - hook = self._create_hook(name="success hook", - endpoint="http://success.service.local/", - settings={}) - responses.add(responses.POST, hook.endpoint, - status=status.HTTP_200_OK, - content_type="application/json") + hook = self._create_hook( + name='success hook', + endpoint='http://success.service.local/', + settings={}, + ) + responses.add( + responses.POST, + hook.endpoint, + status=status.HTTP_200_OK, + content_type='application/json', + ) # simulate a submission ServiceDefinition = hook.get_service_definition() @@ -372,7 +377,7 @@ def test_hook_log_filter_failure(self): responses.POST, hook.endpoint, status=status.HTTP_504_GATEWAY_TIMEOUT, - content_type="application/json", + content_type='application/json', ) # simulate a submission @@ -428,11 +433,14 @@ def test_hook_log_filter_validation(self): def test_hook_log_filter_date(self): # Create success hook hook = self._create_hook( - name="date hook", endpoint="http://date.service.local/", settings={} + name='date hook', endpoint='http://date.service.local/', settings={} + ) + responses.add( + responses.POST, + hook.endpoint, + status=status.HTTP_200_OK, + content_type='application/json', ) - responses.add(responses.POST, hook.endpoint, - status=status.HTTP_200_OK, - content_type="application/json") # simulate a submission ServiceDefinition = hook.get_service_definition() @@ -455,7 +463,8 @@ def test_hook_log_filter_date(self): tzoffset = '-02:00' # There should be a success log around now - response = self.client.get(f'{hook_log_url}?start={five_minutes_ago}&end={in_five_min}', format='json') + response = self.client.get( + f'{hook_log_url}?start={five_minutes_ago}&end={in_five_min}', format='json') self.assertEqual(response.data.get('count'), 1) # There should be no log before now @@ -467,14 +476,19 @@ def test_hook_log_filter_date(self): self.assertEqual(response.data.get('count'), 0) # There should be no log around now when expressed in a different time zone - response = self.client.get(f'{hook_log_url}?start={five_minutes_ago}{tzoffset}&end={in_five_min}{tzoffset}', format='json') + response = self.client.get( + f'{hook_log_url}?start={five_minutes_ago}{tzoffset}&end={in_five_min}{tzoffset}', + format='json', + ) self.assertEqual(response.data.get('count'), 0) def test_hook_log_filter_date_validation(self): # Create hook - hook = self._create_hook(name="success hook", - endpoint="http://hook.service.local/", - settings={}) + hook = self._create_hook( + name='success hook', + endpoint='http://hook.service.local/', + settings={}, + ) # Get log for the success hook hook_log_url = reverse('hook-log-list', kwargs={ diff --git a/kobo/apps/hook/tests/test_email.py b/kobo/apps/hook/tests/test_email.py index 2a587340cb..678f9a40a4 100644 --- a/kobo/apps/hook/tests/test_email.py +++ b/kobo/apps/hook/tests/test_email.py @@ -1,15 +1,16 @@ # coding: utf-8 +from ipaddress import ip_address +from unittest.mock import MagicMock, patch + import responses from django.conf import settings from django.core import mail from django.template.loader import get_template -from django.utils import translation, dateparse -from django_celery_beat.models import PeriodicTask, CrontabSchedule -from ipaddress import ip_address -from mock import patch, MagicMock +from django.utils import dateparse, translation +from django_celery_beat.models import CrontabSchedule, PeriodicTask -from .hook_test_case import HookTestCase from ..tasks import 
failures_reports +from .hook_test_case import HookTestCase class EmailTestCase(HookTestCase): @@ -41,34 +42,34 @@ def test_notifications(self): self.assertEqual(len(mail.outbox), 1) expected_record = { - "username": self.asset.owner.username, - "email": self.asset.owner.email, - "language": "en", - "assets": { + 'username': self.asset.owner.username, + 'email': self.asset.owner.email, + 'language': 'en', + 'assets': { self.asset.uid: { - "name": self.asset.name, - "hook_uid": self.hook.uid, - "max_length": len(self.hook.name), - "logs": [{ - "hook_name": self.hook.name, - "status_code": first_log_response.get("status_code"), - "message": first_log_response.get("message"), - "uid": first_log_response.get("uid"), - "date_modified": dateparse.parse_datetime(first_log_response.get("date_modified")) + 'name': self.asset.name, + 'hook_uid': self.hook.uid, + 'max_length': len(self.hook.name), + 'logs': [{ + 'hook_name': self.hook.name, + 'status_code': first_log_response.get('status_code'), + 'message': first_log_response.get('message'), + 'uid': first_log_response.get('uid'), + 'date_modified': dateparse.parse_datetime(first_log_response.get('date_modified')) }] } } } - plain_text_template = get_template("reports/failures_email_body.txt") + plain_text_template = get_template('reports/failures_email_body.txt') variables = { - "username": expected_record.get("username"), - "assets": expected_record.get("assets"), + 'username': expected_record.get('username'), + 'assets': expected_record.get('assets'), 'kpi_base_url': settings.KOBOFORM_URL } # Localize templates - translation.activate(expected_record.get("language")) + translation.activate(expected_record.get('language')) text_content = plain_text_template.render(variables) self.assertEqual(mail.outbox[0].body, text_content) diff --git a/kobo/apps/hook/tests/test_parser.py b/kobo/apps/hook/tests/test_parser.py index 0709a0fb1b..f90dc6fbbe 100644 --- a/kobo/apps/hook/tests/test_parser.py +++ b/kobo/apps/hook/tests/test_parser.py @@ -7,6 +7,7 @@ from kpi.constants import SUBMISSION_FORMAT_TYPE_XML from kpi.utils.strings import to_str from kpi.utils.xml import check_lxml_fromstring + from .hook_test_case import HookTestCase diff --git a/kobo/apps/hook/tests/test_ssrf.py b/kobo/apps/hook/tests/test_ssrf.py index df3ebd193f..f4c38155e2 100644 --- a/kobo/apps/hook/tests/test_ssrf.py +++ b/kobo/apps/hook/tests/test_ssrf.py @@ -1,15 +1,14 @@ +from ipaddress import ip_address +from unittest.mock import MagicMock, patch + import pytest import responses from constance.test import override_config -from ipaddress import ip_address -from mock import patch, MagicMock from rest_framework import status from ssrf_protect.exceptions import SSRFProtectException -from kobo.apps.hook.constants import ( - HOOK_LOG_FAILED, - KOBO_INTERNAL_ERROR_STATUS_CODE -) +from kobo.apps.hook.constants import HOOK_LOG_FAILED, KOBO_INTERNAL_ERROR_STATUS_CODE + from .hook_test_case import HookTestCase diff --git a/kobo/apps/hook/tests/test_utils.py b/kobo/apps/hook/tests/test_utils.py index 931f66ce1e..d05f618564 100644 --- a/kobo/apps/hook/tests/test_utils.py +++ b/kobo/apps/hook/tests/test_utils.py @@ -1,10 +1,11 @@ -import responses from ipaddress import ip_address -from mock import patch, MagicMock +from unittest.mock import MagicMock, patch + +import responses from rest_framework import status -from .hook_test_case import HookTestCase from ..utils.services import call_services +from .hook_test_case import HookTestCase class HookUtilsTestCase(HookTestCase): diff --git 
a/kobo/apps/hook/views/v2/hook.py b/kobo/apps/hook/views/v2/hook.py index 1e2a975868..5550717eb4 100644 --- a/kobo/apps/hook/views/v2/hook.py +++ b/kobo/apps/hook/views/v2/hook.py @@ -5,7 +5,7 @@ from django.db.models import Q from django.utils import timezone from django.utils.translation import gettext as t -from rest_framework import viewsets, status +from rest_framework import status, viewsets from rest_framework.decorators import action from rest_framework.response import Response from rest_framework_extensions.mixins import NestedViewSetMixin @@ -152,7 +152,7 @@ class HookViewSet(AssetNestedObjectViewsetMixin, NestedViewSetMixin, """ model = Hook - lookup_field = "uid" + lookup_field = 'uid' serializer_class = HookSerializer permission_classes = (AssetEditorSubmissionViewerPermission,) @@ -168,10 +168,10 @@ def get_queryset(self): def perform_create(self, serializer): serializer.save(asset=self.asset) - @action(detail=True, methods=["PATCH"]) + @action(detail=True, methods=['PATCH']) def retry(self, request, uid=None, *args, **kwargs): hook = self.get_object() - response = {"detail": t("Task successfully scheduled")} + response = {'detail': t('Task successfully scheduled')} status_code = status.HTTP_200_OK if hook.active: threshold = timezone.now() - timedelta(seconds=120) @@ -205,14 +205,14 @@ def retry(self, request, uid=None, *args, **kwargs): queue='kpi_low_priority_queue', args=(hooklogs_ids,) ) response.update({ - "pending_uids": hooklogs_uids + 'pending_uids': hooklogs_uids }) else: - response["detail"] = t("No data to retry") + response['detail'] = t('No data to retry') status_code = status.HTTP_304_NOT_MODIFIED else: - response["detail"] = t("Can not retry on disabled hooks") + response['detail'] = t('Can not retry on disabled hooks') status_code = status.HTTP_400_BAD_REQUEST return Response(response, status=status_code) diff --git a/kobo/apps/hook/views/v2/hook_log.py b/kobo/apps/hook/views/v2/hook_log.py index 6b5a8874c1..6e6ec34de9 100644 --- a/kobo/apps/hook/views/v2/hook_log.py +++ b/kobo/apps/hook/views/v2/hook_log.py @@ -1,15 +1,15 @@ # coding: utf-8 from django.utils.translation import gettext as t from django_filters.rest_framework import DjangoFilterBackend -from rest_framework import viewsets, mixins, status, serializers +from rest_framework import mixins, status, viewsets from rest_framework.decorators import action from rest_framework.response import Response from rest_framework_extensions.mixins import NestedViewSetMixin from kobo.apps.hook.constants import KOBO_INTERNAL_ERROR_STATUS_CODE +from kobo.apps.hook.filters import HookLogFilter from kobo.apps.hook.models.hook_log import HookLog from kobo.apps.hook.serializers.v2.hook_log import HookLogSerializer -from kobo.apps.hook.filters import HookLogFilter from kpi.paginators import TinyPaginated from kpi.permissions import AssetEditorSubmissionViewerPermission from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin @@ -76,7 +76,7 @@ class HookLogViewSet(AssetNestedObjectViewsetMixin, model = HookLog - lookup_field = "uid" + lookup_field = 'uid' serializer_class = HookLogSerializer permission_classes = (AssetEditorSubmissionViewerPermission,) pagination_class = TinyPaginated @@ -84,7 +84,7 @@ class HookLogViewSet(AssetNestedObjectViewsetMixin, filterset_class = HookLogFilter def get_queryset(self): - hook_uid = self.get_parents_query_dict().get("hook") + hook_uid = self.get_parents_query_dict().get('hook') queryset = self.model.objects.filter(hook__uid=hook_uid, hook__asset__uid=self.asset_uid) # Even 
though we only need 'uid', `select_related('hook__asset__uid')` @@ -95,7 +95,7 @@ def get_queryset(self): return queryset - @action(detail=True, methods=["PATCH"]) + @action(detail=True, methods=['PATCH']) def retry(self, request, uid=None, *args, **kwargs): """ Retries to send data to external service. @@ -103,8 +103,8 @@ def retry(self, request, uid=None, *args, **kwargs): :param uid: str :return: Response """ - response = {"detail": "", - "status_code": KOBO_INTERNAL_ERROR_STATUS_CODE} + response = {'detail': '', + 'status_code': KOBO_INTERNAL_ERROR_STATUS_CODE} status_code = status.HTTP_200_OK hook_log = self.get_object() diff --git a/kobo/apps/kobo_auth/models.py b/kobo/apps/kobo_auth/models.py index 307e7614ac..5e1a96f1d9 100644 --- a/kobo/apps/kobo_auth/models.py +++ b/kobo/apps/kobo_auth/models.py @@ -1,13 +1,11 @@ from django.conf import settings -from django.contrib.auth.models import Permission from django.contrib.auth.models import AbstractUser - from kobo.apps.openrosa.libs.constants import ( OPENROSA_APP_LABELS, ) from kobo.apps.openrosa.libs.permissions import get_model_permission_codenames -from kpi.utils.database import use_db, update_autofield_sequence +from kpi.utils.database import update_autofield_sequence, use_db class User(AbstractUser): diff --git a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py index 0a2ecc9e30..5d593eecf5 100644 --- a/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py +++ b/kobo/apps/openrosa/apps/api/migrations/0006_delete_digest_and_auth_token_table.py @@ -1,8 +1,7 @@ # Generated by Django 4.2.15 on 2024-09-13 13:52 -from django.db import migrations, connections from django.conf import settings - +from django.db import connections, migrations KC_TABLES_TO_DELETE = [ 'django_digest_partialdigest', diff --git a/kobo/apps/openrosa/apps/api/permissions.py b/kobo/apps/openrosa/apps/api/permissions.py index 845ae86603..bb49cfe575 100644 --- a/kobo/apps/openrosa/apps/api/permissions.py +++ b/kobo/apps/openrosa/apps/api/permissions.py @@ -1,20 +1,20 @@ # coding: utf-8 from django.http import Http404 from rest_framework.permissions import ( + SAFE_METHODS, BasePermission, DjangoObjectPermissions, IsAuthenticated, - SAFE_METHODS, ) +from kobo.apps.openrosa.apps.api.exceptions import LegacyAPIException +from kobo.apps.openrosa.apps.logger.models import XForm from kobo.apps.openrosa.libs.constants import ( - CAN_DELETE_DATA_XFORM, CAN_CHANGE_XFORM, + CAN_DELETE_DATA_XFORM, CAN_VALIDATE_XFORM, CAN_VIEW_XFORM, ) -from kobo.apps.openrosa.apps.logger.models import XForm -from kobo.apps.openrosa.apps.api.exceptions import LegacyAPIException class ViewDjangoObjectPermissions(DjangoObjectPermissions): diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py index 3a61038693..58ed6a9ebd 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py @@ -8,17 +8,19 @@ ) from django.core.files.base import ContentFile from django.test import TestCase -from django_digest.test import DigestAuth from rest_framework import status from rest_framework.reverse import reverse from rest_framework.test import APIRequestFactory +from django_digest.test import DigestAuth from kobo.apps.kobo_auth.shortcuts import User from 
kobo.apps.openrosa.apps.api.viewsets.metadata_viewset import MetaDataViewSet -from kobo.apps.openrosa.apps.logger.models import XForm, Attachment +from kobo.apps.openrosa.apps.logger.models import Attachment, XForm from kobo.apps.openrosa.apps.main import tests as main_tests -from kobo.apps.openrosa.apps.main.models import UserProfile, MetaData -from kobo.apps.openrosa.libs.tests.mixins.make_submission_mixin import MakeSubmissionMixin +from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile +from kobo.apps.openrosa.libs.tests.mixins.make_submission_mixin import ( + MakeSubmissionMixin, +) from kobo.apps.openrosa.libs.tests.mixins.request_mixin import RequestMixin from kobo.apps.openrosa.libs.utils import logger_tools @@ -230,7 +232,7 @@ def _submit_transport_instance_w_attachment( ): survey_datetime = self.surveys[survey_at] if not media_file: - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' path = os.path.join( self.main_directory, 'fixtures', diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py index 7e874b9858..04bcc1cbe9 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py @@ -5,7 +5,7 @@ from rest_framework.reverse import reverse from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( - TestAbstractViewSet + TestAbstractViewSet, ) from kobo.apps.openrosa.apps.api.viewsets.attachment_viewset import AttachmentViewSet from kobo.apps.openrosa.apps.logger.models.attachment import Attachment @@ -152,7 +152,7 @@ def test_direct_image_link(self): def test_direct_image_link_uppercase(self): self._submit_transport_instance_w_attachment( - media_file="1335783522564.JPG") + media_file='1335783522564.JPG') filename = self.attachment.media_file.name file_base, file_extension = os.path.splitext(filename) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py index 1be75afba9..dd64fc3b1d 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py @@ -1,21 +1,20 @@ # coding: utf-8 import requests - from django.test import RequestFactory -from kobo.apps.openrosa.libs.utils.guardian import assign_perm, remove_perm +from httmock import HTTMock, all_requests from rest_framework import status from kobo.apps.openrosa.apps.api.viewsets.data_viewset import DataViewSet from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import XFormViewSet -from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.apps.logger.models import XForm +from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.apps.viewer.models import ParsedInstance from kobo.apps.openrosa.libs.constants import ( CAN_CHANGE_XFORM, CAN_DELETE_DATA_XFORM, CAN_VIEW_XFORM, ) -from httmock import all_requests, HTTMock +from kobo.apps.openrosa.libs.utils.guardian import assign_perm, remove_perm @all_requests @@ -201,14 +200,14 @@ def test_add_form_tag_propagates_to_data_tags(self): response = view(request, pk=pk) self.assertEqual(response.data, []) # add tag "hello" - request = self.factory.post('/', data={"tags": "hello"}, **self.extra) + request = self.factory.post('/', data={'tags': 'hello'}, **self.extra) response = view(request, pk=pk) 
self.assertEqual(response.status_code, 201) self.assertEqual(response.data, ['hello']) for i in self.xform.instances.all(): self.assertIn('hello', i.tags.names()) # remove tag "hello" - request = self.factory.delete('/', data={"tags": "hello"}, + request = self.factory.delete('/', data={'tags': 'hello'}, **self.extra) response = view(request, pk=pk, label='hello') self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -326,14 +325,14 @@ def test_get_enketo_edit_url(self): request = self.factory.get( '/', - data={'return_url': "http://test.io/test_url"}, + data={'return_url': 'http://test.io/test_url'}, **self.extra ) with HTTMock(enketo_mock): response = view(request, pk=formid, dataid=dataid) self.assertEqual( - response.data['url'], "https://hmh2a.enketo.formhub.org" + response.data['url'], 'https://hmh2a.enketo.formhub.org' ) def test_get_enketo_view_url(self): diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py index b625f2462f..0f41357f15 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py @@ -3,16 +3,17 @@ from django.conf import settings from django.core.files.uploadedfile import InMemoryUploadedFile -from kobo.apps.openrosa.libs.utils.guardian import assign_perm from rest_framework import status from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( - TestAbstractViewSet) + TestAbstractViewSet, +) from kobo.apps.openrosa.apps.api.viewsets.metadata_viewset import MetaDataViewSet from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import XFormViewSet from kobo.apps.openrosa.apps.main.models.meta_data import MetaData -from kobo.apps.openrosa.libs.serializers.xform_serializer import XFormSerializer from kobo.apps.openrosa.libs.constants import CAN_CHANGE_XFORM, CAN_VIEW_XFORM +from kobo.apps.openrosa.libs.serializers.xform_serializer import XFormSerializer +from kobo.apps.openrosa.libs.utils.guardian import assign_perm class TestMetaDataViewSet(TestAbstractViewSet): @@ -24,10 +25,10 @@ def setUp(self): 'post': 'create' }) self.publish_xls_form() - self.data_value = "screenshot.png" + self.data_value = 'screenshot.png' self.fixture_dir = os.path.join( - settings.OPENROSA_APP_DIR, "apps", "main", "tests", "fixtures", - "transportation" + settings.OPENROSA_APP_DIR, 'apps', 'main', 'tests', 'fixtures', + 'transportation' ) self.path = os.path.join(self.fixture_dir, self.data_value) @@ -182,7 +183,7 @@ def test_list_metadata_for_specific_form(self): response = self.view(request) self.assertEqual(response.status_code, 404) - data['xform'] = "INVALID" + data['xform'] = 'INVALID' request = self.factory.get('/', data, **self.extra) response = self.view(request) self.assertEqual(response.status_code, 400) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py index 2c0d6e5b31..66e8ee29d1 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py @@ -7,6 +7,7 @@ from rest_framework.reverse import reverse from kobo.apps.openrosa.apps.logger.models.xform import XForm + from .test_abstract_viewset import TestAbstractViewSet diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py index 34dee502fb..fa8eb92cfd 100644 --- 
a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py @@ -3,18 +3,16 @@ import simplejson as json from django.contrib.auth.models import AnonymousUser from django.core.files.uploadedfile import InMemoryUploadedFile -from django_digest.test import DigestAuth -from kobo.apps.openrosa.libs.utils.guardian import assign_perm from rest_framework import status +from django_digest.test import DigestAuth from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( TestAbstractViewSet, ) from kobo.apps.openrosa.apps.api.viewsets.xform_submission_api import XFormSubmissionApi from kobo.apps.openrosa.apps.logger.models import Attachment -from kobo.apps.openrosa.libs.constants import ( - CAN_ADD_SUBMISSIONS -) +from kobo.apps.openrosa.libs.constants import CAN_ADD_SUBMISSIONS +from kobo.apps.openrosa.libs.utils.guardian import assign_perm from kobo.apps.openrosa.libs.utils.logger_tools import OpenRosaTemporarilyUnavailable @@ -22,8 +20,8 @@ class TestXFormSubmissionApi(TestAbstractViewSet): def setUp(self): super().setUp() self.view = XFormSubmissionApi.as_view({ - "head": "create", - "post": "create" + 'head': 'create', + 'post': 'create' }) self.publish_xls_form() @@ -139,7 +137,7 @@ def test_post_submission_uuid_other_user_username_not_provided(self): } self._create_user_profile(alice_data) s = self.surveys[0] - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' path = os.path.join(self.main_directory, 'fixtures', 'transportation', 'instances', s, media_file) with open(path, 'rb') as f: @@ -230,7 +228,7 @@ def test_post_submission_authenticated_bad_json(self): def test_post_submission_require_auth(self): count = Attachment.objects.count() s = self.surveys[0] - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' path = os.path.join(self.main_directory, 'fixtures', 'transportation', 'instances', s, media_file) with open(path, 'rb') as f: @@ -270,7 +268,7 @@ def test_post_submission_require_auth(self): def test_post_submission_require_auth_anonymous_user(self): count = Attachment.objects.count() s = self.surveys[0] - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' path = os.path.join(self.main_directory, 'fixtures', 'transportation', 'instances', s, media_file) with open(path, 'rb') as f: @@ -300,7 +298,7 @@ def test_post_submission_require_auth_other_user(self): count = Attachment.objects.count() s = self.surveys[0] - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' path = os.path.join(self.main_directory, 'fixtures', 'transportation', 'instances', s, media_file) with open(path, 'rb') as f: @@ -341,7 +339,7 @@ def test_post_submission_require_auth_data_entry_role(self): count = Attachment.objects.count() s = self.surveys[0] - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' path = os.path.join(self.main_directory, 'fixtures', 'transportation', 'instances', s, media_file) with open(path, 'rb') as f: @@ -370,7 +368,7 @@ def test_post_submission_require_auth_data_entry_role(self): status_code=201) def test_post_submission_json_without_submission_key(self): - data = {"id": "transportation_2011_07_25"} + data = {'id': 'transportation_2011_07_25'} request = self.factory.post('/submission', data, format='json') response = self.view(request) self.assertEqual(response.status_code, 401) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py 
b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py index 3a77be64da..8f5210063b 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py @@ -3,10 +3,9 @@ from xml.dom import Node import pytest +from defusedxml import minidom from django.conf import settings from django.urls import reverse -from defusedxml import minidom -from kobo.apps.openrosa.libs.utils.guardian import assign_perm from pyxform.errors import PyXFormError from rest_framework import status @@ -15,10 +14,9 @@ ) from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import XFormViewSet from kobo.apps.openrosa.apps.logger.models import XForm -from kobo.apps.openrosa.libs.constants import ( - CAN_VIEW_XFORM -) +from kobo.apps.openrosa.libs.constants import CAN_VIEW_XFORM from kobo.apps.openrosa.libs.serializers.xform_serializer import XFormSerializer +from kobo.apps.openrosa.libs.utils.guardian import assign_perm class TestXFormViewSet(TestAbstractViewSet): @@ -149,11 +147,11 @@ def test_form_format(self): }) formid = self.xform.pk data = { - "name": "transportation_2011_07_25", # Since commit 3c0e17d0b6041ae96b06c3ef4d2f78a2d0739cbc - "title": "transportation_2011_07_25", - "default_language": "default", - "id_string": "transportation_2011_07_25", - "type": "survey", + 'name': 'transportation_2011_07_25', # Since commit 3c0e17d0b6041ae96b06c3ef4d2f78a2d0739cbc # flake8: noqa + 'title': 'transportation_2011_07_25', + 'default_language': 'default', + 'id_string': 'transportation_2011_07_25', + 'type': 'survey', } request = self.factory.get('/', **self.extra) # test for unsupported format @@ -172,25 +170,34 @@ def test_form_format(self): self.assertEqual(response.status_code, status.HTTP_200_OK) xml_path = os.path.join( - settings.OPENROSA_APP_DIR, "apps", "main", "tests", "fixtures", - "transportation", "transportation.xml") + settings.OPENROSA_APP_DIR, + 'apps', + 'main', + 'tests', + 'fixtures', + 'transportation', + 'transportation.xml', + ) with open(xml_path) as xml_file: expected_doc = minidom.parse(xml_file) model_node = [ - n for n in - response_doc.getElementsByTagName("h:head")[0].childNodes - if n.nodeType == Node.ELEMENT_NODE and - n.tagName == "model"][0] + n + for n in response_doc.getElementsByTagName('h:head')[0].childNodes + if n.nodeType == Node.ELEMENT_NODE and n.tagName == 'model' + ][0] # check for UUID and remove uuid_nodes = [ - node for node in model_node.childNodes + node + for node in model_node.childNodes if node.nodeType == Node.ELEMENT_NODE - and node.getAttribute("nodeset") == "/transportation_2011_07_25/formhub/uuid"] + and node.getAttribute('nodeset') + == '/transportation_2011_07_25/formhub/uuid' + ] self.assertEqual(len(uuid_nodes), 1) uuid_node = uuid_nodes[0] - uuid_node.setAttribute("calculate", "''") + uuid_node.setAttribute('calculate', "''") # check content without UUID self.assertEqual(response_doc.toxml(), expected_doc.toxml()) @@ -213,26 +220,26 @@ def test_form_tags(self): self.assertEqual(response.data, []) # add tag "hello" - request = self.factory.post('/', data={"tags": "hello"}, **self.extra) + request = self.factory.post('/', data={'tags': 'hello'}, **self.extra) response = view(request, pk=formid) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.data, ['hello']) # check filter by tag - request = self.factory.get('/', data={"tags": "hello"}, **self.extra) + request = self.factory.get('/', data={'tags': 'hello'}, **self.extra) 
self.form_data = XFormSerializer( self.xform, context={'request': request}).data response = list_view(request, pk=formid) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, [self.form_data]) - request = self.factory.get('/', data={"tags": "goodbye"}, **self.extra) + request = self.factory.get('/', data={'tags': 'goodbye'}, **self.extra) response = list_view(request, pk=formid) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, []) # remove tag "hello" - request = self.factory.delete('/', data={"tags": "hello"}, + request = self.factory.delete('/', data={'tags': 'hello'}, **self.extra) response = view(request, pk=formid, label='hello') self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py index 5996db6d74..2a2a049a81 100644 --- a/kobo/apps/openrosa/apps/api/tools.py +++ b/kobo/apps/openrosa/apps/api/tools.py @@ -11,7 +11,7 @@ HttpResponseNotFound, HttpResponseRedirect, ) -from django.urls import resolve, Resolver404 +from django.urls import Resolver404, resolve from django.utils.translation import gettext as t from rest_framework import exceptions from rest_framework.request import Request @@ -53,7 +53,7 @@ def _get_id_for_type(record, mongo_field): date_field = datetime_from_str(record[mongo_field]) mongo_str = '$' + mongo_field - return {"$substr": [mongo_str, 0, 10]} if isinstance(date_field, datetime)\ + return {'$substr': [mongo_str, 0, 10]} if isinstance(date_field, datetime)\ else mongo_str @@ -69,8 +69,8 @@ def publish_xlsform(request, user, existing_xform=None): ) ): raise exceptions.PermissionDenied( - detail=t("User %(user)s has no permission to add xforms to " - "account %(account)s" % {'user': request.user.username, + detail=t('User %(user)s has no permission to add xforms to ' + 'account %(account)s' % {'user': request.user.username, 'account': user.username})) if ( existing_xform @@ -78,8 +78,8 @@ def publish_xlsform(request, user, existing_xform=None): and not request.user.has_perm('change_xform', existing_xform) ): raise exceptions.PermissionDenied( - detail=t("User %(user)s has no permission to change this " - "form." % {'user': request.user.username, }) + detail=t('User %(user)s has no permission to change this ' + 'form.' 
% {'user': request.user.username, }) ) def set_form(): @@ -103,7 +103,7 @@ def get_xform(formid, request, username=None): if not xform: raise exceptions.PermissionDenied(t( - "You do not have permission to view data from this form.")) + 'You do not have permission to view data from this form.')) return xform diff --git a/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py index 7dd379e958..7ba1c7f12b 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py @@ -6,9 +6,10 @@ from kobo.apps.openrosa.apps.main.models.user_profile import UserProfile from kobo.apps.openrosa.libs.mixins.object_lookup_mixin import ObjectLookupMixin from kobo.apps.openrosa.libs.serializers.user_profile_serializer import ( - UserProfileWithTokenSerializer + UserProfileWithTokenSerializer, ) from kpi.utils.object_permission import get_database_user + from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index 505a6046c1..7ce1d73f9d 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -6,25 +6,24 @@ from django.utils.translation import gettext as t from rest_framework import status from rest_framework.decorators import action -from rest_framework.response import Response from rest_framework.exceptions import ParseError +from rest_framework.response import Response from rest_framework.serializers import ValidationError from rest_framework.settings import api_settings from kobo.apps.openrosa.apps.api.exceptions import ( NoConfirmationProvidedAPIException, ) -from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import ( - custom_response_handler, +from kobo.apps.openrosa.apps.api.permissions import ( + EnketoSubmissionEditPermissions, + EnketoSubmissionViewPermissions, + XFormDataPermissions, ) from kobo.apps.openrosa.apps.api.tools import ( add_tags_to_instance, ) -from kobo.apps.openrosa.apps.logger.utils.instance import ( - add_validation_status_to_instance, - delete_instances, - remove_validation_status_from_instance, - set_instance_validation_statuses, +from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import ( + custom_response_handler, ) from kobo.apps.openrosa.apps.logger.exceptions import ( BuildDbQueriesAttributeError, @@ -32,30 +31,32 @@ BuildDbQueriesNoConfirmationProvidedError, MissingValidationStatusPayloadError, ) -from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.apps.logger.models.instance import ( Instance, ) -from kobo.apps.openrosa.libs.renderers import renderers +from kobo.apps.openrosa.apps.logger.models.xform import XForm +from kobo.apps.openrosa.apps.logger.utils.instance import ( + add_validation_status_to_instance, + delete_instances, + remove_validation_status_from_instance, + set_instance_validation_statuses, +) +from kobo.apps.openrosa.libs import filters from kobo.apps.openrosa.libs.mixins.anonymous_user_public_forms_mixin import ( AnonymousUserPublicFormsMixin, ) -from kobo.apps.openrosa.apps.api.permissions import ( - EnketoSubmissionEditPermissions, - EnketoSubmissionViewPermissions, - XFormDataPermissions, -) +from kobo.apps.openrosa.libs.renderers import renderers from kobo.apps.openrosa.libs.serializers.data_serializer import ( - DataSerializer, - DataListSerializer, DataInstanceSerializer, + 
DataListSerializer, + DataSerializer, ) -from kobo.apps.openrosa.libs import filters from kobo.apps.openrosa.libs.utils.viewer_tools import ( EnketoError, get_enketo_submission_url, ) from kpi.utils.object_permission import get_database_user + from ..utils.rest_framework.viewsets import OpenRosaModelViewSet SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS'] @@ -492,11 +493,11 @@ def get_object(self) -> Union[XForm, Instance]: try: int(pk) except ValueError: - raise ParseError(t("Invalid pk `%(pk)s`" % {'pk': pk})) + raise ParseError(t('Invalid pk `%(pk)s`' % {'pk': pk})) try: int(dataid) except ValueError: - raise ParseError(t("Invalid dataid `%(dataid)s`" + raise ParseError(t('Invalid dataid `%(dataid)s`' % {'dataid': dataid})) return get_object_or_404(Instance, pk=dataid, xform__pk=pk) @@ -512,7 +513,7 @@ def _filtered_or_shared_qs(self, qs, pk): filter_kwargs['shared_data'] = True qs = XForm.objects.filter(**filter_kwargs) if not qs: - raise Http404(t("No data matches with given query.")) + raise Http404(t('No data matches with given query.')) return qs @@ -529,13 +530,13 @@ def filter_queryset(self, queryset, view=None): try: int(pk) except ValueError: - raise ParseError(t("Invalid pk %(pk)s" % {'pk': pk})) + raise ParseError(t('Invalid pk %(pk)s' % {'pk': pk})) else: qs = self._filtered_or_shared_qs(qs, pk) return qs - @action(detail=True, methods=["GET", "PATCH", "DELETE"]) + @action(detail=True, methods=['GET', 'PATCH', 'DELETE']) def validation_status(self, request, *args, **kwargs): """ View or modify validation status of specific instance. @@ -667,7 +668,7 @@ def _enketo_request(self, request, action_, *args, **kwargs): def retrieve(self, request, *args, **kwargs): # XML rendering does not a serializer - if request.accepted_renderer.format == "xml": + if request.accepted_renderer.format == 'xml': instance = self.get_object() return Response(instance.xml) else: @@ -693,7 +694,7 @@ def list(self, request, *args, **kwargs): return Response(serializer.data) xform = self.get_object() - query = request.GET.get("query", {}) + query = request.GET.get('query', {}) export_type = kwargs.get('format') if export_type is None or export_type in ['json']: # perform default viewset retrieve, no data export diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py index 5f90eb1735..a2f57c0d06 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py @@ -1,4 +1,5 @@ from datetime import datetime + try: from zoneinfo import ZoneInfo except ImportError: @@ -9,20 +10,25 @@ from django.shortcuts import get_object_or_404 from django.utils import timezone from rest_framework import permissions, status +from rest_framework.decorators import action from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.decorators import action from kobo.apps.openrosa.apps.api.tools import get_media_file_response from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.apps.main.models.meta_data import MetaData from kobo.apps.openrosa.libs import filters -from kobo.apps.openrosa.libs.renderers.renderers import MediaFileContentNegotiation -from kobo.apps.openrosa.libs.renderers.renderers import XFormListRenderer -from kobo.apps.openrosa.libs.renderers.renderers import XFormManifestRenderer -from kobo.apps.openrosa.libs.serializers.xform_serializer import XFormListSerializer -from 
kobo.apps.openrosa.libs.serializers.xform_serializer import XFormManifestSerializer +from kobo.apps.openrosa.libs.renderers.renderers import ( + MediaFileContentNegotiation, + XFormListRenderer, + XFormManifestRenderer, +) +from kobo.apps.openrosa.libs.serializers.xform_serializer import ( + XFormListSerializer, + XFormManifestSerializer, +) from kpi.authentication import DigestAuthentication + from ..utils.rest_framework.viewsets import OpenRosaReadOnlyModelViewSet diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py index 5ca73aaa58..aed4a60467 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py @@ -1,19 +1,18 @@ # coding: utf-8 -import re import io +import re from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as t -from rest_framework import permissions -from rest_framework import status -from rest_framework import mixins +from rest_framework import mixins, permissions, status from rest_framework.authentication import ( BasicAuthentication, + SessionAuthentication, TokenAuthentication, - SessionAuthentication,) +) from rest_framework.exceptions import NotAuthenticated -from rest_framework.response import Response from rest_framework.renderers import BrowsableAPIRenderer, JSONRenderer +from rest_framework.response import Response from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.models import Instance @@ -22,13 +21,14 @@ from kobo.apps.openrosa.libs.renderers.renderers import TemplateXMLRenderer from kobo.apps.openrosa.libs.serializers.data_serializer import SubmissionSerializer from kobo.apps.openrosa.libs.utils.logger_tools import ( + UnauthenticatedEditAttempt, dict2xform, safe_create_instance, - UnauthenticatedEditAttempt, ) from kobo.apps.openrosa.libs.utils.string import dict_lists2strings from kpi.authentication import DigestAuthentication from kpi.utils.object_permission import get_database_user + from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet xml_error_re = re.compile('>(.*)<') @@ -53,7 +53,7 @@ def create_instance_from_json(username, request): if submission is None: # return an error - return [t("No submission key provided."), None] + return [t('No submission key provided.'), None] # convert lists in submission dict to joined strings submission_joined = dict_lists2strings(submission) @@ -196,7 +196,7 @@ def create(self, request, *args, **kwargs): def error_response(self, error, is_json_request, request): if not error: - error_msg = t("Unable to create submission.") + error_msg = t('Unable to create submission.') status_code = status.HTTP_400_BAD_REQUEST elif isinstance(error, str): error_msg = error diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py index 3cdd4b8827..4f785abae6 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py @@ -6,8 +6,7 @@ from django.http import Http404, HttpResponseBadRequest from django.shortcuts import get_object_or_404 from django.utils.translation import gettext as t -from rest_framework import exceptions -from rest_framework import status +from rest_framework import exceptions, status from rest_framework.decorators import action from rest_framework.response import Response from rest_framework.settings import api_settings @@ -20,7 +19,7 @@ from 
kobo.apps.openrosa.libs import filters from kobo.apps.openrosa.libs.exceptions import NoRecordsFoundError from kobo.apps.openrosa.libs.mixins.anonymous_user_public_forms_mixin import ( - AnonymousUserPublicFormsMixin + AnonymousUserPublicFormsMixin, ) from kobo.apps.openrosa.libs.mixins.labels_mixin import LabelsMixin from kobo.apps.openrosa.libs.renderers import renderers @@ -28,9 +27,9 @@ from kobo.apps.openrosa.libs.utils.common_tags import SUBMISSION_TIME from kobo.apps.openrosa.libs.utils.export_tools import ( generate_export, + newset_export_for, should_create_new_export, ) -from kobo.apps.openrosa.libs.utils.export_tools import newset_export_for from kobo.apps.openrosa.libs.utils.logger_tools import response_with_mimetype_and_name from kobo.apps.openrosa.libs.utils.storage import rmdir from kobo.apps.openrosa.libs.utils.string import str2bool @@ -39,6 +38,7 @@ default_kobocat_storage as default_storage, ) from kpi.utils.object_permission import get_database_user + from ..utils.rest_framework.viewsets import OpenRosaModelViewSet EXPORT_EXT = { @@ -89,7 +89,7 @@ def _set_start_end_params(request, query): ) except ValueError: raise exceptions.ParseError( - t("Dates must be in the format YY_MM_DD_hh_mm_ss") + t('Dates must be in the format YY_MM_DD_hh_mm_ss') ) else: query = json.dumps(query) @@ -107,7 +107,7 @@ def _generate_new_export(request, xform, query, export_type): xform.id_string, None, query ) except NoRecordsFoundError: - raise Http404(t("No records found to export")) + raise Http404(t('No records found to export')) else: return export @@ -126,7 +126,7 @@ def _get_owner(request): if owner is None: raise ValidationError( - "User with username %(owner)s does not exist." + 'User with username %(owner)s does not exist.' ) return owner @@ -140,7 +140,7 @@ def response_for_format(form, format=None): if file_path != '' and default_storage.exists(file_path): formatted_data = form.xls else: - raise Http404(t("No XLSForm found.")) + raise Http404(t('No XLSForm found.')) else: formatted_data = json.loads(form.json) return Response(formatted_data) @@ -606,7 +606,7 @@ def form(self, request, format='json', **kwargs): content_type='application/json', status=400) - filename = form.id_string + "." + format + filename = form.id_string + '.' + format response = response_for_format(form, format=format) response['Content-Disposition'] = 'attachment; filename=' + filename diff --git a/kobo/apps/openrosa/apps/logger/app.py b/kobo/apps/openrosa/apps/logger/app.py index e189993c2c..23161116f1 100644 --- a/kobo/apps/openrosa/apps/logger/app.py +++ b/kobo/apps/openrosa/apps/logger/app.py @@ -8,5 +8,4 @@ class LoggerAppConfig(AppConfig): def ready(self): # Makes sure all signal handlers are connected - from . 
import signals super().ready() diff --git a/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py b/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py index ad495779f2..f72c4c584e 100644 --- a/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py +++ b/kobo/apps/openrosa/apps/logger/migrations/0030_backfill_lost_monthly_counters.py @@ -1,9 +1,7 @@ from django.db import migrations from django.db.migrations.recorder import MigrationRecorder -from django.db.models import Sum -from django.db.models import Value, F, DateField -from django.db.models.functions import Cast, Concat -from django.db.models.functions import ExtractYear, ExtractMonth +from django.db.models import DateField, F, Sum, Value +from django.db.models.functions import Cast, Concat, ExtractMonth, ExtractYear from django.utils import timezone from kobo.apps.openrosa.apps.logger.utils.counters import ( diff --git a/kobo/apps/openrosa/apps/logger/migrations/0036_alter_instance_date_created_and_more.py b/kobo/apps/openrosa/apps/logger/migrations/0036_alter_instance_date_created_and_more.py index 032802b94a..357fe42d07 100644 --- a/kobo/apps/openrosa/apps/logger/migrations/0036_alter_instance_date_created_and_more.py +++ b/kobo/apps/openrosa/apps/logger/migrations/0036_alter_instance_date_created_and_more.py @@ -2,9 +2,7 @@ from django.conf import settings from django.db import migrations, models -import django.db.models.deletion -import kobo.apps.openrosa.apps.logger.models.attachment -import kobo.apps.openrosa.apps.logger.models.xform + import kpi.deployment_backends.kc_access.storage import kpi.models.abstract_models diff --git a/kobo/apps/openrosa/apps/logger/models/__init__.py b/kobo/apps/openrosa/apps/logger/models/__init__.py index 6defed3784..e3b90ea8d9 100644 --- a/kobo/apps/openrosa/apps/logger/models/__init__.py +++ b/kobo/apps/openrosa/apps/logger/models/__init__.py @@ -1,12 +1,12 @@ -# coding: utf-8 -from kobo.apps.openrosa.apps.logger.models.attachment import Attachment # flake8: noqa -from kobo.apps.openrosa.apps.logger.models.instance import Instance -from kobo.apps.openrosa.apps.logger.models.survey_type import SurveyType -from kobo.apps.openrosa.apps.logger.models.xform import XForm -from kobo.apps.openrosa.apps.logger.models.note import Note +# flake8: noqa: F401 +from kobo.apps.openrosa.apps.logger.models.attachment import Attachment from kobo.apps.openrosa.apps.logger.models.daily_xform_submission_counter import ( DailyXFormSubmissionCounter, ) +from kobo.apps.openrosa.apps.logger.models.instance import Instance from kobo.apps.openrosa.apps.logger.models.monthly_xform_submission_counter import ( MonthlyXFormSubmissionCounter, ) +from kobo.apps.openrosa.apps.logger.models.note import Note +from kobo.apps.openrosa.apps.logger.models.survey_type import SurveyType +from kobo.apps.openrosa.apps.logger.models.xform import XForm diff --git a/kobo/apps/openrosa/apps/logger/models/attachment.py b/kobo/apps/openrosa/apps/logger/models/attachment.py index 584be6edad..80c38f2c24 100644 --- a/kobo/apps/openrosa/apps/logger/models/attachment.py +++ b/kobo/apps/openrosa/apps/logger/models/attachment.py @@ -8,17 +8,20 @@ from django.db import models from django.utils.http import urlencode +from kobo.apps.openrosa.libs.utils.hash import get_hash from kobo.apps.openrosa.libs.utils.image_tools import ( get_optimized_image_path, resize, ) -from kobo.apps.openrosa.libs.utils.hash import get_hash from kpi.deployment_backends.kc_access.storage import ( 
- default_kobocat_storage as default_storage, KobocatFileSystemStorage, ) +from kpi.deployment_backends.kc_access.storage import ( + default_kobocat_storage as default_storage, +) from kpi.fields.file import ExtendedFileField from kpi.mixins.audio_transcoding import AudioTranscodingMixin + from .instance import Instance diff --git a/kobo/apps/openrosa/apps/logger/models/instance.py b/kobo/apps/openrosa/apps/logger/models/instance.py index d0c564a8b3..640507046c 100644 --- a/kobo/apps/openrosa/apps/logger/models/instance.py +++ b/kobo/apps/openrosa/apps/logger/models/instance.py @@ -35,10 +35,10 @@ MONGO_STRFTIME, NOTES, SUBMISSION_TIME, + SUBMITTED_BY, TAGS, UUID, XFORM_ID_STRING, - SUBMITTED_BY ) from kobo.apps.openrosa.libs.utils.model_tools import set_uuid from kpi.models.abstract_models import AbstractTimeStampedModel @@ -175,7 +175,7 @@ def _set_json(self): self.json = doc def _set_parser(self): - if not hasattr(self, "_parser"): + if not hasattr(self, '_parser'): self._parser = XFormInstanceParser( self.xml, self.xform.data_dictionary()) @@ -318,7 +318,7 @@ def point(self): return gc[0] def save(self, *args, **kwargs): - force = kwargs.pop("force", False) + force = kwargs.pop('force', False) self.check_active(force) diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index 8105064129..f18e976f03 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -9,8 +9,8 @@ from django.apps import apps from django.conf import settings from django.core.exceptions import ObjectDoesNotExist -from django.urls import reverse from django.db import models +from django.urls import reverse from django.utils.encoding import smart_str from django.utils.translation import gettext_lazy as t from taggit.managers import TaggableManager @@ -20,9 +20,9 @@ from kobo.apps.openrosa.koboform.pyxform_utils import convert_csv_to_xls from kobo.apps.openrosa.libs.constants import ( CAN_ADD_SUBMISSIONS, - CAN_VALIDATE_XFORM, CAN_DELETE_DATA_XFORM, CAN_TRANSFER_OWNERSHIP, + CAN_VALIDATE_XFORM, ) from kobo.apps.openrosa.libs.utils.hash import get_hash from kpi.deployment_backends.kc_access.storage import ( @@ -33,7 +33,7 @@ from kpi.utils.xml import XMLFormWithDisclaimer XFORM_TITLE_LENGTH = 255 -title_pattern = re.compile(r"([^<]+)") +title_pattern = re.compile(r'([^<]+)') def upload_to(instance, filename): @@ -75,7 +75,7 @@ class XForm(AbstractTimeStampedModel): id_string = models.SlugField( editable=False, - verbose_name=t("ID"), + verbose_name=t('ID'), max_length=MAX_ID_LENGTH ) title = models.CharField(editable=False, max_length=XFORM_TITLE_LENGTH) @@ -100,10 +100,10 @@ class XForm(AbstractTimeStampedModel): class Meta: app_label = 'logger' - unique_together = (("user", "id_string"),) - verbose_name = t("XForm") - verbose_name_plural = t("XForms") - ordering = ("id_string",) + unique_together = (('user', 'id_string'),) + verbose_name = t('XForm') + verbose_name_plural = t('XForms') + ordering = ('id_string',) permissions = ( (CAN_ADD_SUBMISSIONS, t('Can make submissions to the form')), (CAN_TRANSFER_OWNERSHIP, t('Can transfer form ownership.')), @@ -179,7 +179,7 @@ def has_instances_with_geopoints(self): def _set_id_string(self): matches = self.instance_id_regex.findall(self.xml) if len(matches) != 1: - raise XLSFormError(t("There should be a single id string.")) + raise XLSFormError(t('There should be a single id string.')) self.id_string = matches[0] def _set_title(self): @@ -189,13 +189,13 @@ def 
_set_title(self): title_xml = matches[0][:XFORM_TITLE_LENGTH] if len(matches) != 1: - raise XLSFormError(t("There should be a single title."), matches) + raise XLSFormError(t('There should be a single title.'), matches) if self.title and title_xml != self.title: title_xml = self.title[:XFORM_TITLE_LENGTH] title_xml = xml_escape(title_xml) self.xml = title_pattern.sub( - "%s" % title_xml, self.xml) + '%s' % title_xml, self.xml) self.title = title_xml @@ -229,14 +229,14 @@ def save(self, *args, **kwargs): {'new_id': self.id_string, 'old_id': old_id_string})) if getattr(settings, 'STRICT', True) and \ - not re.search(r"^[\w-]+$", self.id_string): + not re.search(r'^[\w-]+$', self.id_string): raise XLSFormError(t('In strict mode, the XForm ID must be a ' 'valid slug and contain no spaces.')) super().save(*args, **kwargs) def __str__(self): - return getattr(self, "id_string", "") + return getattr(self, 'id_string', '') def submission_count(self, force_update=False): if self.num_of_submissions == 0 or force_update: @@ -244,7 +244,7 @@ def submission_count(self, force_update=False): self.num_of_submissions = count self.save(update_fields=['num_of_submissions']) return self.num_of_submissions - submission_count.short_description = t("Submission Count") + submission_count.short_description = t('Submission Count') def geocoded_submission_count(self): """Number of geocoded submissions.""" @@ -253,7 +253,7 @@ def geocoded_submission_count(self): def time_of_last_submission(self): if self.last_submission_time is None and self.num_of_submissions > 0: try: - last_submission = self.instances.latest("date_created") + last_submission = self.instances.latest('date_created') except ObjectDoesNotExist: pass else: @@ -265,7 +265,7 @@ def time_of_last_submission_update(self): try: # We don't need to filter on `deleted_at` field anymore. # Instances are really deleted and not flagged as deleted. 
- return self.instances.latest("date_modified").date_modified + return self.instances.latest('date_modified').date_modified except ObjectDoesNotExist: pass diff --git a/kobo/apps/openrosa/apps/logger/signals.py b/kobo/apps/openrosa/apps/logger/signals.py index 4e72c598a6..64c2884e68 100644 --- a/kobo/apps/openrosa/apps/logger/signals.py +++ b/kobo/apps/openrosa/apps/logger/signals.py @@ -6,28 +6,25 @@ from django.db import transaction from django.db.models import Case, F, When from django.db.models.signals import ( - post_save, post_delete, + post_save, pre_delete, ) from django.dispatch import receiver from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.models.attachment import Attachment -from kobo.apps.openrosa.apps.logger.models.instance import Instance -from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.apps.logger.models.daily_xform_submission_counter import ( DailyXFormSubmissionCounter, ) +from kobo.apps.openrosa.apps.logger.models.instance import Instance from kobo.apps.openrosa.apps.logger.models.monthly_xform_submission_counter import ( MonthlyXFormSubmissionCounter, ) +from kobo.apps.openrosa.apps.logger.models.xform import XForm from kobo.apps.openrosa.apps.main.models.user_profile import UserProfile +from kobo.apps.openrosa.libs.utils.guardian import assign_perm, get_perms_for_model from kobo.apps.openrosa.libs.utils.image_tools import get_optimized_image_path -from kobo.apps.openrosa.libs.utils.guardian import ( - assign_perm, - get_perms_for_model -) from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) diff --git a/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py b/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py index 3ae291cc20..cf3736af8a 100644 --- a/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py +++ b/kobo/apps/openrosa/apps/logger/tests/models/test_attachment.py @@ -5,8 +5,8 @@ from django.core.files.base import File from django.core.management import call_command -from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.apps.logger.models import Attachment, Instance +from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.libs.utils.image_tools import image_url from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, @@ -18,7 +18,7 @@ class TestAttachment(TestBase): def setUp(self): super().setUp() self._publish_transportation_form_and_submit_instance() - self.media_file = "1335783522563.jpg" + self.media_file = '1335783522563.jpg' media_file = os.path.join( self.this_directory, 'fixtures', @@ -53,7 +53,7 @@ def test_thumbnails(self): self.assertFalse(default_storage.exists(thumbnail)) def test_create_thumbnails_command(self): - call_command("create_image_thumbnails") + call_command('create_image_thumbnails') created_times = {} for attachment in Attachment.objects.filter(instance=self.instance): filename = attachment.media_file.name.replace('.jpg', '') @@ -64,7 +64,7 @@ def test_create_thumbnails_command(self): thumbnail ) # replace or regenerate thumbnails if they exist - call_command("create_image_thumbnails", force=True) + call_command('create_image_thumbnails', force=True) for attachment in Attachment.objects.filter(instance=self.instance): filename = attachment.media_file.name.replace('.jpg', '') for size in settings.THUMB_CONF.keys(): diff --git 
a/kobo/apps/openrosa/apps/logger/tests/test_parsing.py b/kobo/apps/openrosa/apps/logger/tests/test_parsing.py index a80c001373..f322f9bcc4 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_parsing.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_parsing.py @@ -4,18 +4,21 @@ from defusedxml import minidom +from kobo.apps.openrosa.apps.logger.xform_instance_parser import ( + XFormInstanceParser, + _xml_node_to_dict, + clean_and_parse_xml, + get_deprecated_uuid_from_xml, + get_meta_from_xml, + get_uuid_from_xml, + xpath_from_xml_node, +) from kobo.apps.openrosa.apps.main.tests.test_base import TestBase -from kobo.apps.openrosa.apps.logger.xform_instance_parser import XFormInstanceParser,\ - xpath_from_xml_node -from kobo.apps.openrosa.apps.logger.xform_instance_parser import get_uuid_from_xml,\ - get_meta_from_xml, get_deprecated_uuid_from_xml,\ - _xml_node_to_dict, clean_and_parse_xml from kobo.apps.openrosa.libs.utils.common_tags import XFORM_ID_STRING - -XML = "xml" -DICT = "dict" -FLAT_DICT = "flat_dict" +XML = 'xml' +DICT = 'dict' +FLAT_DICT = 'flat_dict' ID = XFORM_ID_STRING @@ -25,15 +28,15 @@ def _publish_and_submit_new_repeats(self): # publish our form which contains some some repeats xls_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - "../fixtures/new_repeats/new_repeats.xls" + '../fixtures/new_repeats/new_repeats.xls' ) self._publish_xls_file_and_set_xform(xls_file_path) # submit an instance xml_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - "../fixtures/new_repeats/instances/" - "new_repeats_2012-07-05-14-33-53.xml" + '../fixtures/new_repeats/instances/' + 'new_repeats_2012-07-05-14-33-53.xml' ) self._make_submission(xml_submission_file_path) @@ -88,7 +91,7 @@ def test_parse_xform_nested_repeats(self): self.assertEqual(flat_dict, expected_flat_dict) def test_xpath_from_xml_node(self): - xml_str = '' \ 'c911d71ce1ac48478e5f8bac99addc4e' \ '-1.2625149 36.7924478 0.0 30.0' \ @@ -96,7 +99,7 @@ def test_xpath_from_xml_node(self): '-1.2625072 36.7924328 0.0 30.0' \ 'What' clean_xml_str = xml_str.strip() - clean_xml_str = re.sub(r">\s+<", "><", clean_xml_str) + clean_xml_str = re.sub(r'>\s+<', '><', clean_xml_str) root_node = minidom.parseString(clean_xml_str).documentElement # get the first top-level gps element gps_node = root_node.firstChild.nextSibling @@ -110,71 +113,71 @@ def test_xpath_from_xml_node(self): def test_get_meta_from_xml(self): with open( os.path.join( - os.path.dirname(__file__), "..", "fixtures", "tutorial", - "instances", "tutorial_2012-06-27_11-27-53_w_uuid_edited.xml"), - "r") as xml_file: + os.path.dirname(__file__), '..', 'fixtures', 'tutorial', + 'instances', 'tutorial_2012-06-27_11-27-53_w_uuid_edited.xml'), + 'r') as xml_file: xml_str = xml_file.read() - instanceID = get_meta_from_xml(xml_str, "instanceID") + instanceID = get_meta_from_xml(xml_str, 'instanceID') self.assertEqual(instanceID, - "uuid:2d8c59eb-94e9-485d-a679-b28ffe2e9b98") - deprecatedID = get_meta_from_xml(xml_str, "deprecatedID") - self.assertEqual(deprecatedID, "uuid:729f173c688e482486a48661700455ff") + 'uuid:2d8c59eb-94e9-485d-a679-b28ffe2e9b98') + deprecatedID = get_meta_from_xml(xml_str, 'deprecatedID') + self.assertEqual(deprecatedID, 'uuid:729f173c688e482486a48661700455ff') def test_get_meta_from_xml_without_uuid_returns_none(self): with open( os.path.join( - os.path.dirname(__file__), "..", "fixtures", "tutorial", - "instances", "tutorial_2012-06-27_11-27-53.xml"), - "r") as xml_file: + os.path.dirname(__file__), '..', 
'fixtures', 'tutorial', + 'instances', 'tutorial_2012-06-27_11-27-53.xml'), + 'r') as xml_file: xml_str = xml_file.read() - instanceID = get_meta_from_xml(xml_str, "instanceID") + instanceID = get_meta_from_xml(xml_str, 'instanceID') self.assertIsNone(instanceID) def test_get_uuid_from_xml(self): with open( os.path.join( - os.path.dirname(__file__), "..", "fixtures", "tutorial", - "instances", "tutorial_2012-06-27_11-27-53_w_uuid.xml"), - "r") as xml_file: + os.path.dirname(__file__), '..', 'fixtures', 'tutorial', + 'instances', 'tutorial_2012-06-27_11-27-53_w_uuid.xml'), + 'r') as xml_file: xml_str = xml_file.read() instanceID = get_uuid_from_xml(xml_str) - self.assertEqual(instanceID, "729f173c688e482486a48661700455ff") + self.assertEqual(instanceID, '729f173c688e482486a48661700455ff') def test_get_deprecated_uuid_from_xml(self): with open( os.path.join( - os.path.dirname(__file__), "..", "fixtures", "tutorial", - "instances", "tutorial_2012-06-27_11-27-53_w_uuid_edited.xml"), - "r") as xml_file: + os.path.dirname(__file__), '..', 'fixtures', 'tutorial', + 'instances', 'tutorial_2012-06-27_11-27-53_w_uuid_edited.xml'), + 'r') as xml_file: xml_str = xml_file.read() deprecatedID = get_deprecated_uuid_from_xml(xml_str) - self.assertEqual(deprecatedID, "729f173c688e482486a48661700455ff") + self.assertEqual(deprecatedID, '729f173c688e482486a48661700455ff') def test_parse_xform_nested_repeats_multiple_nodes(self): self._create_user_and_login() # publish our form which contains some some repeats xls_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - "../fixtures/new_repeats/new_repeats.xls" + '../fixtures/new_repeats/new_repeats.xls' ) self._publish_xls_file_and_set_xform(xls_file_path) # submit an instance xml_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - "../fixtures/new_repeats/instances/" - "multiple_nodes_error.xml" + '../fixtures/new_repeats/instances/' + 'multiple_nodes_error.xml' ) self._make_submission(xml_submission_file_path) def test_xml_repeated_group_to_dict(self): xml_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), - "../fixtures/repeated_group/repeated_group.xml" + '../fixtures/repeated_group/repeated_group.xml' ) json_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), - "../fixtures/repeated_group/repeated_group.json" + '../fixtures/repeated_group/repeated_group.json' ) with open(xml_file) as file: dict_ = _xml_node_to_dict(clean_and_parse_xml(file.read())) diff --git a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py index 30647fdef6..82805834f4 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py @@ -9,8 +9,8 @@ from django.core.management.base import CommandError from pyxform.errors import PyXFormError -from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.apps.logger.models.xform import XForm +from kobo.apps.openrosa.apps.main.tests.test_base import TestBase from kobo.apps.openrosa.libs.utils.logger_tools import report_exception @@ -18,8 +18,8 @@ class TestPublishXLS(TestBase): def test_publish_xls(self): xls_file_path = os.path.join( - self.this_directory, "fixtures", - "transportation", "transportation.xls") + self.this_directory, 'fixtures', + 'transportation', 'transportation.xls') count = XForm.objects.count() call_command('publish_xls', xls_file_path, self.user.username) 
self.assertEqual(XForm.objects.count(), count + 1) @@ -29,14 +29,14 @@ def test_publish_xls(self): def test_publish_xls_replacement(self): count = XForm.objects.count() xls_file_path = os.path.join( - self.this_directory, "fixtures", - "transportation", "transportation.xls") + self.this_directory, 'fixtures', + 'transportation', 'transportation.xls') call_command('publish_xls', xls_file_path, self.user.username) self.assertEqual(XForm.objects.count(), count + 1) count = XForm.objects.count() xls_file_path = os.path.join( - self.this_directory, "fixtures", - "transportation", "transportation_updated.xls") + self.this_directory, 'fixtures', + 'transportation', 'transportation_updated.xls') # call command without replace param with self.assertRaises(CommandError): call_command('publish_xls', xls_file_path, self.user.username) @@ -55,11 +55,11 @@ def test_publish_xls_replacement(self): @unittest.skip('Fails under Django 1.6') def test_line_break_in_variables(self): xls_file_path = os.path.join( - self.this_directory, "fixtures", 'exp_line_break.xlsx') + self.this_directory, 'fixtures', 'exp_line_break.xlsx') xml_file_path = os.path.join( - self.this_directory, "fixtures", 'exp_line_break.xml') + self.this_directory, 'fixtures', 'exp_line_break.xml') test_xml_file_path = os.path.join( - self.this_directory, "fixtures", 'test_exp_line_break.xml') + self.this_directory, 'fixtures', 'test_exp_line_break.xml') self._publish_xls_file(xls_file_path) xforms = XForm.objects.filter(id_string='exp_line_break') self.assertTrue(xforms.count() > 0) @@ -77,23 +77,23 @@ def test_line_break_in_variables(self): os.remove(test_xml_file_path) def test_report_exception_with_exc_info(self): - e = Exception("A test exception") + e = Exception('A test exception') try: raise e except Exception as e: exc_info = sys.exc_info() try: - report_exception(subject="Test report exception", info=e, + report_exception(subject='Test report exception', info=e, exc_info=exc_info) except Exception as e: - raise AssertionError("%s" % e) + raise AssertionError('%s' % e) def test_report_exception_without_exc_info(self): - e = Exception("A test exception") + e = Exception('A test exception') try: - report_exception(subject="Test report exception", info=e) + report_exception(subject='Test report exception', info=e) except Exception as e: - raise AssertionError("%s" % e) + raise AssertionError('%s' % e) def test_publish_invalid_xls_form(self): path = os.path.join( diff --git a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py index a400b9c7ee..c9a54958e9 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py @@ -1,5 +1,5 @@ # coding: utf-8 -from django.test import TestCase, RequestFactory +from django.test import RequestFactory, TestCase from pyxform import SurveyElementBuilder from kobo.apps.kobo_auth.shortcuts import User @@ -7,7 +7,8 @@ from kobo.apps.openrosa.apps.main.models.user_profile import UserProfile from kobo.apps.openrosa.apps.viewer.models.data_dictionary import DataDictionary from kobo.apps.openrosa.libs.utils.logger_tools import ( - create_instance, safe_create_instance + create_instance, + safe_create_instance, ) @@ -50,8 +51,8 @@ def _submit_simple_yes(self): def setUp(self): self.user = User.objects.create( - username="admin", email="sample@example.com") - self.user.set_password("pass") + username='admin', email='sample@example.com') + 
self.user.set_password('pass') UserProfile.objects.get_or_create(user=self.user) self.xform1 = DataDictionary() diff --git a/kobo/apps/openrosa/apps/logger/utils/__init__.py b/kobo/apps/openrosa/apps/logger/utils/__init__.py index 54d1dd56ae..3b8a09ebec 100644 --- a/kobo/apps/openrosa/apps/logger/utils/__init__.py +++ b/kobo/apps/openrosa/apps/logger/utils/__init__.py @@ -1,4 +1,4 @@ +# flake8: noqa: F401 from .counters import delete_null_user_daily_counters from .database_query import build_db_queries -from .instance import delete_instances -from .instance import set_instance_validation_statuses +from .instance import delete_instances, set_instance_validation_statuses diff --git a/kobo/apps/openrosa/apps/logger/utils/database_query.py b/kobo/apps/openrosa/apps/logger/utils/database_query.py index b8a81db872..c47dbae4ac 100644 --- a/kobo/apps/openrosa/apps/logger/utils/database_query.py +++ b/kobo/apps/openrosa/apps/logger/utils/database_query.py @@ -3,6 +3,7 @@ import json from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance + from ..exceptions import ( BuildDbQueriesAttributeError, BuildDbQueriesBadArgumentError, diff --git a/kobo/apps/openrosa/apps/logger/utils/instance.py b/kobo/apps/openrosa/apps/logger/utils/instance.py index f87c71612f..1c6ec30b9c 100644 --- a/kobo/apps/openrosa/apps/logger/utils/instance.py +++ b/kobo/apps/openrosa/apps/logger/utils/instance.py @@ -2,7 +2,7 @@ import time from django.conf import settings -from django.db.models.signals import pre_delete, post_delete +from django.db.models.signals import post_delete, pre_delete from kobo.apps.openrosa.apps.logger.signals import ( nullify_exports_time_of_last_submission, @@ -11,11 +11,10 @@ from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance from kobo.apps.openrosa.apps.viewer.signals import remove_from_mongo - -from .database_query import build_db_queries from ..exceptions import MissingValidationStatusPayloadError -from ..models.xform import XForm from ..models.instance import Instance +from ..models.xform import XForm +from .database_query import build_db_queries def add_validation_status_to_instance( diff --git a/kobo/apps/openrosa/apps/logger/xform_instance_parser.py b/kobo/apps/openrosa/apps/logger/xform_instance_parser.py index bd1e6d0077..1deea34511 100644 --- a/kobo/apps/openrosa/apps/logger/xform_instance_parser.py +++ b/kobo/apps/openrosa/apps/logger/xform_instance_parser.py @@ -4,8 +4,8 @@ import re import sys from datetime import datetime -from xml.dom import Node from typing import Optional +from xml.dom import Node import dateutil.parser import six @@ -22,22 +22,22 @@ class XLSFormError(Exception): class DuplicateInstance(Exception): def __str__(self): - return t("Duplicate Instance") + return t('Duplicate Instance') class InstanceInvalidUserError(Exception): def __str__(self): - return t("Could not determine the user.") + return t('Could not determine the user.') class InstanceParseError(Exception): def __str__(self): - return t("The instance could not be parsed.") + return t('The instance could not be parsed.') class InstanceEmptyError(InstanceParseError): def __str__(self): - return t("Empty instance") + return t('Empty instance') class InstanceMultipleNodeError(Exception): @@ -50,12 +50,12 @@ def get_meta_from_xml(xml_str, meta_name): # children ideally contains a single element # that is the parent of all survey elements if children.length == 0: - raise ValueError(t("XML string must have a survey element.")) + raise ValueError(t('XML string 
must have a survey element.')) survey_node = children[0] meta_tags = [n for n in survey_node.childNodes if n.nodeType == Node.ELEMENT_NODE and - (n.tagName.lower() == "meta" or - n.tagName.lower() == "orx:meta")] + (n.tagName.lower() == 'meta' or + n.tagName.lower() == 'orx:meta')] if len(meta_tags) == 0: return None @@ -80,8 +80,8 @@ def _uuid_only(uuid, regex): if matches and len(matches.groups()) > 0: return matches.groups()[0] return None - uuid = get_meta_from_xml(xml, "instanceID") - regex = re.compile(r"uuid:(.*)") + uuid = get_meta_from_xml(xml, 'instanceID') + regex = re.compile(r'uuid:(.*)') if uuid: return _uuid_only(uuid, regex) # check in survey_node attributes @@ -90,7 +90,7 @@ def _uuid_only(uuid, regex): # children ideally contains a single element # that is the parent of all survey elements if children.length == 0: - raise ValueError(t("XML string must have a survey element.")) + raise ValueError(t('XML string must have a survey element.')) survey_node = children[0] uuid = survey_node.getAttribute('instanceID') if uuid != '': @@ -105,7 +105,7 @@ def get_submission_date_from_xml(xml) -> Optional[datetime]: # children ideally contains a single element # that is the parent of all survey elements if children.length == 0: - raise ValueError(t("XML string must have a survey element.")) + raise ValueError(t('XML string must have a survey element.')) survey_node = children[0] submission_date = survey_node.getAttribute('submissionDate') if submission_date != '': @@ -114,8 +114,8 @@ def get_submission_date_from_xml(xml) -> Optional[datetime]: def get_deprecated_uuid_from_xml(xml): - uuid = get_meta_from_xml(xml, "deprecatedID") - regex = re.compile(r"uuid:(.*)") + uuid = get_meta_from_xml(xml, 'deprecatedID') + regex = re.compile(r'uuid:(.*)') if uuid: matches = regex.match(uuid) if matches and len(matches.groups()) > 0: @@ -211,7 +211,7 @@ def _flatten_dict(d, prefix): # hack: removing [1] index to be consistent across # surveys that have a single repitition of the # loop versus mutliple. - item_prefix[-1] += "[%s]" % str(i + 1) + item_prefix[-1] += '[%s]' % str(i + 1) if type(item) == dict: for pair in _flatten_dict(item, item_prefix): yield pair @@ -242,10 +242,10 @@ def _flatten_dict_nest_repeats(d, prefix): for path, value in \ _flatten_dict_nest_repeats(item, item_prefix): # TODO: this only considers the first level of repeats - repeat.update({"/".join(path[1:]): value}) + repeat.update({'/'.join(path[1:]): value}) repeats.append(repeat) else: - repeats.append({"/".join(item_prefix[1:]): item}) + repeats.append({'/'.join(item_prefix[1:]): item}) yield new_prefix, repeats else: yield new_prefix, value @@ -262,14 +262,14 @@ def _gather_parent_node_list(node): def xpath_from_xml_node(node): node_names = _gather_parent_node_list(node) - return "/".join(node_names[1:]) + return '/'.join(node_names[1:]) def _get_all_attributes(node): """ Go through an XML document returning all the attributes we see. 
""" - if hasattr(node, "hasAttributes") and node.hasAttributes(): + if hasattr(node, 'hasAttributes') and node.hasAttributes(): for key in node.attributes.keys(): yield key, node.getAttribute(key) for child in node.childNodes: @@ -287,7 +287,7 @@ def __init__(self, xml_str, data_dictionary): try: self.parse(xml_str) except Exception as e: - logger = logging.getLogger("console_logger") + logger = logging.getLogger('console_logger') logger.error( "Failed to parse instance '%s'" % xml_str, exc_info=True) # `self.parse()` has been wrapped in to try/except but it makes the @@ -300,12 +300,12 @@ def parse(self, xml_str): self._xml_obj = clean_and_parse_xml(xml_str) self._root_node = self._xml_obj.documentElement repeats = [e.get_abbreviated_xpath() - for e in self.dd.get_survey_elements_of_type("repeat")] + for e in self.dd.get_survey_elements_of_type('repeat')] self._dict = _xml_node_to_dict(self._root_node, repeats) if self._dict is None: raise InstanceEmptyError for path, value in _flatten_dict_nest_repeats(self._dict, []): - self._flat_dict["/".join(path[1:])] = value + self._flat_dict['/'.join(path[1:])] = value self._set_attributes() def get_root_node(self): @@ -343,7 +343,7 @@ def _set_attributes(self): self._attributes[key] = value def get_xform_id_string(self): - return self._attributes.get("id") + return self._attributes.get('id') def get_flat_dict_with_attributes(self): result = self.to_flat_dict().copy() diff --git a/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py b/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py index 9822e38ae7..403bd7ce14 100644 --- a/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py +++ b/kobo/apps/openrosa/apps/main/migrations/0011_drop_old_kpi_tables.py @@ -1,8 +1,7 @@ # Generated by Django 3.2.15 on 2023-03-22 14:05 -from django.db import migrations, connections from django.conf import settings - +from django.db import connections, migrations KPI_TABLES = [ 'constance_config', diff --git a/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py b/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py index 9d5c0ce7b0..495ea3dcd4 100644 --- a/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py +++ b/kobo/apps/openrosa/apps/main/migrations/0014_drop_old_formdisclaimer_tables.py @@ -1,8 +1,7 @@ # Generated by Django 3.2.15 on 2023-03-22 14:05 -from django.db import migrations, connections from django.conf import settings - +from django.db import connections, migrations KC_FORM_DISCLAIMER_TABLES = [ 'form_disclaimer_formdisclaimer', diff --git a/kobo/apps/openrosa/apps/main/migrations/0016_drop_old_restservice_tables.py b/kobo/apps/openrosa/apps/main/migrations/0016_drop_old_restservice_tables.py index 942d5a5ede..a54359d6fc 100644 --- a/kobo/apps/openrosa/apps/main/migrations/0016_drop_old_restservice_tables.py +++ b/kobo/apps/openrosa/apps/main/migrations/0016_drop_old_restservice_tables.py @@ -1,8 +1,7 @@ # Generated by Django 4.2.11 on 2024-07-31 15:59 -from django.db import migrations, connections from django.conf import settings - +from django.db import connections, migrations KC_REST_SERVICES_TABLES = [ 'restservice_restservice', diff --git a/kobo/apps/openrosa/apps/main/models/meta_data.py b/kobo/apps/openrosa/apps/main/models/meta_data.py index a853492bf3..fcfb51ccaa 100644 --- a/kobo/apps/openrosa/apps/main/models/meta_data.py +++ b/kobo/apps/openrosa/apps/main/models/meta_data.py @@ -1,15 +1,15 @@ import mimetypes import os 
-import requests from contextlib import closing from urllib.parse import urlparse +import requests +from django.conf import settings from django.core.exceptions import ValidationError from django.core.files.temp import NamedTemporaryFile from django.core.files.uploadedfile import InMemoryUploadedFile from django.core.validators import URLValidator from django.db import models -from django.conf import settings from requests.exceptions import RequestException from kobo.apps.openrosa.apps.logger.models import XForm diff --git a/kobo/apps/openrosa/apps/main/models/user_profile.py b/kobo/apps/openrosa/apps/main/models/user_profile.py index e7b9f6475f..5eb49da58a 100644 --- a/kobo/apps/openrosa/apps/main/models/user_profile.py +++ b/kobo/apps/openrosa/apps/main/models/user_profile.py @@ -39,8 +39,8 @@ class UserProfile(models.Model): class Meta: app_label = 'main' permissions = ( - ('can_add_xform', "Can add/upload an xform to user profile"), - ('view_profile', "Can view user profile"), + ('can_add_xform', 'Can add/upload an xform to user profile'), + ('view_profile', 'Can view user profile'), ) def __str__(self): diff --git a/kobo/apps/openrosa/apps/main/tests/test_base.py b/kobo/apps/openrosa/apps/main/tests/test_base.py index be22d72e3f..679e4fa332 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_base.py +++ b/kobo/apps/openrosa/apps/main/tests/test_base.py @@ -2,8 +2,8 @@ import os import socket from io import BytesIO -from urllib.request import urlopen from urllib.error import URLError +from urllib.request import urlopen from django.conf import settings from django.contrib.auth.models import AnonymousUser, Permission @@ -11,18 +11,20 @@ from django.test import TestCase from django.test.client import Client from django.utils import timezone -from django_digest.test import Client as DigestClient from rest_framework.test import APIRequestFactory +from django_digest.test import Client as DigestClient from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.models import XForm, Attachment +from kobo.apps.openrosa.apps.logger.models import Attachment, XForm from kobo.apps.openrosa.apps.main.models import UserProfile -from kobo.apps.openrosa.libs.tests.mixins.make_submission_mixin import MakeSubmissionMixin +from kobo.apps.openrosa.libs.tests.mixins.make_submission_mixin import ( + MakeSubmissionMixin, +) from kobo.apps.openrosa.libs.tests.mixins.request_mixin import RequestMixin -from kobo.apps.openrosa.libs.utils.string import base64_encodestring from kobo.apps.openrosa.libs.utils.logger_tools import ( publish_xls_form, ) +from kobo.apps.openrosa.libs.utils.string import base64_encodestring class TestBase(RequestMixin, MakeSubmissionMixin, TestCase): @@ -76,7 +78,7 @@ def _logout(self, client=None): client = self.client client.logout() - def _create_user_and_login(self, username="bob", password="bob"): + def _create_user_and_login(self, username='bob', password='bob'): self.login_username = username self.login_password = password self.user = self._create_user(username, password) @@ -119,9 +121,9 @@ def _share_form_data(self, id_string='transportation_2011_07_25'): def _publish_transportation_form(self): xls_path = os.path.join( self.this_directory, - "fixtures", - "transportation", - "transportation.xls", + 'fixtures', + 'transportation', + 'transportation.xls', ) count = XForm.objects.count() TestBase._publish_xls_file(self, xls_path) @@ -141,7 +143,7 @@ def _submit_transport_instance_w_uuid(self, name): def _submit_transport_instance_w_attachment(self, 
survey_at=0): s = self.surveys[survey_at] - media_file = "1335783522563.jpg" + media_file = '1335783522563.jpg' self._make_submission_w_attachment( os.path.join( self.this_directory, diff --git a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py index afe5c0989b..46873a27bc 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py +++ b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py @@ -4,7 +4,8 @@ import pytest from pyxform.errors import PyXFormError -from kobo.apps.openrosa.apps.logger.models import XForm, Instance +from kobo.apps.openrosa.apps.logger.models import Instance, XForm + from .test_base import TestBase diff --git a/kobo/apps/openrosa/apps/main/tests/test_process.py b/kobo/apps/openrosa/apps/main/tests/test_process.py index c86efb9a9c..eba5974323 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_process.py +++ b/kobo/apps/openrosa/apps/main/tests/test_process.py @@ -8,20 +8,20 @@ from xml.dom import Node from defusedxml import minidom -from django.urls import reverse from django.conf import settings -from django_digest.test import Client as DigestClient from django.core.files.uploadedfile import UploadedFile +from django.urls import reverse -from kobo.apps.openrosa.apps.main.models import MetaData +from django_digest.test import Client as DigestClient from kobo.apps.openrosa.apps.logger.models import XForm from kobo.apps.openrosa.apps.logger.models.xform import XFORM_TITLE_LENGTH from kobo.apps.openrosa.apps.logger.xform_instance_parser import clean_and_parse_xml +from kobo.apps.openrosa.apps.main.models import MetaData from kobo.apps.openrosa.apps.viewer.models.data_dictionary import DataDictionary -from kobo.apps.openrosa.libs.utils.common_tags import UUID, SUBMISSION_TIME +from kobo.apps.openrosa.libs.utils.common_tags import SUBMISSION_TIME, UUID from kobo.apps.openrosa.libs.utils.hash import get_hash -from .test_base import TestBase +from .test_base import TestBase uuid_regex = re.compile( r'(.*uuid[^//]+="\')([^\']+)(\'".*)', re.DOTALL) @@ -90,7 +90,7 @@ def test_publish_xlsx_file(self): # containing the files you would like to test. # DO NOT CHECK IN PRIVATE XLS FILES!! 
def test_upload_all_xls(self): - root_dir = os.path.join(self.this_directory, "fixtures", "online_xls") + root_dir = os.path.join(self.this_directory, 'fixtures', 'online_xls') if os.path.exists(root_dir): success = True for root, sub_folders, filenames in os.walk(root_dir): @@ -151,25 +151,25 @@ def _download_xform(self): response = client.get(self.download_url) response_doc = minidom.parseString(response.content) - xml_path = os.path.join(self.this_directory, "fixtures", - "transportation", "transportation.xml") + xml_path = os.path.join(self.this_directory, 'fixtures', + 'transportation', 'transportation.xml') with open(xml_path) as xml_file: expected_doc = minidom.parse(xml_file) model_node = [ n for n in - response_doc.getElementsByTagName("h:head")[0].childNodes + response_doc.getElementsByTagName('h:head')[0].childNodes if n.nodeType == Node.ELEMENT_NODE and - n.tagName == "model"][0] + n.tagName == 'model'][0] # check for UUID and remove uuid_nodes = [node for node in model_node.childNodes if node.nodeType == Node.ELEMENT_NODE and - node.getAttribute("nodeset") == - "/transportation/formhub/uuid"] + node.getAttribute('nodeset') == + '/transportation/formhub/uuid'] self.assertEqual(len(uuid_nodes), 1) uuid_node = uuid_nodes[0] - uuid_node.setAttribute("calculate", "''") + uuid_node.setAttribute('calculate', "''") # check content without UUID self.assertEqual(response_doc.toxml(), expected_doc.toxml()) @@ -186,8 +186,8 @@ def _check_data_dictionary(self): qs = DataDictionary.objects.filter(user=self.user) self.assertEqual(qs.count(), 1) self.data_dictionary = DataDictionary.objects.all()[0] - with open(os.path.join(self.this_directory, "fixtures", - "transportation", "headers.json")) as f: + with open(os.path.join(self.this_directory, 'fixtures', + 'transportation', 'headers.json')) as f: expected_list = json.load(f) self.assertEqual(self.data_dictionary.get_headers(), expected_list) @@ -198,24 +198,24 @@ def _check_data_dictionary(self): def _check_data_for_csv_export(self): data = [ - {"available_transportation_types_to_referral_facility/ambulance": + {'available_transportation_types_to_referral_facility/ambulance': True, - "available_transportation_types_to_referral_facility/bicycle": + 'available_transportation_types_to_referral_facility/bicycle': True, - self.ambulance_key: "daily", - self.bicycle_key: "weekly" + self.ambulance_key: 'daily', + self.bicycle_key: 'weekly' }, {}, - {"available_transportation_types_to_referral_facility/ambulance": + {'available_transportation_types_to_referral_facility/ambulance': True, - self.ambulance_key: "weekly", + self.ambulance_key: 'weekly', }, - {"available_transportation_types_to_referral_facility/taxi": True, - "available_transportation_types_to_referral_facility/other": True, - "available_transportation_types_to_referral_facility_other": - "camel", - self.taxi_key: "daily", - self.other_key: "other", + {'available_transportation_types_to_referral_facility/taxi': True, + 'available_transportation_types_to_referral_facility/other': True, + 'available_transportation_types_to_referral_facility_other': + 'camel', + self.taxi_key: 'daily', + self.other_key: 'other', } ] for d_from_db in self.data_dictionary.get_data_for_excel(): @@ -239,31 +239,31 @@ def _check_group_xpaths_do_not_appear_in_dicts_for_export(self): instance = i expected_dict = { - "transportation": { - "meta": { - "instanceID": uuid + 'transportation': { + 'meta': { + 'instanceID': uuid }, - "transport": { - "loop_over_transport_types_frequency": {"bicycle": { - 
"frequency_to_referral_facility": "weekly" + 'transport': { + 'loop_over_transport_types_frequency': {'bicycle': { + 'frequency_to_referral_facility': 'weekly' }, - "ambulance": { - "frequency_to_referral_facility": "daily" + 'ambulance': { + 'frequency_to_referral_facility': 'daily' } }, - "available_transportation_types_to_referral_facility": - "ambulance bicycle", + 'available_transportation_types_to_referral_facility': + 'ambulance bicycle', } } } self.assertEqual(instance.get_dict(flat=False), expected_dict) expected_dict = { - "transport/available_transportation_types_to_referral_facility": - "ambulance bicycle", - self.transport_ambulance_key: "daily", - self.transport_bicycle_key: "weekly", - "_xform_id_string": "transportation_2011_07_25", - "meta/instanceID": uuid + 'transport/available_transportation_types_to_referral_facility': + 'ambulance bicycle', + self.transport_ambulance_key: 'daily', + self.transport_bicycle_key: 'weekly', + '_xform_id_string': 'transportation_2011_07_25', + 'meta/instanceID': uuid } self.assertEqual(instance.get_dict(), expected_dict) @@ -275,14 +275,14 @@ def _get_csv_(self): response = self.client.get(url) self.assertEqual(response.status_code, 200) actual_csv = self._get_response_content(response) - actual_lines = actual_csv.split("\n") + actual_lines = actual_csv.split('\n') return csv.reader(actual_lines) def _check_csv_export_first_pass(self): actual_csv = self._get_csv_() f = open(os.path.join( - self.this_directory, "fixtures", - "transportation", "transportation.csv"), "r") + self.this_directory, 'fixtures', + 'transportation', 'transportation.csv'), 'r') expected_csv = csv.reader(f) for actual_row, expected_row in zip(actual_csv, expected_csv): for actual_cell, expected_cell in zip(actual_row, expected_row): @@ -295,7 +295,7 @@ def _check_csv_export_second_pass(self): response = self.client.get(url) self.assertEqual(response.status_code, 200) actual_csv = self._get_response_content(response) - actual_lines = actual_csv.split("\n") + actual_lines = actual_csv.split('\n') actual_csv = csv.reader(actual_lines) headers = next(actual_csv) data = [ @@ -304,33 +304,33 @@ def _check_csv_export_second_pass(self): '_submission_time': '2013-02-14T15:37:21', '_tags': '', '_notes': '' }, - {"available_transportation_types_to_referral_facility/ambulance": - "True", - "available_transportation_types_to_referral_facility/bicycle": - "True", - self.ambulance_key: "daily", - self.bicycle_key: "weekly", - "meta/instanceID": "uuid:f3d8dc65-91a6-4d0f-9e97-802128083390", + {'available_transportation_types_to_referral_facility/ambulance': + 'True', + 'available_transportation_types_to_referral_facility/bicycle': + 'True', + self.ambulance_key: 'daily', + self.bicycle_key: 'weekly', + 'meta/instanceID': 'uuid:f3d8dc65-91a6-4d0f-9e97-802128083390', '_uuid': 'f3d8dc65-91a6-4d0f-9e97-802128083390', '_submission_time': '2013-02-14T15:37:22', '_tags': '', '_notes': '' }, - {"available_transportation_types_to_referral_facility/ambulance": - "True", - self.ambulance_key: "weekly", - "meta/instanceID": "uuid:9c6f3468-cfda-46e8-84c1-75458e72805d", + {'available_transportation_types_to_referral_facility/ambulance': + 'True', + self.ambulance_key: 'weekly', + 'meta/instanceID': 'uuid:9c6f3468-cfda-46e8-84c1-75458e72805d', '_uuid': '9c6f3468-cfda-46e8-84c1-75458e72805d', '_submission_time': '2013-02-14T15:37:23', '_tags': '', '_notes': '' }, - {"available_transportation_types_to_referral_facility/taxi": - "True", - "available_transportation_types_to_referral_facility/other": - 
"True", - "available_transportation_types_to_referral_facility_other": - "camel", - self.taxi_key: "daily", - "meta/instanceID": "uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf", + {'available_transportation_types_to_referral_facility/taxi': + 'True', + 'available_transportation_types_to_referral_facility/other': + 'True', + 'available_transportation_types_to_referral_facility_other': + 'camel', + self.taxi_key: 'daily', + 'meta/instanceID': 'uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf', '_uuid': '9f0a1508-c3b7-4c99-be00-9b237c26bcbf', '_submission_time': '2013-02-14T15:37:24', '_tags': '', '_notes': '' @@ -342,14 +342,14 @@ def _check_csv_export_second_pass(self): d = dict(zip(headers, row)) d_iter = dict(d) for k, v in d_iter.items(): - if v in ["n/a", "False"] or k in dd._additional_headers(): + if v in ['n/a', 'False'] or k in dd._additional_headers(): del d[k] l = [] for k, v in expected_dict.items(): - if k == 'meta/instanceID' or k.startswith("_"): + if k == 'meta/instanceID' or k.startswith('_'): l.append((k, v)) else: - l.append(("transport/" + k, v)) + l.append(('transport/' + k, v)) self.assertEqual(d, dict(l)) def _check_delete(self): @@ -378,8 +378,8 @@ def test_publish_bad_xls_with_unicode_in_error(self): def test_metadata_file_hash(self): self._publish_transportation_form() - src = os.path.join(self.this_directory, "fixtures", - "transportation", "screenshot.png") + src = os.path.join(self.this_directory, 'fixtures', + 'transportation', 'screenshot.png') uf = UploadedFile(file=open(src, 'rb'), content_type='image/png') count = MetaData.objects.count() MetaData.media_upload(self.xform, uf) @@ -412,11 +412,11 @@ def test_uuid_injection_in_cascading_select(self): xml = clean_and_parse_xml(xform.xml) # check for instance nodes that are direct children of the model node - model_node = xml.getElementsByTagName("model")[0] + model_node = xml.getElementsByTagName('model')[0] instance_nodes = [node for node in model_node.childNodes if node.nodeType == Node.ELEMENT_NODE and - node.tagName.lower() == "instance" and - not node.hasAttribute("id")] + node.tagName.lower() == 'instance' and + not node.hasAttribute('id')] self.assertEqual(len(instance_nodes), 1) instance_node = instance_nodes[0] @@ -424,37 +424,37 @@ def test_uuid_injection_in_cascading_select(self): # id_string form_nodes = [node for node in instance_node.childNodes if node.nodeType == Node.ELEMENT_NODE and - node.getAttribute("id") == xform.id_string] + node.getAttribute('id') == xform.id_string] form_node = form_nodes[0] # find the formhub node that has a uuid child node - formhub_nodes = form_node.getElementsByTagName("formhub") + formhub_nodes = form_node.getElementsByTagName('formhub') self.assertEqual(len(formhub_nodes), 1) - uuid_nodes = formhub_nodes[0].getElementsByTagName("uuid") + uuid_nodes = formhub_nodes[0].getElementsByTagName('uuid') self.assertEqual(len(uuid_nodes), 1) # check for the calculate bind calculate_bind_nodes = [node for node in model_node.childNodes if node.nodeType == Node.ELEMENT_NODE and - node.tagName == "bind" and - node.getAttribute("nodeset") == - "/%s/formhub/uuid" % xform.id_string] + node.tagName == 'bind' and + node.getAttribute('nodeset') == + '/%s/formhub/uuid' % xform.id_string] self.assertEqual(len(calculate_bind_nodes), 1) calculate_bind_node = calculate_bind_nodes[0] self.assertEqual( - calculate_bind_node.getAttribute("calculate"), "'%s'" % xform.uuid) + calculate_bind_node.getAttribute('calculate'), "'%s'" % xform.uuid) def test_truncate_xform_title_to_255(self): 
self._publish_transportation_form()
-        title = "a" * (XFORM_TITLE_LENGTH + 1)
+        title = 'a' * (XFORM_TITLE_LENGTH + 1)
         groups = re.match(
-            r"(.+<h:title>)([^<]+)(</h:title>.*)",
+            r'(.+<h:title>)([^<]+)(</h:title>.*)',
             self.xform.xml, re.DOTALL).groups()
-        self.xform.xml = "{0}<h:title>{1}</h:title>{2}".format(
+        self.xform.xml = '{0}<h:title>{1}</h:title>{2}'.format(
             groups[0], title, groups[2])
         self.xform.title = title
         self.xform.save()
-        self.assertEqual(self.xform.title, "a" * XFORM_TITLE_LENGTH)
+        self.assertEqual(self.xform.title, 'a' * XFORM_TITLE_LENGTH)
 
     @unittest.skip('Fails under Django 1.6')
     def test_multiple_submissions_by_different_users(self):
@@ -463,4 +463,4 @@ def test_multiple_submissions_by_different_users(self):
         CSV export would break.
         """
         TestProcess.test_process(self)
-        TestProcess.test_process(self, "doug", "doug")
+        TestProcess.test_process(self, 'doug', 'doug')
diff --git a/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py b/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py
index 4d0d88a9db..a3fede2ed2 100644
--- a/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py
+++ b/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py
@@ -5,6 +5,7 @@
 from django.db.utils import IntegrityError
 
 from kobo.apps.openrosa.apps.logger.models import XForm
+
 from .test_base import TestBase
 
 
diff --git a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py
index d4f624dd36..10728d5d15 100644
--- a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py
+++ b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py
@@ -8,25 +8,24 @@
 from django.utils.translation import gettext as t
 from pymongo.errors import PyMongoError
 
-from kobo.celery import celery_app
+from kobo.apps.hook.utils.services import call_services
 from kobo.apps.openrosa.apps.api.mongo_helper import MongoHelper
-from kobo.apps.openrosa.apps.logger.models import Instance
-from kobo.apps.openrosa.apps.logger.models import Note
+from kobo.apps.openrosa.apps.logger.models import Instance, Note
 from kobo.apps.openrosa.libs.utils.common_tags import (
-    ID,
-    UUID,
     ATTACHMENTS,
     GEOLOCATION,
-    SUBMISSION_TIME,
+    ID,
     MONGO_STRFTIME,
-    TAGS,
     NOTES,
+    SUBMISSION_TIME,
     SUBMITTED_BY,
-    VALIDATION_STATUS
+    TAGS,
+    UUID,
+    VALIDATION_STATUS,
 )
 from kobo.apps.openrosa.libs.utils.decorators import apply_form_field_names
 from kobo.apps.openrosa.libs.utils.model_tools import queryset_iterator
-from kobo.apps.hook.utils.services import call_services
+from kobo.celery import celery_app
 from kpi.utils.log import logging
 
 # this is Mongo Collection where we will store the parsed submissions
@@ -68,7 +67,7 @@ class ParsedInstance(models.Model):
     DEFAULT_LIMIT = 30000
     DEFAULT_BATCHSIZE = 1000
 
-    instance = models.OneToOneField(Instance, related_name="parsed_instance", on_delete=models.CASCADE)
+    instance = models.OneToOneField(Instance, related_name='parsed_instance', on_delete=models.CASCADE)
     start_time = models.DateTimeField(null=True)
     end_time = models.DateTimeField(null=True)
     # TODO: decide if decimal field is better than float field.
@@ -108,7 +107,7 @@ def query_mongo(cls, username, id_string, query, fields, sort, start=0, sort = sort if sort else {} if start < 0 or limit < 0: - raise ValueError(t("Invalid start/limit params")) + raise ValueError(t('Invalid start/limit params')) return cls._get_paginated_and_sorted_cursor(cursor, start, limit, sort) @@ -124,9 +123,9 @@ def mongo_aggregate(cls, query, pipeline): query = json.loads( query, object_hook=json_util.object_hook) if query else {} if not (isinstance(pipeline, dict) or isinstance(pipeline, list)): - raise Exception(t("Invalid pipeline! %s" % pipeline)) + raise Exception(t('Invalid pipeline! %s' % pipeline)) if not isinstance(query, dict): - raise Exception(t("Invalid query! %s" % query)) + raise Exception(t('Invalid query! %s' % query)) query = MongoHelper.to_safe_dict(query) k = [{'$match': query}] if isinstance(pipeline, list): @@ -160,7 +159,7 @@ def query_mongo_minimal( sort = sort if sort else {} if start < 0 or limit < 0: - raise ValueError(t("Invalid start/limit params")) + raise ValueError(t('Invalid start/limit params')) if limit > cls.DEFAULT_LIMIT: limit = cls.DEFAULT_LIMIT @@ -283,14 +282,14 @@ def to_dict_for_mongo(self): def update_mongo(self, asynchronous=True): d = self.to_dict_for_mongo() - if d.get("_xform_id_string") is None: + if d.get('_xform_id_string') is None: # if _xform_id_string, Instance could not be parsed. # so, we don't update mongo. return False else: if asynchronous: # TODO update self.instance after async save is made - update_mongo_instance.apply_async((), {"record": d}) + update_mongo_instance.apply_async((), {'record': d}) else: success = update_mongo_instance(d) # Only update self.instance is `success` is different from @@ -316,7 +315,7 @@ def bulk_delete(query): return xform_instances.delete_many(query) def to_dict(self): - if not hasattr(self, "_dict_cache"): + if not hasattr(self, '_dict_cache'): self._dict_cache = self.instance.get_dict() return self._dict_cache @@ -342,8 +341,7 @@ def _get_name_for_type(self, type_value): def get_data_dictionary(self): # TODO: fix hack to get around a circular import - from kobo.apps.openrosa.apps.viewer.models.data_dictionary import\ - DataDictionary + from kobo.apps.openrosa.apps.viewer.models.data_dictionary import DataDictionary return DataDictionary.objects.get( user=self.instance.xform.user, id_string=self.instance.xform.id_string diff --git a/kobo/apps/openrosa/libs/filters.py b/kobo/apps/openrosa/libs/filters.py index e28dcec9bd..e5f52186be 100644 --- a/kobo/apps/openrosa/libs/filters.py +++ b/kobo/apps/openrosa/libs/filters.py @@ -181,7 +181,7 @@ def filter_queryset(self, request, queryset, view): try: int(instance_id) except ValueError: - raise ParseError("Invalid value for instance %s." % instance_id) + raise ParseError('Invalid value for instance %s.' 
% instance_id) instance = get_object_or_404(Instance, pk=instance_id) queryset = queryset.filter(instance=instance) diff --git a/kobo/apps/openrosa/libs/serializers/xform_serializer.py b/kobo/apps/openrosa/libs/serializers/xform_serializer.py index f9ad5c0685..866fd8ddd8 100644 --- a/kobo/apps/openrosa/libs/serializers/xform_serializer.py +++ b/kobo/apps/openrosa/libs/serializers/xform_serializer.py @@ -8,8 +8,8 @@ from kobo.apps.openrosa.apps.logger.models import XForm from kobo.apps.openrosa.libs.permissions import get_object_users_with_permissions from kobo.apps.openrosa.libs.serializers.fields.boolean_field import BooleanField -from kobo.apps.openrosa.libs.serializers.tag_list_serializer import TagListSerializer from kobo.apps.openrosa.libs.serializers.metadata_serializer import MetaDataSerializer +from kobo.apps.openrosa.libs.serializers.tag_list_serializer import TagListSerializer from kobo.apps.openrosa.libs.utils.decorators import check_obj @@ -53,7 +53,7 @@ class Meta: @check_obj def get_hash(self, obj): - return "md5:%s" % obj.md5_hash + return 'md5:%s' % obj.md5_hash # Tests are expecting this `public` to be passed only "True" or "False" # and as a string. I don't know how it worked pre-migrations to django 1.8 @@ -192,4 +192,4 @@ def get_url(self, obj): @check_obj def get_hash(self, obj): - return "%s" % (obj.md5_hash or 'md5:') + return '%s' % (obj.md5_hash or 'md5:') diff --git a/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py b/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py index 99c0089b9a..d5412b6a44 100644 --- a/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py +++ b/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py @@ -5,10 +5,10 @@ from typing import Union from django.contrib.auth import authenticate -from django_digest.test import DigestAuth from rest_framework import status from rest_framework.test import APIRequestFactory +from django_digest.test import DigestAuth from kobo.apps.openrosa.apps.api.viewsets.xform_submission_api import XFormSubmissionApi from kobo.apps.openrosa.apps.logger.models import Instance, XForm from kobo.apps.openrosa.libs.utils.logger_tools import ( @@ -22,8 +22,8 @@ class MakeSubmissionMixin: def submission_view(self): if not hasattr(self, '_submission_view'): setattr(self, '_submission_view', XFormSubmissionApi.as_view({ - "head": "create", - "post": "create" + 'head': 'create', + 'post': 'create' })) return self._submission_view diff --git a/kobo/apps/openrosa/libs/utils/image_tools.py b/kobo/apps/openrosa/libs/utils/image_tools.py index 39f83bc35c..f2585d4118 100644 --- a/kobo/apps/openrosa/libs/utils/image_tools.py +++ b/kobo/apps/openrosa/libs/utils/image_tools.py @@ -1,5 +1,4 @@ # coding: utf-8 -import os from io import BytesIO from tempfile import NamedTemporaryFile diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index 55270b47df..454a8ef920 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -11,23 +11,27 @@ from typing import Generator, Optional, Union from xml.etree import ElementTree as ET from xml.parsers.expat import ExpatError + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo +from wsgiref.util import FileWrapper +from xml.dom import Node + from dict2xml import dict2xml from django.conf import settings -from django.core.exceptions import ValidationError, PermissionDenied +from django.core.exceptions import 
PermissionDenied, ValidationError from django.core.files.base import File from django.core.mail import mail_admins from django.db import IntegrityError, transaction from django.db.models import Q from django.http import ( + Http404, HttpResponse, HttpResponseNotFound, StreamingHttpResponse, - Http404 ) from django.shortcuts import get_object_or_404 from django.utils import timezone as dj_timezone @@ -37,8 +41,6 @@ from pyxform.errors import PyXFormError from pyxform.xform2json import create_survey_element_from_xml from rest_framework.exceptions import NotAuthenticated -from xml.dom import Node -from wsgiref.util import FileWrapper from kobo.apps.openrosa.apps.logger.exceptions import ( DuplicateUUIDError, @@ -63,14 +65,14 @@ update_xform_submission_count, ) from kobo.apps.openrosa.apps.logger.xform_instance_parser import ( + DuplicateInstance, InstanceEmptyError, InstanceInvalidUserError, InstanceMultipleNodeError, - DuplicateInstance, clean_and_parse_xml, - get_uuid_from_xml, get_deprecated_uuid_from_xml, get_submission_date_from_xml, + get_uuid_from_xml, get_xform_media_question_xpaths, ) from kobo.apps.openrosa.apps.viewer.models.data_dictionary import DataDictionary @@ -85,7 +87,6 @@ ) from kpi.utils.object_permission import get_database_user - OPEN_ROSA_VERSION_HEADER = 'X-OpenRosa-Version' HTTP_OPEN_ROSA_VERSION_HEADER = 'HTTP_X_OPENROSA_VERSION' OPEN_ROSA_VERSION = '1.0' @@ -216,7 +217,7 @@ def disposition_ext_and_date(name, extension, show_date=True): if name is None: return 'attachment;' if show_date: - name = "%s_%s" % (name, date.today().strftime("%Y_%m_%d")) + name = '%s_%s' % (name, date.today().strftime('%Y_%m_%d')) return 'attachment; filename=%s.%s' % (name, extension) @@ -239,7 +240,7 @@ def get_instance_or_404(**criteria): :param criteria: dict :return: Instance """ - instances = Instance.objects.filter(**criteria).order_by("id") + instances = Instance.objects.filter(**criteria).order_by('id') if instances: instance = instances[0] xml_hash = instance.xml_hash @@ -247,8 +248,8 @@ def get_instance_or_404(**criteria): if instance_.xml_hash == xml_hash: continue raise DuplicateUUIDError( - "Multiple instances with different content exist for UUID " - "{}".format(instance.uuid) + 'Multiple instances with different content exist for UUID ' + '{}'.format(instance.uuid) ) return instance @@ -347,16 +348,16 @@ def inject_instanceid(xml_str, uuid): xml = clean_and_parse_xml(xml_str) children = xml.childNodes if children.length == 0: - raise ValueError(t("XML string must have a survey element.")) + raise ValueError(t('XML string must have a survey element.')) # check if we have a meta tag survey_node = children.item(0) meta_tags = [ n for n in survey_node.childNodes if n.nodeType == Node.ELEMENT_NODE - and n.tagName.lower() == "meta"] + and n.tagName.lower() == 'meta'] if len(meta_tags) == 0: - meta_tag = xml.createElement("meta") + meta_tag = xml.createElement('meta') xml.documentElement.appendChild(meta_tag) else: meta_tag = meta_tags[0] @@ -365,14 +366,14 @@ def inject_instanceid(xml_str, uuid): uuid_tags = [ n for n in meta_tag.childNodes if n.nodeType == Node.ELEMENT_NODE - and n.tagName == "instanceID"] + and n.tagName == 'instanceID'] if len(uuid_tags) == 0: - uuid_tag = xml.createElement("instanceID") + uuid_tag = xml.createElement('instanceID') meta_tag.appendChild(uuid_tag) else: uuid_tag = uuid_tags[0] # insert meta and instanceID - text_node = xml.createTextNode("uuid:%s" % uuid) + text_node = xml.createTextNode('uuid:%s' % uuid) uuid_tag.appendChild(text_node) return 
xml.toxml() return xml_str @@ -407,21 +408,21 @@ def mongo_sync_status(remongo=False, update_all=False, user=None, xform=None): found = 0 done = 0 total_to_remongo = 0 - report_string = "" + report_string = '' for xform in queryset_iterator(qs, 100): # get the count user = xform.user instance_count = Instance.objects.filter(xform=xform).count() - userform_id = "%s_%s" % (user.username, xform.id_string) + userform_id = '%s_%s' % (user.username, xform.id_string) mongo_count = mongo_instances.count_documents( {common_tags.USERFORM_ID: userform_id}, maxTimeMS=settings.MONGO_QUERY_TIMEOUT ) if instance_count != mongo_count or update_all: - line = "user: %s, id_string: %s\nInstance count: %d\t" \ - "Mongo count: %d\n---------------------------------" \ - "-----\n" % ( + line = 'user: %s, id_string: %s\nInstance count: %d\t' \ + 'Mongo count: %d\n---------------------------------' \ + '-----\n' % ( user.username, xform.id_string, instance_count, mongo_count) report_string += line @@ -432,24 +433,24 @@ def mongo_sync_status(remongo=False, update_all=False, user=None, xform=None): if remongo or (remongo and update_all): if update_all: sys.stdout.write( - "Updating all records for %s\n--------------------" - "---------------------------\n" % xform.id_string) + 'Updating all records for %s\n--------------------' + '---------------------------\n' % xform.id_string) else: sys.stdout.write( - "Updating missing records for %s\n----------------" - "-------------------------------\n" + 'Updating missing records for %s\n----------------' + '-------------------------------\n' % xform.id_string) _update_mongo_for_xform( xform, only_update_missing=not update_all ) done += 1 sys.stdout.write( - "%.2f %% done ...\r" % ((float(done) / float(total)) * 100)) + '%.2f %% done ...\r' % ((float(done) / float(total)) * 100)) # only show stats if we are not updating mongo, the update function # will show progress if not remongo: - line = "Total # of forms out of sync: %d\n" \ - "Total # of records to remongo: %d\n" % (found, total_to_remongo) + line = 'Total # of forms out of sync: %d\n' \ + 'Total # of records to remongo: %d\n' % (found, total_to_remongo) report_string += line return report_string @@ -492,7 +493,7 @@ def publish_form(callback): # ODK validation errors are vanilla errors and it masks a lot of regular # errors if we try to catch it so let's catch it, BUT reraise it # if we don't see typical ODK validation error messages in it. - if "ODK Validate Errors" not in str(e): + if 'ODK Validate Errors' not in str(e): raise # error in the XLS file; show an error to the user @@ -552,16 +553,16 @@ def report_exception(subject, info, exc_info=None): # TODO: replace with standard logging (i.e. 
`import logging`) if exc_info: cls, err = exc_info[:2] - message = t("Exception in request:" - " %(class)s: %(error)s")\ + message = t('Exception in request:' + ' %(class)s: %(error)s')\ % {'class': cls.__name__, 'error': err} - message += "".join(traceback.format_exception(*exc_info)) + message += ''.join(traceback.format_exception(*exc_info)) else: - message = "%s" % info + message = '%s' % info if settings.DEBUG or settings.TESTING: - sys.stdout.write("Subject: %s\n" % subject) - sys.stdout.write("Message: %s\n" % message) + sys.stdout.write('Subject: %s\n' % subject) + sys.stdout.write('Message: %s\n' % message) else: mail_admins(subject=subject, message=message) @@ -572,7 +573,7 @@ def response_with_mimetype_and_name( if extension is None: extension = mimetype if not full_mime: - mimetype = "application/%s" % mimetype + mimetype = 'application/%s' % mimetype if file_path: try: if not use_local_filesystem: @@ -585,7 +586,7 @@ def response_with_mimetype_and_name( response['Content-Length'] = os.path.getsize(file_path) except IOError: response = HttpResponseNotFound( - t("The requested file could not be found.")) + t('The requested file could not be found.')) else: response = HttpResponse(content_type=mimetype) response['Content-Disposition'] = disposition_ext_and_date( @@ -878,19 +879,19 @@ def _update_mongo_for_xform(xform, only_update_missing=True): instance_ids = set( [i.id for i in Instance.objects.only('id').filter(xform=xform)]) - sys.stdout.write("Total no of instances: %d\n" % len(instance_ids)) + sys.stdout.write('Total no of instances: %d\n' % len(instance_ids)) mongo_ids = set() user = xform.user - userform_id = "%s_%s" % (user.username, xform.id_string) + userform_id = '%s_%s' % (user.username, xform.id_string) if only_update_missing: - sys.stdout.write("Only updating missing mongo instances\n") + sys.stdout.write('Only updating missing mongo instances\n') mongo_ids = set( [rec[common_tags.ID] for rec in mongo_instances.find( {common_tags.USERFORM_ID: userform_id}, {common_tags.ID: 1}, max_time_ms=settings.MONGO_QUERY_TIMEOUT )]) - sys.stdout.write("Total no of mongo instances: %d\n" % len(mongo_ids)) + sys.stdout.write('Total no of mongo instances: %d\n' % len(mongo_ids)) # get the difference instance_ids = instance_ids.difference(mongo_ids) else: @@ -899,7 +900,7 @@ def _update_mongo_for_xform(xform, only_update_missing=True): # get instances sys.stdout.write( - "Total no of instances to update: %d\n" % len(instance_ids)) + 'Total no of instances to update: %d\n' % len(instance_ids)) instances = Instance.objects.only('id').in_bulk( [id_ for id_ in instance_ids]) total = len(instances) @@ -910,23 +911,23 @@ def _update_mongo_for_xform(xform, only_update_missing=True): save_success = pi.save(asynchronous=False) except InstanceEmptyError: print( - "\033[91m[WARNING] - Skipping Instance #{}/uuid:{} because " - "it is empty\033[0m".format(id_, instance.uuid) + '\033[91m[WARNING] - Skipping Instance #{}/uuid:{} because ' + 'it is empty\033[0m'.format(id_, instance.uuid) ) else: if not save_success: print( - "\033[91m[ERROR] - Instance #{}/uuid:{} - Could not save " - "the parsed instance\033[0m".format(id_, instance.uuid) + '\033[91m[ERROR] - Instance #{}/uuid:{} - Could not save ' + 'the parsed instance\033[0m'.format(id_, instance.uuid) ) else: done += 1 - progress = "\r%.2f %% done..." % ((float(done) / float(total)) * 100) + progress = '\r%.2f %% done...' 
% ((float(done) / float(total)) * 100) sys.stdout.write(progress) sys.stdout.flush() sys.stdout.write( - "\nUpdated %s\n------------------------------------------\n" + '\nUpdated %s\n------------------------------------------\n' % xform.id_string) diff --git a/kobo/apps/openrosa/libs/utils/middleware.py b/kobo/apps/openrosa/libs/utils/middleware.py index 7839eeae48..f4ea4a8817 100644 --- a/kobo/apps/openrosa/libs/utils/middleware.py +++ b/kobo/apps/openrosa/libs/utils/middleware.py @@ -3,12 +3,11 @@ from django.conf import settings from django.contrib.auth import get_user_model -from django.db import connection from django.http import ( - HttpResponseNotAllowed, - HttpResponseForbidden, HttpRequest, HttpResponse, + HttpResponseForbidden, + HttpResponseNotAllowed, ) from django.middleware.locale import LocaleMiddleware from django.template import loader diff --git a/kobo/apps/organizations/utils.py b/kobo/apps/organizations/utils.py index 0d17572af4..42fa00f56d 100644 --- a/kobo/apps/organizations/utils.py +++ b/kobo/apps/organizations/utils.py @@ -1,12 +1,13 @@ from typing import Union + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo from datetime import datetime -from dateutil.relativedelta import relativedelta +from dateutil.relativedelta import relativedelta from django.utils import timezone from kobo.apps.organizations.models import Organization diff --git a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py index 6be2719b24..50c7951eb3 100644 --- a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py +++ b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py @@ -8,8 +8,8 @@ TransferStatusTypeChoices, ) from ...utils import ( - move_media_files, move_attachments, + move_media_files, rewrite_mongo_userform_id, ) diff --git a/kobo/apps/project_ownership/models/transfer.py b/kobo/apps/project_ownership/models/transfer.py index c4368ab54f..7e0655fda4 100644 --- a/kobo/apps/project_ownership/models/transfer.py +++ b/kobo/apps/project_ownership/models/transfer.py @@ -19,15 +19,16 @@ from kpi.fields import KpiUidField from kpi.models import Asset, ObjectPermission from kpi.models.abstract_models import AbstractTimeStampedModel + +from ..exceptions import TransferAlreadyProcessedException +from ..tasks import async_task, send_email_to_admins +from ..utils import get_target_folder from .choices import ( InviteStatusChoices, TransferStatusChoices, TransferStatusTypeChoices, ) from .invite import Invite -from ..exceptions import TransferAlreadyProcessedException -from ..tasks import async_task, send_email_to_admins -from ..utils import get_target_folder class Transfer(AbstractTimeStampedModel): @@ -223,7 +224,7 @@ def _sent_in_app_messages(self): message_recipient_ids = ( ObjectPermission.objects.filter(asset_id=self.asset_id) .exclude(user_id__in=exclusions) - .values_list("user_id", flat=True) + .values_list('user_id', flat=True) ) if len(message_recipient_ids): diff --git a/kobo/apps/project_ownership/tasks.py b/kobo/apps/project_ownership/tasks.py index ac2d194fa0..a88f9308e6 100644 --- a/kobo/apps/project_ownership/tasks.py +++ b/kobo/apps/project_ownership/tasks.py @@ -1,7 +1,7 @@ from datetime import timedelta -from celery.signals import task_failure, task_retry from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded +from 
celery.signals import task_failure, task_retry from constance import config from django.apps import apps from django.conf import settings @@ -11,6 +11,7 @@ from kobo.celery import celery_app from kpi.utils.mailer import EmailMessage, Mailer + from .exceptions import AsyncTaskException, TransferStillPendingException from .models.choices import ( InviteStatusChoices, diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 02ff20bdb6..2a4cbe0847 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -1,10 +1,10 @@ import uuid +from unittest.mock import MagicMock, patch from constance.test import override_config from django.conf import settings from django.contrib.auth import get_user_model from django.utils import timezone -from mock import patch, MagicMock from rest_framework import status from rest_framework.reverse import reverse diff --git a/kobo/apps/project_ownership/tests/test_transfer_status.py b/kobo/apps/project_ownership/tests/test_transfer_status.py index 2853083e66..8c2641209f 100644 --- a/kobo/apps/project_ownership/tests/test_transfer_status.py +++ b/kobo/apps/project_ownership/tests/test_transfer_status.py @@ -3,6 +3,7 @@ from kpi.models import Asset from kpi.tests.utils.transaction import immediate_on_commit + from ..models import ( Invite, InviteStatusChoices, diff --git a/kobo/apps/project_ownership/utils.py b/kobo/apps/project_ownership/utils.py index f3db74104c..716f20e15f 100644 --- a/kobo/apps/project_ownership/utils.py +++ b/kobo/apps/project_ownership/utils.py @@ -7,8 +7,9 @@ from kobo.apps.openrosa.apps.logger.models import Attachment from kobo.apps.openrosa.apps.main.models import MetaData from kpi.models.asset import AssetFile -from .models.choices import TransferStatusChoices, TransferStatusTypeChoices + from .exceptions import AsyncTaskException +from .models.choices import TransferStatusChoices, TransferStatusTypeChoices def get_target_folder( diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index 3d59479ebd..d40eb0ab69 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -1,11 +1,13 @@ import timeit + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo -import pytest from datetime import datetime + +import pytest from dateutil.relativedelta import relativedelta from django.core.cache import cache from django.test import override_settings @@ -18,16 +20,16 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.organizations.models import Organization, OrganizationUser -from kobo.apps.trackers.tests.submission_utils import ( - create_mock_assets, - add_mock_submissions, -) from kobo.apps.stripe.tests.utils import ( generate_enterprise_subscription, generate_plan_subscription, ) -from kpi.tests.test_usage_calculator import BaseServiceUsageTestCase +from kobo.apps.trackers.tests.submission_utils import ( + add_mock_submissions, + create_mock_assets, +) from kpi.tests.api.v2.test_api_asset_usage import AssetUsageAPITestCase +from kpi.tests.test_usage_calculator import BaseServiceUsageTestCase class OrganizationServiceUsageAPIMultiUserTestCase(BaseServiceUsageTestCase): diff --git a/kobo/apps/subsequences/actions/automatic_transcription.py b/kobo/apps/subsequences/actions/automatic_transcription.py index 960288f31b..6981ae6c93 100644 --- 
a/kobo/apps/subsequences/actions/automatic_transcription.py +++ b/kobo/apps/subsequences/actions/automatic_transcription.py @@ -1,5 +1,6 @@ from kobo.apps.subsequences.constants import GOOGLETS -from ..actions.base import BaseAction, ACTION_NEEDED, PASSES + +from ..actions.base import ACTION_NEEDED, PASSES, BaseAction NOT_REQUESTED = 'NOT_REQUESTED' REQUESTED_BY_USER = 'REQUESTED_BY_USER' @@ -120,13 +121,13 @@ def addl_fields(self): }, } - ''' + """ {"value": "My translation", "languageCode": "en", "date": "12today"} AQ1 Translation (FR) AQ1 Translation (XZ) -------------------- -------------------- "My translation" - ''' + """ def engines(self): manual_name = f'engines/transcript_manual' diff --git a/kobo/apps/subsequences/actions/base.py b/kobo/apps/subsequences/actions/base.py index 8b01b9b14a..14feafe035 100644 --- a/kobo/apps/subsequences/actions/base.py +++ b/kobo/apps/subsequences/actions/base.py @@ -1,4 +1,5 @@ import datetime + try: from zoneinfo import ZoneInfo except ImportError: @@ -6,7 +7,7 @@ from django.utils import timezone -from kobo.apps.subsequences.constants import (GOOGLETS, GOOGLETX) +from kobo.apps.subsequences.constants import GOOGLETS, GOOGLETX ACTION_NEEDED = 'ACTION_NEEDED' PASSES = 'PASSES' @@ -39,10 +40,10 @@ def modify_jsonschema(self, schema): return schema def compile_revised_record(self, content, edits): - ''' + """ a method that applies changes to a json structure and appends previous changes to a revision history - ''' + """ if self.ID is None: return content for field_name, vals in edits.items(): diff --git a/kobo/apps/subsequences/actions/keyword_search.py b/kobo/apps/subsequences/actions/keyword_search.py index 3e83748e77..6d5ecffaf4 100644 --- a/kobo/apps/subsequences/actions/keyword_search.py +++ b/kobo/apps/subsequences/actions/keyword_search.py @@ -1,11 +1,12 @@ import copy -from ..actions.base import BaseAction, ACTION_NEEDED, PASSES + +from ..actions.base import ACTION_NEEDED, PASSES, BaseAction class KeywordSearchAction(BaseAction): ID = 'keyword_search' - ''' + """ @classmethod def build_params(cls, params, content): possible_transcribed_fields = [] @@ -14,7 +15,7 @@ def build_params(cls, params, content): possible_transcribed_fields.append(cls.get_xpath(cls, row)) params = {'values': possible_transcribed_fields} return params - ''' + """ @classmethod def get_values_for_content(cls, content): diff --git a/kobo/apps/subsequences/actions/qual.py b/kobo/apps/subsequences/actions/qual.py index e1a6b53902..4ee2bc7a51 100644 --- a/kobo/apps/subsequences/actions/qual.py +++ b/kobo/apps/subsequences/actions/qual.py @@ -1,4 +1,4 @@ -from ..actions.base import BaseAction, ACTION_NEEDED, PASSES +from ..actions.base import BaseAction from ..jsonschemas.qual_schema import DEFINITIONS as QUAL_DEFINITIONS @@ -14,10 +14,10 @@ def build_params(cls, survey_content): return {'values': _fields} def load_params(self, params): - ''' + """ Action.load_params is called when the instance is initialized for each Asset. 
It will - ''' + """ self.fields = params.get('values', []) self.qual_survey = params.get('qual_survey', []) self.everything_else = params diff --git a/kobo/apps/subsequences/actions/translation.py b/kobo/apps/subsequences/actions/translation.py index 644a6dd985..34e0568a80 100644 --- a/kobo/apps/subsequences/actions/translation.py +++ b/kobo/apps/subsequences/actions/translation.py @@ -1,8 +1,8 @@ -from django.utils import timezone -from ..actions.base import BaseAction, ACTION_NEEDED, PASSES from kobo.apps.subsequences.constants import GOOGLETX +from ..actions.base import BaseAction + TRANSLATED = 'translation' diff --git a/kobo/apps/subsequences/integrations/google/base.py b/kobo/apps/subsequences/integrations/google/base.py index 123e6e6a25..41156d8a10 100644 --- a/kobo/apps/subsequences/integrations/google/base.py +++ b/kobo/apps/subsequences/integrations/google/base.py @@ -5,19 +5,18 @@ from typing import Any import constance -from google.cloud import storage -from google.api_core.operation import Operation -from googleapiclient import discovery from django.conf import settings -from django.contrib.auth.models import User from django.core.cache import cache +from google.api_core.operation import Operation +from google.cloud import storage +from googleapiclient import discovery from kobo.apps.trackers.utils import update_nlp_counter -from kpi.utils.log import logging -from .utils import google_credentials_from_constance_config -from ...models import SubmissionExtras + from ...constants import GOOGLE_CACHE_TIMEOUT, make_nlp_async_cache_key from ...exceptions import SubsequenceTimeoutError +from ...models import SubmissionExtras +from .utils import google_credentials_from_constance_config class GoogleService(ABC): diff --git a/kobo/apps/subsequences/integrations/google/google_transcribe.py b/kobo/apps/subsequences/integrations/google/google_transcribe.py index 53b3f804cd..eb55fd1d17 100644 --- a/kobo/apps/subsequences/integrations/google/google_transcribe.py +++ b/kobo/apps/subsequences/integrations/google/google_transcribe.py @@ -1,25 +1,24 @@ from __future__ import annotations -import uuid import posixpath -from concurrent.futures import TimeoutError +import uuid from datetime import timedelta -from typing import Union, Any +from typing import Any, Union import constance from django.conf import settings from google.api_core.exceptions import InvalidArgument -from google.cloud import speech, storage +from google.cloud import speech -from kobo.apps.languages.models.transcription import TranscriptionService from kpi.utils.log import logging -from .base import GoogleService -from ...constants import GOOGLE_CODE, GOOGLETS + +from ...constants import GOOGLETS from ...exceptions import ( AudioTooLongError, SubsequenceTimeoutError, TranscriptionResultsNotFound, ) +from .base import GoogleService # https://cloud.google.com/speech-to-text/quotas#content ASYNC_MAX_LENGTH = timedelta(minutes=479) diff --git a/kobo/apps/subsequences/integrations/google/google_translate.py b/kobo/apps/subsequences/integrations/google/google_translate.py index abbff2a2d1..01432b13cf 100644 --- a/kobo/apps/subsequences/integrations/google/google_translate.py +++ b/kobo/apps/subsequences/integrations/google/google_translate.py @@ -3,24 +3,24 @@ import posixpath from datetime import date from hashlib import md5 -from typing import Union, Any +from typing import Any, Union import constance from django.conf import settings -from django.utils import timezone from google.api_core.exceptions import InvalidArgument -from 
google.cloud import translate_v3 as translate, storage +from google.cloud import translate_v3 as translate from kobo.apps.languages.models.translation import TranslationService from kpi.utils.log import logging -from .base import GoogleService -from .utils import google_credentials_from_constance_config -from ...constants import GOOGLETX, GOOGLE_CODE + +from ...constants import GOOGLE_CODE, GOOGLETX from ...exceptions import ( SubsequenceTimeoutError, - TranslationResultsNotFound, TranslationAsyncResultAvailable, + TranslationResultsNotFound, ) +from .base import GoogleService +from .utils import google_credentials_from_constance_config MAX_SYNC_CHARS = 30720 diff --git a/kobo/apps/subsequences/models.py b/kobo/apps/subsequences/models.py index e5a85a27b1..5c371f5884 100644 --- a/kobo/apps/subsequences/models.py +++ b/kobo/apps/subsequences/models.py @@ -1,11 +1,10 @@ # coding: utf-8 from django.db import models -from kobo.apps.trackers.utils import update_nlp_counter from kpi.models import Asset from kpi.models.abstract_models import AbstractTimeStampedModel -from kpi.utils.log import logging -from .constants import GOOGLETS, GOOGLETX, ASYNC_TRANSLATION_DELAY_INTERVAL + +from .constants import GOOGLETS, GOOGLETX from .utils.determine_export_cols_with_values import ( determine_export_cols_indiv, ) diff --git a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py index 3cd6128590..1d7360479e 100644 --- a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py +++ b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py @@ -1,7 +1,9 @@ import json -from kpi.models import Asset + from jsonschema import validate +from kpi.models import Asset + EXAMPLES = [ { 'labels': {'_default': 'Any descriptors?'}, diff --git a/kobo/apps/subsequences/scripts/repop_known_cols.py b/kobo/apps/subsequences/scripts/repop_known_cols.py index 8f1b57a043..ef6ff9b387 100644 --- a/kobo/apps/subsequences/scripts/repop_known_cols.py +++ b/kobo/apps/subsequences/scripts/repop_known_cols.py @@ -8,12 +8,12 @@ from django.core.paginator import Paginator from kobo.apps.subsequences.models import SubmissionExtras -from kobo.apps.subsequences.utils.determine_export_cols_with_values import ( - determine_export_cols_with_values, -) from kobo.apps.subsequences.utils.deprecation import ( - get_sanitized_known_columns, get_sanitized_dict_keys, + get_sanitized_known_columns, +) +from kobo.apps.subsequences.utils.determine_export_cols_with_values import ( + determine_export_cols_with_values, ) from kpi.models.asset import Asset @@ -70,7 +70,7 @@ def migrate_advanced_features(asset, save=True): def run(asset_uid=None): - if asset_uid == "!": + if asset_uid == '!': SubmissionExtras.objects.all().delete() for asset in Asset.objects.exclude(advanced_features__exact={}).iterator(): asset.advanced_features = {} diff --git a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py index bb731336d5..e990d240a9 100644 --- a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py +++ b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py @@ -1,5 +1,5 @@ from copy import deepcopy -from unittest.mock import patch, Mock +from unittest.mock import Mock, patch from constance.test import override_config from django.test import override_settings @@ -18,8 +18,6 @@ TranslationService, TranslationServiceLanguageM2M, ) -from 
kpi.models.asset import Asset -from kpi.utils.fuzzy_int import FuzzyInt from kpi.constants import ( PERM_ADD_SUBMISSIONS, PERM_CHANGE_ASSET, @@ -27,6 +25,9 @@ PERM_VIEW_ASSET, PERM_VIEW_SUBMISSIONS, ) +from kpi.models.asset import Asset +from kpi.utils.fuzzy_int import FuzzyInt + from ..constants import GOOGLETS, GOOGLETX from ..models import SubmissionExtras diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index 3be21a587d..15f3bb574d 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -1,14 +1,13 @@ from collections import defaultdict from copy import deepcopy +from ..actions.automatic_transcription import AutomaticTranscriptionAction +from ..actions.qual import QualAction +from ..actions.translation import TranslationAction from .deprecation import ( get_sanitized_advanced_features, get_sanitized_dict_keys, ) -from ..actions.automatic_transcription import AutomaticTranscriptionAction -from ..actions.translation import TranslationAction -from ..actions.qual import QualAction - AVAILABLE_ACTIONS = ( AutomaticTranscriptionAction, diff --git a/kobo/apps/subsequences/utils/determine_export_cols_with_values.py b/kobo/apps/subsequences/utils/determine_export_cols_with_values.py index 220e4f12ed..2a0a05f7a7 100644 --- a/kobo/apps/subsequences/utils/determine_export_cols_with_values.py +++ b/kobo/apps/subsequences/utils/determine_export_cols_with_values.py @@ -1,5 +1,5 @@ # coding: utf-8 -''' +""" this util has 2 functions that serve to parse the submission_extras for an asset and build a list of extra columns that should be included in exports. @@ -13,7 +13,7 @@ - q1:translt:de output is a more descriptive structure. (See test_parse_known_cols) -''' +""" KEY_TYPE_DICTS = { diff --git a/kobo/apps/superuser_stats/models.py b/kobo/apps/superuser_stats/models.py index 6c8f824cc8..581bc44df5 100644 --- a/kobo/apps/superuser_stats/models.py +++ b/kobo/apps/superuser_stats/models.py @@ -1,6 +1,4 @@ -from kobo.apps.openrosa.apps.logger.models import ( - MonthlyXFormSubmissionCounter -) +from kobo.apps.openrosa.apps.logger.models import MonthlyXFormSubmissionCounter class SuperuserStatsModel(MonthlyXFormSubmissionCounter): diff --git a/kobo/apps/superuser_stats/tasks.py b/kobo/apps/superuser_stats/tasks.py index f858fc6b96..157a0fdc13 100644 --- a/kobo/apps/superuser_stats/tasks.py +++ b/kobo/apps/superuser_stats/tasks.py @@ -2,15 +2,11 @@ from __future__ import annotations import csv -from celery import shared_task from collections import Counter from datetime import datetime from typing import Union -try: - from zoneinfo import ZoneInfo -except ImportError: - from backports.zoneinfo import ZoneInfo +from celery import shared_task from dateutil.relativedelta import relativedelta from django.conf import settings from django.core.files.storage import default_storage @@ -18,8 +14,8 @@ CharField, Count, DateField, - IntegerField, F, + IntegerField, Q, Sum, Value, @@ -28,18 +24,17 @@ from hub.models import ExtraUserDetail from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.trackers.models import NLPUsageCounter -from kobo.static_lists import COUNTRIES -from kpi.constants import ASSET_TYPE_SURVEY from kobo.apps.openrosa.apps.logger.models import ( Instance, MonthlyXFormSubmissionCounter, XForm, ) from kobo.apps.openrosa.apps.main.models import UserProfile +from kobo.apps.trackers.models import NLPUsageCounter +from kobo.static_lists import COUNTRIES +from kpi.constants import ASSET_TYPE_SURVEY from 
kpi.models.asset import Asset, AssetDeploymentStatus - # Make sure this app is listed in `INSTALLED_APPS`; otherwise, Celery will # complain that the task is unregistered diff --git a/kobo/apps/trackers/tests/test_trackers.py b/kobo/apps/trackers/tests/test_trackers.py index 083ec16b7c..3340340ecc 100644 --- a/kobo/apps/trackers/tests/test_trackers.py +++ b/kobo/apps/trackers/tests/test_trackers.py @@ -1,6 +1,5 @@ from datetime import datetime - from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.trackers.models import NLPUsageCounter from kobo.apps.trackers.utils import update_nlp_counter diff --git a/kobo/apps/trash_bin/models/project.py b/kobo/apps/trash_bin/models/project.py index e0ee2354a7..1c4360a1d9 100644 --- a/kobo/apps/trash_bin/models/project.py +++ b/kobo/apps/trash_bin/models/project.py @@ -14,6 +14,7 @@ from kpi.fields import KpiUidField from kpi.models.asset import Asset, AssetDeploymentStatus from kpi.utils.django_orm_helper import UpdateJSONFieldAttributes + from . import BaseTrash diff --git a/kobo/apps/trash_bin/utils.py b/kobo/apps/trash_bin/utils.py index dd1318ff4c..89e86e8552 100644 --- a/kobo/apps/trash_bin/utils.py +++ b/kobo/apps/trash_bin/utils.py @@ -4,8 +4,8 @@ from copy import deepcopy from datetime import timedelta -from django.contrib.auth import get_user_model from django.conf import settings +from django.contrib.auth import get_user_model from django.db import IntegrityError, models, transaction from django.db.models import F, Q from django.db.models.signals import pre_delete @@ -24,11 +24,12 @@ from kpi.models import Asset, ExportTask, ImportTask from kpi.utils.mongo_helper import MongoHelper from kpi.utils.storage import rmdir + from .constants import DELETE_PROJECT_STR_PREFIX, DELETE_USER_STR_PREFIX from .exceptions import ( TrashIntegrityError, - TrashNotImplementedError, TrashMongoDeleteOrphansError, + TrashNotImplementedError, TrashTaskInProgressError, ) from .models import TrashStatus diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 01c982f6dd..234c45bdce 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -22,6 +22,7 @@ FREE_TIER_NO_THRESHOLDS, ) from kpi.utils.json import LazyJSONSerializable + from ..static_lists import EXTRA_LANG_INFO, SECTOR_CHOICE_DEFAULTS env = environ.Env() diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index b953d6245a..36692634e0 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -7,30 +7,30 @@ import json import os import uuid -from datetime import date from contextlib import contextmanager -from typing import Union, Iterator, Optional +from datetime import date +from typing import Iterator, Optional, Union from bson import json_util from django.conf import settings +from django.core.exceptions import PermissionDenied from django.db.models.query import QuerySet from django.utils import timezone from django.utils.translation import gettext_lazy as t -from django.core.exceptions import PermissionDenied from rest_framework import serializers -from rest_framework.reverse import reverse from rest_framework.pagination import _positive_int as positive_int +from rest_framework.reverse import reverse from shortuuid import ShortUUID from kobo.apps.openrosa.libs.utils.logger_tools import ( http_open_rosa_error_handler, ) from kpi.constants import ( - SUBMISSION_FORMAT_TYPE_XML, - SUBMISSION_FORMAT_TYPE_JSON, PERM_CHANGE_SUBMISSIONS, PERM_PARTIAL_SUBMISSIONS, PERM_VIEW_SUBMISSIONS, + 
SUBMISSION_FORMAT_TYPE_JSON, + SUBMISSION_FORMAT_TYPE_XML, ) from kpi.exceptions import BulkUpdateSubmissionsClientException from kpi.models.asset_file import AssetFile diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index 394cdf3755..ff2183416b 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -11,13 +11,14 @@ from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.libs.utils.logger_tools import ( - dict2xform, create_instance, + dict2xform, ) from kpi.constants import PERM_ADD_SUBMISSIONS, SUBMISSION_FORMAT_TYPE_JSON from kpi.tests.utils.dicts import convert_hierarchical_keys_to_nested_dict -from .openrosa_backend import OpenRosaDeploymentBackend + from ..utils.files import ExtendedContentFile +from .openrosa_backend import OpenRosaDeploymentBackend class MockDeploymentBackend(OpenRosaDeploymentBackend): diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index 3e3c9cfd30..a10ca69d96 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -3,20 +3,21 @@ from collections import defaultdict from contextlib import contextmanager from datetime import date, datetime -from typing import Generator, Optional, Union, Literal +from typing import Generator, Literal, Optional, Union from urllib.parse import urlparse + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo -import requests import redis.exceptions +import requests from defusedxml import ElementTree as DET from django.conf import settings from django.core.files import File from django.core.files.base import ContentFile -from django.db.models import Sum, F +from django.db.models import F, Sum from django.db.models.functions import Coalesce from django.db.models.query import QuerySet from django.utils import timezone @@ -24,12 +25,11 @@ from django_redis import get_redis_connection from rest_framework import status -from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile from kobo.apps.openrosa.apps.logger.models import ( Attachment, DailyXFormSubmissionCounter, - MonthlyXFormSubmissionCounter, Instance, + MonthlyXFormSubmissionCounter, XForm, ) from kobo.apps.openrosa.apps.logger.utils.instance import ( @@ -38,6 +38,7 @@ remove_validation_status_from_instance, set_instance_validation_statuses, ) +from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile from kobo.apps.openrosa.libs.utils.logger_tools import ( create_instance, publish_xls_form, @@ -45,14 +46,14 @@ from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter from kpi.constants import ( - SUBMISSION_FORMAT_TYPE_JSON, - SUBMISSION_FORMAT_TYPE_XML, - PERM_FROM_KC_ONLY, PERM_CHANGE_SUBMISSIONS, PERM_DELETE_SUBMISSIONS, + PERM_FROM_KC_ONLY, PERM_PARTIAL_SUBMISSIONS, PERM_VALIDATE_SUBMISSIONS, PERM_VIEW_SUBMISSIONS, + SUBMISSION_FORMAT_TYPE_JSON, + SUBMISSION_FORMAT_TYPE_XML, ) from kpi.exceptions import ( AttachmentNotFoundException, @@ -73,14 +74,15 @@ from kpi.utils.mongo_helper import MongoHelper from kpi.utils.object_permission import get_database_user from kpi.utils.xml import fromstring_preserve_root_xmlns, xml_tostring + +from ..exceptions import ( + BadFormatException, +) from .base_backend import BaseDeploymentBackend from .kc_access.utils import ( assign_applicable_kc_permissions, kc_transaction_atomic, ) -from ..exceptions import ( - 
BadFormatException, -) class OpenRosaDeploymentBackend(BaseDeploymentBackend): @@ -548,8 +550,8 @@ def get_daily_counts( 'format': '%Y-%m-%d', 'date': { '$dateFromString': { - 'format': "%Y-%m-%dT%H:%M:%S", - 'dateString': "$_submission_time" + 'format': '%Y-%m-%dT%H:%M:%S', + 'dateString': '$_submission_time' } } } @@ -729,7 +731,7 @@ def get_submissions( submissions = self.__get_submissions_in_xml(**params) else: raise BadFormatException( - "The format {} is not supported".format(format_type) + 'The format {} is not supported'.format(format_type) ) return submissions diff --git a/kpi/management/commands/sync_kobocat_perms.py b/kpi/management/commands/sync_kobocat_perms.py index 5e7e1b1396..c20b2aa014 100644 --- a/kpi/management/commands/sync_kobocat_perms.py +++ b/kpi/management/commands/sync_kobocat_perms.py @@ -6,20 +6,20 @@ from guardian.models import UserObjectPermission from kpi.constants import PERM_FROM_KC_ONLY -from kpi.models import Asset, ObjectPermission from kpi.deployment_backends.kc_access.utils import ( assign_applicable_kc_permissions, kc_transaction_atomic, ) from kpi.management.commands.sync_kobocat_xforms import _sync_permissions +from kpi.models import Asset, ObjectPermission from kpi.utils.object_permission import get_perm_ids_from_code_names class Command(BaseCommand): help = ( - "Synchronize permissions of deployed forms with KoBoCAT.\n" - "They are synced bidirectionally unless `--mirror-kpi` option is used." + 'Synchronize permissions of deployed forms with KoBoCAT.\n' + 'They are synced bidirectionally unless `--mirror-kpi` option is used.' ) def add_arguments(self, parser): @@ -28,7 +28,7 @@ def add_arguments(self, parser): action='store', dest='asset_uid', default=None, - help="Sync only a specific asset", + help='Sync only a specific asset', ) parser.add_argument( '--username', @@ -38,10 +38,10 @@ def add_arguments(self, parser): help="Sync only a specific user's assets", ) parser.add_argument( - "--chunks", + '--chunks', default=1000, type=int, - help="Update records by batch of `chunks`.", + help='Update records by batch of `chunks`.', ) parser.add_argument( '--mirror-kpi', diff --git a/kpi/management/commands/sync_kobocat_xforms.py b/kpi/management/commands/sync_kobocat_xforms.py index 32e5fd3214..9307759b52 100644 --- a/kpi/management/commands/sync_kobocat_xforms.py +++ b/kpi/management/commands/sync_kobocat_xforms.py @@ -15,18 +15,18 @@ from django.core.management.base import BaseCommand from django.db import transaction from guardian.models import UserObjectPermission -from formpack.utils.xls_to_ss_structure import xlsx_to_dicts from pyxform import xls2json_backends from rest_framework.authtoken.models import Token +from formpack.utils.xls_to_ss_structure import xlsx_to_dicts from kobo.apps.kobo_auth.shortcuts import User -from kpi.constants import PERM_FROM_KC_ONLY -from kpi.utils.log import logging from kobo.apps.openrosa.apps.logger.models.xform import XForm +from kpi.constants import PERM_FROM_KC_ONLY from kpi.deployment_backends.openrosa_backend import OpenRosaDeploymentBackend from kpi.models import Asset, ObjectPermission -from kpi.utils.object_permission import get_anonymous_user +from kpi.utils.log import logging from kpi.utils.models import _set_auto_field_update +from kpi.utils.object_permission import get_anonymous_user TIMESTAMP_DIFFERENCE_TOLERANCE = datetime.timedelta(seconds=30) @@ -99,7 +99,7 @@ def _convert_dict_to_xls(ss_dict): workbook = xlwt.Workbook() for sheet_name in ss_dict.keys(): # pyxform.xls2json_backends adds "_header" 
items for each sheet..... - if not re.match(r".*_header$", sheet_name): + if not re.match(r'.*_header$', sheet_name): # Sheets with empty names are rejected by xlwt; omit them if not sheet_name: continue @@ -480,8 +480,8 @@ def handle(self, *args, **options): self._print_str('%d users selected' % users.count()) # We'll be copying the date fields from KC, so don't auto-update them - _set_auto_field_update(Asset, "date_created", False) - _set_auto_field_update(Asset, "date_modified", False) + _set_auto_field_update(Asset, 'date_created', False) + _set_auto_field_update(Asset, 'date_modified', False) for user in users: # Make sure the user has a token for access to KC's API @@ -561,8 +561,8 @@ def handle(self, *args, **options): logging.exception('sync_kobocat_xforms: {}'.format( ', '.join(error_information))) - _set_auto_field_update(Asset, "date_created", True) - _set_auto_field_update(Asset, "date_modified", True) + _set_auto_field_update(Asset, 'date_created', True) + _set_auto_field_update(Asset, 'date_modified', True) if populate_xform_kpi_asset_uid: call_command( diff --git a/kpi/migrations/0011_explode_asset_deployments.py b/kpi/migrations/0011_explode_asset_deployments.py index 0111e83a4f..98193cb2ad 100644 --- a/kpi/migrations/0011_explode_asset_deployments.py +++ b/kpi/migrations/0011_explode_asset_deployments.py @@ -1,5 +1,6 @@ # coding: utf-8 import sys + from django.db import migrations from kpi.deployment_backends.openrosa_backend import OpenRosaDeploymentBackend @@ -15,8 +16,8 @@ def explode_assets(apps, schema_editor): asset_progress_interval = max(1, int(total_assets / 50)) assets_done = 0 # Do not automatically update asset timestamps during this migration - _set_auto_field_update(Asset, "date_created", False) - _set_auto_field_update(Asset, "date_modified", False) + _set_auto_field_update(Asset, 'date_created', False) + _set_auto_field_update(Asset, 'date_modified', False) for asset in deployed_assets: deployment = asset.assetdeployment_set.last() # Copy the deployment-related data @@ -35,8 +36,8 @@ def explode_assets(apps, schema_editor): if assets_done % asset_progress_interval == 0: sys.stdout.write('.') sys.stdout.flush() - _set_auto_field_update(Asset, "date_created", True) - _set_auto_field_update(Asset, "date_modified", True) + _set_auto_field_update(Asset, 'date_created', True) + _set_auto_field_update(Asset, 'date_modified', True) ContentType = apps.get_model('contenttypes', 'ContentType') try: diff --git a/kpi/migrations/0012_onetimeauthenticationkey.py b/kpi/migrations/0012_onetimeauthenticationkey.py index 7d9708189c..ad0a4f0c3f 100644 --- a/kpi/migrations/0012_onetimeauthenticationkey.py +++ b/kpi/migrations/0012_onetimeauthenticationkey.py @@ -1,12 +1,12 @@ -# coding: utf-8 from functools import partial from secrets import token_urlsafe -from django.db import migrations, models -import kpi.models.authorized_application import django.core.validators +from django.db import migrations, models from django.conf import settings +from kpi.utils.datetime import ten_minutes_from_now + class Migration(migrations.Migration): @@ -19,10 +19,32 @@ class Migration(migrations.Migration): migrations.CreateModel( name='OneTimeAuthenticationKey', fields=[ - ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), - ('key', models.CharField(default=partial(token_urlsafe, nbytes=45), max_length=60, validators=[django.core.validators.MinLengthValidator(60)])), - ('expiry', 
models.DateTimeField(default=kpi.models.authorized_application.ten_minutes_from_now)), - ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)), + ( + 'id', + models.AutoField( + verbose_name='ID', + serialize=False, + auto_created=True, + primary_key=True, + ), + ), + ( + 'key', + models.CharField( + default=partial(token_urlsafe, nbytes=45), + max_length=60, + validators=[ + django.core.validators.MinLengthValidator(60) + ], + ), + ), + ('expiry', models.DateTimeField(default=ten_minutes_from_now)), + ( + 'user', + models.ForeignKey( + to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE + ), + ), ], ), ] diff --git a/kpi/mixins/formpack_xlsform_utils.py b/kpi/mixins/formpack_xlsform_utils.py index 3a9d3e5b60..6eb20a2a44 100644 --- a/kpi/mixins/formpack_xlsform_utils.py +++ b/kpi/mixins/formpack_xlsform_utils.py @@ -8,29 +8,28 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.spreadsheet_content import flatten_to_spreadsheet_content - from kobo.apps.reports.constants import FUZZY_VERSION_PATTERN +from kpi.utils.absolute_paths import ( + insert_full_paths_in_place, +) from kpi.utils.asset_translation_utils import ( - compare_translations, - # TRANSLATIONS_EQUAL, - TRANSLATIONS_OUT_OF_ORDER, - TRANSLATION_RENAMED, - TRANSLATION_DELETED, TRANSLATION_ADDED, TRANSLATION_CHANGE_UNSUPPORTED, + TRANSLATION_DELETED, + TRANSLATION_RENAMED, TRANSLATIONS_MULTIPLE_CHANGES, + # TRANSLATIONS_EQUAL, + TRANSLATIONS_OUT_OF_ORDER, + compare_translations, ) from kpi.utils.autoname import ( autoname_fields_in_place, autovalue_choices_in_place, ) -from kpi.utils.absolute_paths import ( - insert_full_paths_in_place, -) from kpi.utils.kobo_to_xlsform import ( expand_rank_and_score_in_place, - replace_with_autofields, remove_empty_expressions_in_place, + replace_with_autofields, ) from kpi.utils.random_id import random_id from kpi.utils.standardize_content import ( @@ -275,10 +274,10 @@ def _prioritize_translation(self, content, translation_name, is_new=False): # Remove None from translations we want to display to users valid_translations = [t for t in _translations if t is not None] raise ValueError( - "`{translation_name}` is specified as the default language, " - "but only these translations are present in the form: `{translations}`".format( + '`{translation_name}` is specified as the default language, ' + 'but only these translations are present in the form: `{translations}`'.format( translation_name=translation_name, - translations="`, `".join(valid_translations) + translations='`, `'.join(valid_translations) ) ) diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 7165e8d48c..99c0b20dbc 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -8,38 +8,35 @@ from django.conf import settings from django.contrib.auth.models import Permission from django.contrib.postgres.indexes import BTreeIndex, GinIndex -from django.db import models -from django.db import transaction -from django.db.models import Prefetch, Q, F +from django.db import models, transaction +from django.db.models import F, Prefetch, Q from django.utils.translation import gettext_lazy as t from django_request_cache import cache_for_request from taggit.managers import TaggableManager, _TaggableManager from taggit.utils import require_instance_manager + from formpack.utils.flatten_content import flatten_content from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile - from kobo.apps.reports.constants import ( - 
SPECIFIC_REPORTS_KEY, DEFAULT_REPORTS_KEY, + SPECIFIC_REPORTS_KEY, +) +from kobo.apps.subsequences.advanced_features_params_schema import ( + ADVANCED_FEATURES_PARAMS_SCHEMA, ) from kobo.apps.subsequences.utils import ( advanced_feature_instances, advanced_submission_jsonschema, ) -from kobo.apps.subsequences.advanced_features_params_schema import ( - ADVANCED_FEATURES_PARAMS_SCHEMA, -) from kobo.apps.subsequences.utils.deprecation import ( - get_sanitized_known_columns, - get_sanitized_dict_keys, get_sanitized_advanced_features, + get_sanitized_dict_keys, + get_sanitized_known_columns, qpath_to_xpath, ) from kobo.apps.subsequences.utils.parse_known_cols import parse_known_cols from kpi.constants import ( - ASSET_TYPES, - ASSET_TYPES_WITH_CONTENT, ASSET_TYPE_BLOCK, ASSET_TYPE_COLLECTION, ASSET_TYPE_EMPTY, @@ -47,6 +44,8 @@ ASSET_TYPE_SURVEY, ASSET_TYPE_TEMPLATE, ASSET_TYPE_TEXT, + ASSET_TYPES, + ASSET_TYPES_WITH_CONTENT, ATTACHMENT_QUESTION_TYPES, PERM_ADD_SUBMISSIONS, PERM_CHANGE_ASSET, @@ -75,8 +74,8 @@ from kpi.mixins import ( FormpackXLSFormUtilsMixin, ObjectPermissionMixin, - XlsExportableMixin, StandardizeSearchableFieldMixin, + XlsExportableMixin, ) from kpi.models.abstract_models import AbstractTimeStampedModel from kpi.models.asset_file import AssetFile @@ -268,7 +267,7 @@ class Meta: 'for specific users')), (PERM_CHANGE_SUBMISSIONS, t('Can modify submitted data for asset')), (PERM_DELETE_SUBMISSIONS, t('Can delete submitted data for asset')), - (PERM_VALIDATE_SUBMISSIONS, t("Can validate submitted data asset")), + (PERM_VALIDATE_SUBMISSIONS, t('Can validate submitted data asset')), # TEMPORARY Issue #1161: A flag to indicate that permissions came # solely from `sync_kobocat_xforms` and not from any user # interaction with KPI @@ -336,7 +335,7 @@ class Meta: # Depending on our `asset_type`, only some permissions might be applicable ASSIGNABLE_PERMISSIONS_BY_TYPE = { ASSET_TYPE_SURVEY: tuple( - (p for p in ASSIGNABLE_PERMISSIONS if p != PERM_DISCOVER_ASSET) + p for p in ASSIGNABLE_PERMISSIONS if p != PERM_DISCOVER_ASSET ), ASSET_TYPE_TEMPLATE: ( PERM_VIEW_ASSET, @@ -384,11 +383,9 @@ class Meta: PERM_CHANGE_ASSET: (PERM_VIEW_ASSET,), PERM_DISCOVER_ASSET: (PERM_VIEW_ASSET,), PERM_MANAGE_ASSET: tuple( - ( - p - for p in ASSIGNABLE_PERMISSIONS - if p not in (PERM_MANAGE_ASSET, PERM_PARTIAL_SUBMISSIONS) - ) + p + for p in ASSIGNABLE_PERMISSIONS + if p not in (PERM_MANAGE_ASSET, PERM_PARTIAL_SUBMISSIONS) ), PERM_VIEW_SUBMISSIONS: (PERM_VIEW_ASSET,), PERM_PARTIAL_SUBMISSIONS: (PERM_VIEW_ASSET,), @@ -749,7 +746,7 @@ def get_partial_perms( """ perms = self.asset_partial_permissions.filter(user_id=user_id)\ - .values_list("permissions", flat=True).first() + .values_list('permissions', flat=True).first() if perms: if with_filters: diff --git a/kpi/models/authorized_application.py b/kpi/models/authorized_application.py index 675966a81e..2886df2469 100644 --- a/kpi/models/authorized_application.py +++ b/kpi/models/authorized_application.py @@ -1,16 +1,14 @@ # coding: utf-8 -from functools import partial import math +from functools import partial from secrets import token_urlsafe +from django.contrib.auth.models import AnonymousUser +from django.core.validators import MinLengthValidator from django.db import models from django.utils.translation import gettext_lazy as t -from django.core.validators import MinLengthValidator -from django.contrib.auth.models import AnonymousUser -from rest_framework.authentication import TokenAuthentication from rest_framework import exceptions - -from 
kpi.utils.datetime import ten_minutes_from_now +from rest_framework.authentication import TokenAuthentication KEY_LENGTH = 60 NUM_KEY_BYTES = math.floor(KEY_LENGTH * 3 / 4) diff --git a/kpi/models/import_export_task.py b/kpi/models/import_export_task.py index 436eae476e..362bb15e18 100644 --- a/kpi/models/import_export_task.py +++ b/kpi/models/import_export_task.py @@ -1,7 +1,6 @@ # coding: utf-8 import base64 import datetime -import dateutil.parser import os import posixpath import re @@ -9,14 +8,16 @@ from collections import defaultdict from io import BytesIO from os.path import split, splitext -from typing import List, Dict, Optional, Tuple, Generator +from typing import Dict, Generator, List, Optional, Tuple + +import dateutil.parser + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo import constance -import formpack import requests from django.conf import settings from django.contrib.postgres.indexes import BTreeIndex, HashIndex @@ -25,6 +26,13 @@ from django.db.models import F from django.urls import reverse from django.utils.translation import gettext as t +from openpyxl.utils.exceptions import InvalidFileException +from private_storage.fields import PrivateFileField +from pyxform.xls2json_backends import xls_to_dict, xlsx_to_dict +from rest_framework import exceptions +from werkzeug.http import parse_options_header + +import formpack from formpack.constants import ( KOBO_LOCK_SHEET, ) @@ -37,12 +45,6 @@ ) from formpack.utils.kobo_locking import get_kobo_locking_profiles from formpack.utils.string import ellipsize -from private_storage.fields import PrivateFileField -from rest_framework import exceptions -from werkzeug.http import parse_options_header -from openpyxl.utils.exceptions import InvalidFileException -from pyxform.xls2json_backends import xls_to_dict, xlsx_to_dict - from kobo.apps.reports.report_data import build_formpack from kobo.apps.subsequences.utils import stream_with_extras from kpi.constants import ( @@ -63,13 +65,13 @@ create_assets, resolve_url_to_asset, ) +from kpi.utils.project_view_exports import create_project_view_export from kpi.utils.rename_xls_sheet import ( + ConflictSheetError, + NoFromSheetError, rename_xls_sheet, rename_xlsx_sheet, - NoFromSheetError, - ConflictSheetError, ) -from kpi.utils.project_view_exports import create_project_view_export from kpi.utils.strings import to_str from kpi.zip_importer import HttpContentParse @@ -214,7 +216,7 @@ def get_absolute_filepath(self, filename: str) -> str: # was created concurrently. pass if not os.path.isdir(directory): - raise IOError("%s exists and is not a directory." % directory) + raise IOError('%s exists and is not a directory.' % directory) # Store filenames with forward slashes, even on Windows. 
filename = filename.replace('\\', '/') @@ -768,7 +770,7 @@ def _run_task(self, messages): with self.result.storage.open(absolute_filepath, 'wb') as output_file: if export_type == 'csv': for line in export.to_csv(submission_stream): - output_file.write((line + "\r\n").encode('utf-8')) + output_file.write((line + '\r\n').encode('utf-8')) elif export_type == 'geojson': for line in export.to_geojson( submission_stream, flatten=flatten diff --git a/kpi/permissions.py b/kpi/permissions.py index b068f20ee2..9d282ee4a1 100644 --- a/kpi/permissions.py +++ b/kpi/permissions.py @@ -114,7 +114,7 @@ def get_required_permissions(self, method): perms = [perm % kwargs for perm in perm_list] # Because `ObjectPermissionMixin.get_perms()` returns codenames only, # remove the `app_label` prefix before returning - return [perm.replace("{}.".format(app_label), "") for perm in perms] + return [perm.replace('{}.'.format(app_label), '') for perm in perms] def has_object_permission(self, request, view, obj): # Because authentication checks has already executed via @@ -328,7 +328,7 @@ class SubmissionPermission(AssetNestedObjectPermission): Permissions for submissions. """ - MODEL_NAME = "submissions" # Hard-code `model_name` to match permissions + MODEL_NAME = 'submissions' # Hard-code `model_name` to match permissions perms_map = { 'GET': ['%(app_label)s.view_%(model_name)s'], diff --git a/kpi/serializers/v2/asset.py b/kpi/serializers/v2/asset.py index af8a2d6808..b608c27ce5 100644 --- a/kpi/serializers/v2/asset.py +++ b/kpi/serializers/v2/asset.py @@ -7,10 +7,11 @@ from constance import config from django.conf import settings -from django.db.models import QuerySet, F -from django.utils.translation import gettext as t, ngettext as nt +from django.db.models import F, QuerySet +from django.utils.translation import gettext as t +from django.utils.translation import ngettext as nt from django_request_cache import cache_for_request -from rest_framework import serializers, exceptions +from rest_framework import exceptions, serializers from rest_framework.fields import empty from rest_framework.relations import HyperlinkedIdentityField from rest_framework.reverse import reverse @@ -32,13 +33,13 @@ ASSET_STATUS_PRIVATE, ASSET_STATUS_PUBLIC, ASSET_STATUS_SHARED, + ASSET_TYPE_COLLECTION, ASSET_TYPE_SURVEY, ASSET_TYPES, - ASSET_TYPE_COLLECTION, PERM_CHANGE_ASSET, PERM_CHANGE_METADATA_ASSET, - PERM_MANAGE_ASSET, PERM_DISCOVER_ASSET, + PERM_MANAGE_ASSET, PERM_VIEW_ASSET, PERM_VIEW_SUBMISSIONS, ) @@ -49,8 +50,8 @@ ) from kpi.models import ( Asset, - AssetVersion, AssetExportSettings, + AssetVersion, ObjectPermission, UserAssetSubscription, ) @@ -61,18 +62,16 @@ get_user_permission_assignments, get_user_permission_assignments_queryset, ) - -from .asset_file import AssetFileSerializer - from kpi.utils.project_views import ( get_project_view_user_permissions_for_asset, user_has_project_view_asset_perm, view_has_perm, ) -from .asset_version import AssetVersionListSerializer -from .asset_permission_assignment import AssetPermissionAssignmentSerializer from .asset_export_settings import AssetExportSettingsSerializer +from .asset_file import AssetFileSerializer +from .asset_permission_assignment import AssetPermissionAssignmentSerializer +from .asset_version import AssetVersionListSerializer class AssetBulkActionsSerializer(serializers.Serializer): diff --git a/kpi/signals.py b/kpi/signals.py index 2c41091ad7..b359d9227d 100644 --- a/kpi/signals.py +++ b/kpi/signals.py @@ -2,7 +2,7 @@ from django.conf import settings from 
django.contrib.auth.models import AnonymousUser -from django.db.models.signals import post_save, post_delete +from django.db.models.signals import post_delete, post_save from django.dispatch import receiver from taggit.models import Tag diff --git a/kpi/tasks.py b/kpi/tasks.py index 3da2466091..9ed84ddb19 100644 --- a/kpi/tasks.py +++ b/kpi/tasks.py @@ -7,7 +7,6 @@ from django.core.mail import send_mail from django.core.management import call_command - from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.markdownx_uploader.tasks import remove_unused_markdown_files from kobo.celery import celery_app diff --git a/kpi/tests/api/v1/test_api_assets.py b/kpi/tests/api/v1/test_api_assets.py index 41cd865d07..6be5ad05b5 100644 --- a/kpi/tests/api/v1/test_api_assets.py +++ b/kpi/tests/api/v1/test_api_assets.py @@ -4,22 +4,22 @@ from urllib.parse import unquote_plus from django.urls import reverse -from formpack.utils.expand_content import SCHEMA_VERSION from rest_framework import status from rest_framework.authtoken.models import Token +from formpack.utils.expand_content import SCHEMA_VERSION from kobo.apps.kobo_auth.shortcuts import User from kpi.constants import ASSET_TYPE_COLLECTION from kpi.models import Asset, ExportTask from kpi.models.import_export_task import export_upload_to from kpi.serializers.v1.asset import AssetListSerializer + # importing module instead of the class, avoid running the tests twice from kpi.tests.api.v2 import test_api_assets from kpi.tests.base_test_case import BaseTestCase from kpi.tests.kpi_test_case import KpiTestCase from kpi.utils.xml import check_lxml_fromstring - EMPTY_SURVEY = {'survey': [], 'schema': SCHEMA_VERSION, 'settings': {}} @@ -102,17 +102,17 @@ def test_api_xml_export_auto_title(self): self.assertNotEqual(title_elts[0].text, '') def test_xml_export_group(self): - example_formbuilder_output = {'survey': [{"type": "begin_group", - "relevant": "", - "appearance": "", - "name": "group_hl3hw45", - "label": "Group 1 Label"}, - {"required": "true", - "type": "decimal", - "label": "Question 1 Label"}, - {"type": "end_group"}], - "settings": [{"form_title": "", - "form_id": "group_form"}]} + example_formbuilder_output = {'survey': [{'type': 'begin_group', + 'relevant': '', + 'appearance': '', + 'name': 'group_hl3hw45', + 'label': 'Group 1 Label'}, + {'required': 'true', + 'type': 'decimal', + 'label': 'Question 1 Label'}, + {'type': 'end_group'}], + 'settings': [{'form_title': '', + 'form_id': 'group_form'}]} self.login('someuser', 'someuser') asset = self.create_asset('', json.dumps(example_formbuilder_output), format='json') @@ -131,7 +131,7 @@ class ObjectRelationshipsTests(BaseTestCase): def setUp(self): self.client.login(username='someuser', password='someuser') self.user = User.objects.get(username='someuser') - self.surv = Asset.objects.create(content={'survey': [{"type": "text", "name": "q1"}]}, + self.surv = Asset.objects.create(content={'survey': [{'type': 'text', 'name': 'q1'}]}, owner=self.user, asset_type='survey') self.coll = Asset.objects.create( @@ -157,7 +157,7 @@ def test_collection_can_have_asset(self): """ _ = self.client.get(reverse('asset-detail', args=[self.surv.uid])) coll_req1 = self.client.get( - reverse("asset-detail", args=[self.coll.uid]) + reverse('asset-detail', args=[self.coll.uid]) ) self.assertEqual(self._count_children_by_kind( coll_req1.data['children'], self.surv.kind), 0) @@ -166,13 +166,13 @@ def test_collection_can_have_asset(self): self.surv.save() surv_req2 = self.client.get( - reverse("asset-detail", 
args=[self.surv.uid]) + reverse('asset-detail', args=[self.surv.uid]) ) - self.assertIn("parent", surv_req2.data) - self.assertIn(self.coll.uid, surv_req2.data["parent"]) + self.assertIn('parent', surv_req2.data) + self.assertIn(self.coll.uid, surv_req2.data['parent']) coll_req2 = self.client.get( - reverse("asset-detail", args=[self.coll.uid]) + reverse('asset-detail', args=[self.coll.uid]) ) self.assertEqual(self._count_children_by_kind( coll_req2.data['children'], self.surv.kind), 1) @@ -189,7 +189,7 @@ def test_add_asset_to_collection(self): surv_url = reverse('asset-detail', args=[self.surv.uid]) patch_req = self.client.patch( surv_url, - data={"parent": reverse("asset-detail", args=[self.coll.uid])}, + data={'parent': reverse('asset-detail', args=[self.coll.uid])}, ) self.assertEqual(patch_req.status_code, status.HTTP_200_OK) req = self.client.get(surv_url) @@ -207,7 +207,7 @@ def test_remove_asset_from_collection(self): surv_url = reverse('asset-detail', args=[self.surv.uid]) patch_req = self.client.patch( surv_url, - data={"parent": reverse("asset-detail", args=[self.coll.uid])}, + data={'parent': reverse('asset-detail', args=[self.coll.uid])}, ) self.assertEqual(patch_req.status_code, status.HTTP_200_OK) req = self.client.get(surv_url) diff --git a/kpi/tests/api/v1/test_api_submissions.py b/kpi/tests/api/v1/test_api_submissions.py index 0d3427a009..44f7cee41d 100644 --- a/kpi/tests/api/v1/test_api_submissions.py +++ b/kpi/tests/api/v1/test_api_submissions.py @@ -26,7 +26,7 @@ def test_retrieve_submission_with_partial_permissions_as_anotheruser(self): pass def test_list_submissions_as_owner(self): - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) expected_ids = [s['_id'] for s in self.submissions] response_ids = [r['_id'] for r in response.data] @@ -35,7 +35,7 @@ def test_list_submissions_as_owner(self): def test_list_submissions_shared_as_anotheruser(self): self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self.client.force_login(self.anotheruser) - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) expected_ids = [s['_id'] for s in self.submissions] response_ids = [r['_id'] for r in response.data] diff --git a/kpi/tests/api/v2/test_api_asset_counts.py b/kpi/tests/api/v2/test_api_asset_counts.py index d5e3ffe8bc..6b08d89156 100644 --- a/kpi/tests/api/v2/test_api_asset_counts.py +++ b/kpi/tests/api/v2/test_api_asset_counts.py @@ -1,5 +1,5 @@ -from django.urls import reverse from django.test import override_settings +from django.urls import reverse from rest_framework import status from kobo.apps.kobo_auth.shortcuts import User diff --git a/kpi/tests/api/v2/test_api_asset_usage.py b/kpi/tests/api/v2/test_api_asset_usage.py index 4f597c0e03..ed5197b227 100644 --- a/kpi/tests/api/v2/test_api_asset_usage.py +++ b/kpi/tests/api/v2/test_api_asset_usage.py @@ -1,8 +1,8 @@ import os import uuid from datetime import datetime -from dateutil.relativedelta import relativedelta +from dateutil.relativedelta import relativedelta from django.conf import settings from django.urls import reverse from rest_framework import status diff --git a/kpi/tests/api/v2/test_api_assets.py b/kpi/tests/api/v2/test_api_assets.py index 05ef5f7d13..2abee0542f 100644 --- 
a/kpi/tests/api/v2/test_api_assets.py +++ b/kpi/tests/api/v2/test_api_assets.py @@ -1,11 +1,11 @@ # coding: utf-8 import base64 import copy -import dateutil.parser import json import os from io import StringIO +import dateutil.parser from django.urls import reverse from django.utils import timezone from rest_framework import status @@ -89,13 +89,13 @@ def test_asset_list_matches_detail(self): self.assertDictEqual(expected_list_data, dict(list_result_detail)) def test_assets_hash(self): - another_user = User.objects.get(username="anotheruser") + another_user = User.objects.get(username='anotheruser') user_asset = Asset.objects.get(pk=1) user_asset.save() - user_asset.assign_perm(another_user, "view_asset") + user_asset.assign_perm(another_user, 'view_asset') self.client.logout() - self.client.login(username="anotheruser", password="anotheruser") + self.client.login(username='anotheruser', password='anotheruser') creation_response = self.create_asset() another_user_asset = another_user.assets.last() @@ -107,9 +107,9 @@ def test_assets_hash(self): ] versions_ids.sort() expected_hash = calculate_hash(''.join(versions_ids)) - hash_url = reverse("asset-hash") + hash_url = reverse('asset-hash') hash_response = self.client.get(hash_url) - self.assertEqual(hash_response.data.get("hash"), expected_hash) + self.assertEqual(hash_response.data.get('hash'), expected_hash) def test_assets_search_query(self): someuser = User.objects.get(username='someuser') @@ -1052,18 +1052,18 @@ def test_submission_count(self): self.asset.deploy(backend='mock', active=True) submissions = [ { - "__version__": self.asset.latest_deployed_version.uid, - "q1": "a1", - "q2": "a2", - "_id": 1, - "_submitted_by": "" + '__version__': self.asset.latest_deployed_version.uid, + 'q1': 'a1', + 'q2': 'a2', + '_id': 1, + '_submitted_by': '' }, { - "__version__": self.asset.latest_deployed_version.uid, - "q1": "a3", - "q2": "a4", - "_id": 2, - "_submitted_by": anotheruser.username + '__version__': self.asset.latest_deployed_version.uid, + 'q1': 'a3', + 'q2': 'a4', + '_id': 2, + '_submitted_by': anotheruser.username } ] @@ -1341,13 +1341,13 @@ def get_asset_file_content(self, url): def asset_file_payload(self): geojson_ = StringIO(json.dumps( { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [125.6, 10.1] + 'type': 'Feature', + 'geometry': { + 'type': 'Point', + 'coordinates': [125.6, 10.1] }, - "properties": { - "name": "Dinagat Islands" + 'properties': { + 'name': 'Dinagat Islands' } } )) diff --git a/kpi/tests/api/v2/test_api_attachments.py b/kpi/tests/api/v2/test_api_attachments.py index 460fa54960..e836343b70 100644 --- a/kpi/tests/api/v2/test_api_attachments.py +++ b/kpi/tests/api/v2/test_api_attachments.py @@ -11,8 +11,8 @@ ) from kpi.models import Asset from kpi.tests.base_test_case import BaseAssetTestCase -from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE from kpi.tests.utils.mock import guess_type_mock +from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE class AttachmentApiTests(BaseAssetTestCase): diff --git a/kpi/tests/api/v2/test_api_paired_data.py b/kpi/tests/api/v2/test_api_paired_data.py index ca49db2443..6546a7ed00 100644 --- a/kpi/tests/api/v2/test_api_paired_data.py +++ b/kpi/tests/api/v2/test_api_paired_data.py @@ -8,8 +8,8 @@ from kpi.constants import ( PERM_ADD_SUBMISSIONS, PERM_CHANGE_ASSET, - PERM_VIEW_ASSET, PERM_PARTIAL_SUBMISSIONS, + PERM_VIEW_ASSET, PERM_VIEW_SUBMISSIONS, ) from kpi.models import Asset @@ -37,7 +37,7 @@ def setUp(self): { 
'name': 'group_restaurant', 'type': 'begin_group', - "label": "Restaurant" + 'label': 'Restaurant' }, { 'name': 'favourite_restaurant', diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 34d50f86d4..93928269a0 100644 --- a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -5,20 +5,22 @@ import string import uuid from datetime import datetime + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo +from unittest import mock + import lxml -import mock import pytest import responses from django.conf import settings from django.urls import reverse -from django_digest.test import Client as DigestClient from rest_framework import status +from django_digest.test import Client as DigestClient from kobo.apps.audit_log.models import AuditLog, AuditType from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.models.instance import Instance @@ -26,8 +28,8 @@ from kobo.apps.openrosa.libs.utils.logger_tools import dict2xform from kpi.constants import ( ASSET_TYPE_SURVEY, - PERM_CHANGE_ASSET, PERM_ADD_SUBMISSIONS, + PERM_CHANGE_ASSET, PERM_CHANGE_SUBMISSIONS, PERM_DELETE_SUBMISSIONS, PERM_PARTIAL_SUBMISSIONS, @@ -39,15 +41,15 @@ ) from kpi.models import Asset from kpi.tests.base_test_case import BaseTestCase -from kpi.tests.utils.xml import get_form_and_submission_tag_names -from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE -from kpi.utils.object_permission import get_anonymous_user from kpi.tests.utils.mock import ( enketo_edit_instance_response, enketo_edit_instance_response_with_root_name_validation, enketo_edit_instance_response_with_uuid_validation, enketo_view_instance_response, ) +from kpi.tests.utils.xml import get_form_and_submission_tag_names +from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE +from kpi.utils.object_permission import get_anonymous_user from kpi.utils.xml import fromstring_preserve_root_xmlns, xml_tostring @@ -403,8 +405,8 @@ def test_cannot_create_submission(self): Test that no one can create submissions (with KPI endpoint) """ submission = { - "q1": "a5", - "q2": "a6", + 'q1': 'a5', + 'q2': 'a6', } # Owner response = self.client.post(self.submission_list_url, data=submission) @@ -426,7 +428,7 @@ def test_list_submissions_as_owner(self): someuser is the owner of the project. They can list their own data """ - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) response_ids = [r['_id'] for r in response.data.get('results')] submissions_ids = [s['_id'] for s in self.submissions] @@ -502,7 +504,7 @@ def test_list_submissions_not_shared_as_anotheruser(self): someuser's data existence should not be revealed. 
""" self.client.force_login(self.anotheruser) - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_list_submissions_shared_as_anotheruser(self): @@ -512,7 +514,7 @@ def test_list_submissions_shared_as_anotheruser(self): """ self.asset.assign_perm(self.anotheruser, PERM_VIEW_SUBMISSIONS) self.client.force_login(self.anotheruser) - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) response_ids = [r['_id'] for r in response.data.get('results')] submissions_ids = [s['_id'] for s in self.submissions] @@ -528,12 +530,12 @@ def test_list_submissions_with_partial_permissions_as_anotheruser(self): partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) self.asset.assign_perm(self.anotheruser, PERM_PARTIAL_SUBMISSIONS, partial_perms=partial_perms) - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) # User `anotheruser` should only see submissions where `submitted_by` @@ -551,7 +553,7 @@ def test_list_submissions_as_anonymous(self): someuser's data existence should not be revealed. """ self.client.logout() - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_list_submissions_asset_publicly_shared_as_anonymous(self): @@ -563,7 +565,7 @@ def test_list_submissions_asset_publicly_shared_as_anonymous(self): self.client.logout() anonymous_user = get_anonymous_user() self.asset.assign_perm(anonymous_user, PERM_VIEW_SUBMISSIONS) - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_list_submissions_asset_publicly_shared_as_authenticated_user(self): @@ -587,7 +589,7 @@ def test_list_submissions_asset_publicly_shared_as_authenticated_user(self): # `self.asset` is owned by `someuser`; `anotheruser` has no # explicitly-granted access to it self.asset.assign_perm(anonymous_user, PERM_VIEW_SUBMISSIONS) - response = self.client.get(self.submission_list_url, {"format": "json"}) + response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.asset.remove_perm(anonymous_user, PERM_VIEW_SUBMISSIONS) @@ -1115,13 +1117,13 @@ def test_attachments_rewrite(self): expected_new_download_urls = [ 'http://testserver/api/v2/assets/' + asset.uid - + f"/data/{submission_id}/attachments/{attachment_0_id}/?format=json", + + f'/data/{submission_id}/attachments/{attachment_0_id}/?format=json', 'http://testserver/api/v2/assets/' + asset.uid - + f"/data/{submission_id}/attachments/{attachment_1_id}/?format=json", + + f'/data/{submission_id}/attachments/{attachment_1_id}/?format=json', 'http://testserver/api/v2/assets/' + 
asset.uid - + f"/data/{submission_id}/attachments/{attachment_2_id}/?format=json", + + f'/data/{submission_id}/attachments/{attachment_2_id}/?format=json', ] for idx, attachment in enumerate(attachments): @@ -1576,12 +1578,12 @@ def test_edit_submission_with_xml_missing_uuids(self): submission_xml = self.asset.deployment.get_submissions( user=self.asset.owner, format_type=SUBMISSION_FORMAT_TYPE_XML, - query={"find_this": "hello!"}, + query={'find_this': 'hello!'}, )[0] submission_json = self.asset.deployment.get_submissions( user=self.asset.owner, format_type=SUBMISSION_FORMAT_TYPE_JSON, - query={"find_this": "hello!"}, + query={'find_this': 'hello!'}, )[0] submission_xml_root = fromstring_preserve_root_xmlns(submission_xml) @@ -1913,7 +1915,7 @@ def _check_duplicate(self, response, submission: dict = None): submission = submission if submission else self.submission duplicate_submission = response.data - expected_next_id = max((sub['_id'] for sub in self.submissions)) + 1 + expected_next_id = max(sub['_id'] for sub in self.submissions) + 1 assert submission['_id'] != duplicate_submission['_id'] assert duplicate_submission['_id'] == expected_next_id @@ -2970,7 +2972,7 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru # Get all submissions and ensure only the ones that anotheruser is # allowed to edit have been modified self.client.logout() - self.client.login(username="someuser", password="someuser") + self.client.login(username='someuser', password='someuser') response = self.client.get(self.submission_list_url) for submission in response.data['results']: validation_status = submission['_validation_status'] @@ -2992,13 +2994,13 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru class SubmissionGeoJsonApiTests(BaseTestCase): - fixtures = ["test_data"] + fixtures = ['test_data'] URL_NAMESPACE = ROUTER_URL_NAMESPACE def setUp(self): - self.client.login(username="someuser", password="someuser") - self.someuser = User.objects.get(username="someuser") + self.client.login(username='someuser', password='someuser') + self.someuser = User.objects.get(username='someuser') self.asset = a = Asset() a.name = 'Two points and one text' a.owner = self.someuser diff --git a/kpi/tests/kpi_test_case.py b/kpi/tests/kpi_test_case.py index 2b005d6c54..67153868ed 100644 --- a/kpi/tests/kpi_test_case.py +++ b/kpi/tests/kpi_test_case.py @@ -13,11 +13,12 @@ from kpi.constants import ASSET_TYPE_COLLECTION +from ..models.asset import Asset +from ..models.object_permission import ObjectPermission + # FIXME: Remove the following line when the permissions API is in place. 
from .base_test_case import BaseTestCase from .test_permissions import BasePermissionsTestCase -from ..models.asset import Asset -from ..models.object_permission import ObjectPermission class KpiTestCase(BaseTestCase, BasePermissionsTestCase): @@ -95,7 +96,7 @@ def create_collection(self, name, owner=None, owner_password=None, kwargs.update({'name': name, 'asset_type': ASSET_TYPE_COLLECTION}) response = self.client.post( - reverse(self._get_endpoint("asset-list")), kwargs + reverse(self._get_endpoint('asset-list')), kwargs ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) diff --git a/kpi/tests/test_asset_content.py b/kpi/tests/test_asset_content.py index 5f8bedb7cc..06c8486f1f 100644 --- a/kpi/tests/test_asset_content.py +++ b/kpi/tests/test_asset_content.py @@ -247,9 +247,9 @@ def _name_to_autoname(rows): ] assert _name_to_autoname([{'label': x} for x in [ - "What is your favorite all-time place to go swimming?", - "What is your favorite all-time place to go running?", - "What is your favorite all-time place to go to relax?", + 'What is your favorite all-time place to go swimming?', + 'What is your favorite all-time place to go running?', + 'What is your favorite all-time place to go to relax?', ]]) == ['What_is_your_favorit_place_to_go_swimming', 'What_is_your_favorit_place_to_go_running', 'What_is_your_favorit_place_to_go_to_relax'] diff --git a/kpi/tests/test_asset_versions.py b/kpi/tests/test_asset_versions.py index 1e0a524c01..a6e1816815 100644 --- a/kpi/tests/test_asset_versions.py +++ b/kpi/tests/test_asset_versions.py @@ -2,6 +2,7 @@ import json from copy import deepcopy from datetime import datetime + try: from zoneinfo import ZoneInfo except ImportError: @@ -9,13 +10,13 @@ from django.test import TestCase from django.utils import timezone -from formpack.utils.expand_content import SCHEMA_VERSION +from formpack.utils.expand_content import SCHEMA_VERSION from kobo.apps.kobo_auth.shortcuts import User from kpi.exceptions import BadAssetTypeException from kpi.utils.hash import calculate_hash -from ..models import Asset -from ..models import AssetVersion + +from ..models import Asset, AssetVersion class AssetVersionTestCase(TestCase): diff --git a/kpi/tests/test_mock_data.py b/kpi/tests/test_mock_data.py index a7de62bc7d..3c21b5805b 100644 --- a/kpi/tests/test_mock_data.py +++ b/kpi/tests/test_mock_data.py @@ -1,10 +1,10 @@ # coding: utf-8 -from copy import deepcopy from collections import OrderedDict +from copy import deepcopy from django.test import TestCase -from formpack import FormPack +from formpack import FormPack from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.reports import report_data from kpi.models import Asset @@ -211,30 +211,30 @@ SUBMISSION_DATA = OrderedDict( [ - ("start", ["2016-06-0%dT12:00:00.000-04:00" % n for n in [1, 2, 3, 4]]), + ('start', ['2016-06-0%dT12:00:00.000-04:00' % n for n in [1, 2, 3, 4]]), ( - "end", - ["2016-06-0%dT11:0%d:00.000-04:00" % (n, n) for n in [1, 2, 3, 4]], + 'end', + ['2016-06-0%dT11:0%d:00.000-04:00' % (n, n) for n in [1, 2, 3, 4]], ), - ("Select_one", ["option_1", "option_1", "option_2", "option_1"]), - ("Select_Many", ["option_1", "option_2", "option_1 option_2", ""]), - ("Text", ["a", "b", "c", "a"]), - ("Number", [1, 2, 3, 2]), - ("Decimal", [1.5, 2.5, 3.5, 3.5]), - ("Date", ["2016-06-0%d" % n for n in [1, 2, 3, 5]]), - ("Time", ["%d:00:00" % n for n in [1, 2, 3, 5]]), + ('Select_one', ['option_1', 'option_1', 'option_2', 'option_1']), + ('Select_Many', ['option_1', 'option_2', 'option_1 option_2', 
'']), + ('Text', ['a', 'b', 'c', 'a']), + ('Number', [1, 2, 3, 2]), + ('Decimal', [1.5, 2.5, 3.5, 3.5]), + ('Date', ['2016-06-0%d' % n for n in [1, 2, 3, 5]]), + ('Time', ['%d:00:00' % n for n in [1, 2, 3, 5]]), ( - "Date_and_time", - ["2016-06-0%dT12:00:00.000-04:00" % n for n in [1, 2, 3, 5]], + 'Date_and_time', + ['2016-06-0%dT12:00:00.000-04:00' % n for n in [1, 2, 3, 5]], ), - ("GPS", ["1%d.43 -2%d.54 1 0" % (n, n) for n in [5, 7, 8, 5]]), - ("Photo", ["photo_%d.jpg" % (n) for n in [1, 2, 3, 4]]), - ("Audio", ["audio_%d.jpg" % (n) for n in [4, 3, 2, 1]]), - ("Video", ["video_%d.jpg" % (n) for n in [6, 7, 8, 9]]), - ("Note_Should_not_be_displayed", [None, None, None, None]), - ("Barcode", ["barcode%d" % (n) for n in [9, 7, 7, 6]]), - ("Acknowledge", [None, None, None, None]), - ("calculation", ["1", "1", "1", "1"]), + ('GPS', ['1%d.43 -2%d.54 1 0' % (n, n) for n in [5, 7, 8, 5]]), + ('Photo', ['photo_%d.jpg' % (n) for n in [1, 2, 3, 4]]), + ('Audio', ['audio_%d.jpg' % (n) for n in [4, 3, 2, 1]]), + ('Video', ['video_%d.jpg' % (n) for n in [6, 7, 8, 9]]), + ('Note_Should_not_be_displayed', [None, None, None, None]), + ('Barcode', ['barcode%d' % (n) for n in [9, 7, 7, 6]]), + ('Acknowledge', [None, None, None, None]), + ('calculation', ['1', '1', '1', '1']), ] ) @@ -294,23 +294,23 @@ def test_kobo_apps_reports_report_data(self): self.asset, submission_stream=self.submissions ) expected_names = [ - "start", - "end", - "Select_one", - "Select_Many", - "Text", - "Number", - "Decimal", - "Date", - "Time", - "Date_and_time", - "GPS", - "Photo", - "Audio", - "Video", - "Barcode", - "Acknowledge", - "calculation", + 'start', + 'end', + 'Select_one', + 'Select_Many', + 'Text', + 'Number', + 'Decimal', + 'Date', + 'Time', + 'Date_and_time', + 'GPS', + 'Photo', + 'Audio', + 'Video', + 'Barcode', + 'Acknowledge', + 'calculation', ] self.assertEqual([v['name'] for v in values], expected_names) self.assertEqual(len(values), 17) @@ -318,8 +318,8 @@ def test_kobo_apps_reports_report_data(self): def test_kobo_apps_reports_report_data_split_by(self): values = report_data.data_by_identifiers( self.asset, - split_by="Select_one", - field_names=["Date"], + split_by='Select_one', + field_names=['Date'], submission_stream=self.submissions, ) self.assertEqual( @@ -363,9 +363,9 @@ def test_kobo_apps_reports_report_data_split_by(self): def test_kobo_apps_reports_report_data_split_by_translated(self): values = report_data.data_by_identifiers( self.asset, - split_by="Select_one", - lang="Arabic", - field_names=["Date"], + split_by='Select_one', + lang='Arabic', + field_names=['Date'], submission_stream=self.submissions, ) responses = set() @@ -491,10 +491,10 @@ def _get_autoreport_values(qname, key, lang=None, index=False): self.assertEqual( _get_autoreport_values('Date', 'percentage', None), [ - ("2016-06-01", 25.0), - ("2016-06-02", 25.0), - ("2016-06-03", 25.0), - ("2016-06-05", 25.0), + ('2016-06-01', 25.0), + ('2016-06-02', 25.0), + ('2016-06-03', 25.0), + ('2016-06-05', 25.0), ], ) diff --git a/kpi/tests/test_mock_data_conflicting_version_exports.py b/kpi/tests/test_mock_data_conflicting_version_exports.py index 38fff9b67c..19ae9951f1 100644 --- a/kpi/tests/test_mock_data_conflicting_version_exports.py +++ b/kpi/tests/test_mock_data_conflicting_version_exports.py @@ -2,8 +2,8 @@ import itertools from collections import defaultdict -from django.urls import reverse from django.test import TestCase +from django.urls import reverse from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.reports import 
report_data @@ -57,7 +57,7 @@ def setUp(self): ] = fields_values @staticmethod - def _split_formpack_csv(line, sep=";", quote='"'): + def _split_formpack_csv(line, sep=';', quote='"'): return [field.strip(quote) for field in to_str(line).split(sep)] def test_csv_export(self): diff --git a/kpi/tests/test_mock_data_exports.py b/kpi/tests/test_mock_data_exports.py index fdc9986c74..b1d9da1f58 100644 --- a/kpi/tests/test_mock_data_exports.py +++ b/kpi/tests/test_mock_data_exports.py @@ -2,17 +2,19 @@ import os import zipfile from collections import defaultdict + try: from zoneinfo import ZoneInfo except ImportError: from backports.zoneinfo import ZoneInfo import datetime -import mock +from unittest import mock + import openpyxl from django.conf import settings -from django.urls import reverse from django.test import TestCase +from django.urls import reverse from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.reports import report_data @@ -23,8 +25,8 @@ PERM_VIEW_SUBMISSIONS, ) from kpi.models import Asset, ExportTask -from kpi.utils.object_permission import get_anonymous_user from kpi.utils.mongo_helper import drop_mock_only +from kpi.utils.object_permission import get_anonymous_user class MockDataExportsBase(TestCase): @@ -579,7 +581,7 @@ def test_csv_export_hierarchy_in_labels(self): self.run_csv_export_test(expected_lines, export_options) def test_csv_export_filter_fields(self): - export_options = {'fields': ["start", "end", "Do_you_descend_from_unicellular_organism", "_index"]} + export_options = {'fields': ['start', 'end', 'Do_you_descend_from_unicellular_organism', '_index']} expected_lines = [ '"start";"end";"Do you descend from an ancestral unicellular organism?";"_uuid";"_index"', '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"No";"48583952-1892-4931-8d9c-869e7b49bafb";"1"', diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py index ef85d70ee5..abf4733862 100644 --- a/kpi/tests/test_usage_calculator.py +++ b/kpi/tests/test_usage_calculator.py @@ -14,8 +14,8 @@ from kobo.apps.trackers.models import NLPUsageCounter from kpi.models import Asset from kpi.tests.base_test_case import BaseAssetTestCase -from kpi.utils.usage_calculator import ServiceUsageCalculator from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE +from kpi.utils.usage_calculator import ServiceUsageCalculator class BaseServiceUsageTestCase(BaseAssetTestCase): diff --git a/kpi/tests/test_utils.py b/kpi/tests/test_utils.py index 65a1092eb4..bc295b1d93 100644 --- a/kpi/tests/test_utils.py +++ b/kpi/tests/test_utils.py @@ -8,12 +8,15 @@ from django.test import TestCase from kpi.exceptions import ( - SearchQueryTooShortException, QueryParserNotSupportedFieldLookup, + SearchQueryTooShortException, ) from kpi.tests.utils.dicts import convert_hierarchical_keys_to_nested_dict -from kpi.utils.autoname import autoname_fields, autoname_fields_to_field -from kpi.utils.autoname import autovalue_choices_in_place +from kpi.utils.autoname import ( + autoname_fields, + autoname_fields_to_field, + autovalue_choices_in_place, +) from kpi.utils.pyxform_compatibility import allow_choice_duplicates from kpi.utils.query_parser import parse from kpi.utils.sluggify import sluggify, sluggify_label @@ -182,15 +185,15 @@ def test_sluggify(self): def test_sluggify_label(self): inp_exps = [ - [["asdf jkl"], "asdf_jkl"], - [["asdf", ["asdf"]], "asdf_001"], - [["2. asdf"], "_2_asdf"], - [["2. 
asdf", ["_2_asdf"]], "_2_asdf_001"], - [["asdf#123"], "asdf_123"], - [[" hello "], "hello"], + [['asdf jkl'], 'asdf_jkl'], + [['asdf', ['asdf']], 'asdf_001'], + [['2. asdf'], '_2_asdf'], + [['2. asdf', ['_2_asdf']], '_2_asdf_001'], + [['asdf#123'], 'asdf_123'], + [[' hello '], 'hello'], # FIX THIS when we come up with a better way to summarize # arabic and cyrillic text - [["أين السوق؟", ["_", "__001"]], "__002"] + [['أين السوق؟', ['_', '__001']], '__002'] ] for inps, expected in inp_exps: inp = inps[0] @@ -290,7 +293,7 @@ def test_autovalue_does_not_change_when_name_exists(self): self.assertEqual(surv['choices'][0]['$autovalue'], 'A__B_C') self.assertEqual(surv['choices'][1]['$autovalue'], 'A_B_C') - def test_autovalue_choices(self): + def test_autovalue_choices_with_different_name_and_label(self): surv = { 'choices': [ {'list_name': 'xxx', 'label': 'A B C', 'name': 'D_E_F'}, @@ -320,11 +323,11 @@ def test_autovalue_choices_arabic(self): self.assertEqual(surv['choices'][1]['$autovalue'], part1 + part2) def test_query_parser(self): - query_string = ''' + query_string = """ (a:a OR b:b AND c:can't) AND d:do"you"say OR ( snakes:🐍🐍 AND NOT alphabet:🍲soup ) NOT 'in a house' NOT "with a mouse" - ''' + """ default_field_lookups = [ 'field_a__icontains', @@ -619,21 +622,21 @@ def test_strip_xml_nodes_by_xpaths_with_slashes(self): ) def test_get_or_create_element(self): - initial_xml_with_ns = ''' + initial_xml_with_ns = """ uuid:abc-123 - ''' - expected_xml_with_ns_after_modification = ''' + """ + expected_xml_with_ns_after_modification = """ uuid:def-456 uuid:abc-123 - ''' + """ initial_xml_without_ns = initial_xml_with_ns.replace( ' xmlns="http://opendatakit.org/submissions"', '' @@ -683,7 +686,7 @@ def test_edit_submission_xml(self): } for k, v in update_data.items(): edit_submission_xml(xml_parsed, k, v) - xml_expected = ''' + xml_expected = """ @@ -727,7 +730,7 @@ def test_edit_submission_xml(self): - ''' + """ self.__compare_xml(xml_tostring(xml_parsed), xml_expected) def __compare_xml(self, source: str, target: str) -> bool: diff --git a/kpi/tests/utils/xml.py b/kpi/tests/utils/xml.py index 827193b1f1..1df962c5e7 100644 --- a/kpi/tests/utils/xml.py +++ b/kpi/tests/utils/xml.py @@ -1,6 +1,7 @@ from __future__ import annotations from lxml import etree + from kpi.utils.xml import check_lxml_fromstring diff --git a/kpi/urls/__init__.py b/kpi/urls/__init__.py index 9cf2ce732b..27d09f68c5 100644 --- a/kpi/urls/__init__.py +++ b/kpi/urls/__init__.py @@ -1,19 +1,23 @@ # coding: utf-8 import private_storage.urls from django.conf import settings -from django.urls import include, re_path, path +from django.urls import include, path, re_path from django.views.i18n import JavaScriptCatalog from hub.models import ConfigurationFile -from kpi.views import authorized_application_authenticate_user -from kpi.views import home, browser_tests, modern_browsers -from kpi.views.environment import EnvironmentView +from kpi.views import ( + authorized_application_authenticate_user, + browser_tests, + home, + modern_browsers, +) from kpi.views.current_user import CurrentUserViewSet +from kpi.views.environment import EnvironmentView from kpi.views.token import TokenView -from .router_api_v1 import router_api_v1 -from .router_api_v2 import router_api_v2, URL_NAMESPACE from ..views.v2.logout import logout_from_all_devices +from .router_api_v1 import router_api_v1 +from .router_api_v2 import URL_NAMESPACE, router_api_v2 # TODO: Give other apps their own `urls.py` files instead of importing their # views directly! 
See diff --git a/kpi/urls/router_api_v1.py b/kpi/urls/router_api_v1.py index 368101a9e3..24cc000c46 100644 --- a/kpi/urls/router_api_v1.py +++ b/kpi/urls/router_api_v1.py @@ -3,22 +3,21 @@ from kobo.apps.hook.views.v1.hook import HookViewSet from kobo.apps.hook.views.v1.hook_log import HookLogViewSet - from kobo.apps.reports.views import ReportsViewSet from kpi.views.v1 import ( - AssetViewSet, - AssetVersionViewSet, - AssetSnapshotViewSet, AssetFileViewSet, + AssetSnapshotViewSet, + AssetVersionViewSet, + AssetViewSet, AuthorizedApplicationUserViewSet, ExportTaskViewSet, ImportTaskViewSet, ObjectPermissionViewSet, SitewideMessageViewSet, SubmissionViewSet, - UserViewSet, - UserAssetSubscriptionViewSet, TagViewSet, + UserAssetSubscriptionViewSet, + UserViewSet, ) router_api_v1 = ExtendedDefaultRouter() diff --git a/kpi/utils/files.py b/kpi/utils/files.py index f732dd83d6..7966b5b826 100644 --- a/kpi/utils/files.py +++ b/kpi/utils/files.py @@ -1,7 +1,7 @@ -import os import mimetypes -# from mimetypes import guess_type +import os +# from mimetypes import guess_type from django.core.files.base import ContentFile diff --git a/kpi/utils/monkey_patching.py b/kpi/utils/monkey_patching.py index d0bc364fff..506cec40ba 100644 --- a/kpi/utils/monkey_patching.py +++ b/kpi/utils/monkey_patching.py @@ -1,11 +1,13 @@ import django.contrib.auth.management import django.db.models.deletion -from django.db import router from django.conf import settings from django.contrib.auth.management import ( - create_permissions as django_create_permissions, DEFAULT_DB_ALIAS, ) +from django.contrib.auth.management import ( + create_permissions as django_create_permissions, +) +from django.db import router from kobo.apps.openrosa.libs.constants import OPENROSA_APP_LABELS from kpi.constants import SHARED_APP_LABELS diff --git a/kpi/utils/project_view_exports.py b/kpi/utils/project_view_exports.py index 5655f4496f..b14ef2a652 100644 --- a/kpi/utils/project_view_exports.py +++ b/kpi/utils/project_view_exports.py @@ -1,5 +1,6 @@ # coding: utf-8 from __future__ import annotations + import csv from io import StringIO from typing import Union @@ -14,7 +15,6 @@ from kpi.models import Asset from kpi.utils.project_views import get_region_for_view - ASSET_FIELDS = ( 'id', 'uid', diff --git a/kpi/utils/usage_calculator.py b/kpi/utils/usage_calculator.py index 71d3d24175..cee15a9798 100644 --- a/kpi/utils/usage_calculator.py +++ b/kpi/utils/usage_calculator.py @@ -1,7 +1,7 @@ from typing import Optional from django.conf import settings -from django.db.models import Sum, Q +from django.db.models import Q, Sum from django.db.models.functions import Coalesce from django.utils import timezone diff --git a/kpi/views/environment.py b/kpi/views/environment.py index 0ca636ab1a..31d2200eac 100644 --- a/kpi/views/environment.py +++ b/kpi/views/environment.py @@ -9,17 +9,17 @@ from django.db.models import Q from django.utils.translation import gettext_lazy as t from markdown import markdown -from hub.models.sitewide_message import SitewideMessage from rest_framework.response import Response from rest_framework.views import APIView +from hub.models.sitewide_message import SitewideMessage from hub.utils.i18n import I18nUtils -from kobo.apps.organizations.models import OrganizationOwner -from kobo.apps.stripe.constants import FREE_TIER_NO_THRESHOLDS, FREE_TIER_EMPTY_DISPLAY -from kobo.static_lists import COUNTRIES from kobo.apps.accounts.mfa.models import MfaAvailableToUser from kobo.apps.constance_backends.utils import to_python_object from 
kobo.apps.hook.constants import SUBMISSION_PLACEHOLDER +from kobo.apps.organizations.models import OrganizationOwner +from kobo.apps.stripe.constants import FREE_TIER_EMPTY_DISPLAY, FREE_TIER_NO_THRESHOLDS +from kobo.static_lists import COUNTRIES from kpi.utils.object_permission import get_database_user diff --git a/kpi/views/v2/asset.py b/kpi/views/v2/asset.py index b44de7640e..e9f0086148 100644 --- a/kpi/views/v2/asset.py +++ b/kpi/views/v2/asset.py @@ -1,7 +1,7 @@ # coding: utf-8 import copy import json -from collections import defaultdict, OrderedDict +from collections import OrderedDict, defaultdict from operator import itemgetter from django.db.models import Count @@ -13,15 +13,14 @@ from rest_framework_extensions.mixins import NestedViewSetMixin from kpi.constants import ( - ASSET_TYPES, ASSET_TYPE_ARG_NAME, ASSET_TYPE_SURVEY, ASSET_TYPE_TEMPLATE, + ASSET_TYPES, CLONE_ARG_NAME, CLONE_COMPATIBLE_TYPES, CLONE_FROM_VERSION_ID_ARG_NAME, ) -from kpi.deployment_backends.backends import DEPLOYMENT_BACKENDS from kpi.exceptions import ( BadAssetTypeException, ) @@ -31,17 +30,17 @@ SearchFilter, ) from kpi.highlighters import highlight_xform +from kpi.mixins.object_permission import ObjectPermissionViewSetMixin from kpi.models import ( Asset, UserAssetSubscription, ) -from kpi.mixins.object_permission import ObjectPermissionViewSetMixin from kpi.paginators import AssetPagination from kpi.permissions import ( - get_perm_name, AssetPermission, PostMappedToChangePermission, ReportPermission, + get_perm_name, ) from kpi.renderers import ( AssetJsonRenderer, @@ -49,21 +48,21 @@ XFormRenderer, XlsRenderer, ) -from kpi.serializers.v2.deployment import DeploymentSerializer from kpi.serializers.v2.asset import ( AssetBulkActionsSerializer, AssetListSerializer, AssetSerializer, ) +from kpi.serializers.v2.deployment import DeploymentSerializer from kpi.serializers.v2.reports import ReportsDetailSerializer from kpi.utils.bugfix import repair_file_column_content_and_save from kpi.utils.hash import calculate_hash from kpi.utils.kobo_to_xlsform import to_xlsform_structure -from kpi.utils.ss_structure_to_mdtable import ss_structure_to_mdtable from kpi.utils.object_permission import ( get_database_user, get_objects_for_user, ) +from kpi.utils.ss_structure_to_mdtable import ss_structure_to_mdtable class AssetViewSet( @@ -740,7 +739,7 @@ def hash(self, request): accessible_assets = ( get_objects_for_user(user, 'view_asset', Asset) .filter(asset_type=ASSET_TYPE_SURVEY) - .order_by("uid") + .order_by('uid') ) assets_version_ids = [ @@ -904,12 +903,12 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update): # Because we're updating an asset from another which can have another type, # we need to remove `asset_type` from clone data to ensure it's not updated # when serializer is initialized. - cloned_data.pop("asset_type", None) + cloned_data.pop('asset_type', None) else: # Change asset_type if needed. 
- cloned_data["asset_type"] = self.request.data.get(ASSET_TYPE_ARG_NAME, original_asset.asset_type) + cloned_data['asset_type'] = self.request.data.get(ASSET_TYPE_ARG_NAME, original_asset.asset_type) - cloned_asset_type = cloned_data.get("asset_type") + cloned_asset_type = cloned_data.get('asset_type') # Settings are: Country, Description, Sector and Share-metadata # Copy settings only when original_asset is `survey` or `template` # and `asset_type` property of `cloned_data` is `survey` or `template` @@ -918,9 +917,9 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update): original_asset.asset_type in [ASSET_TYPE_TEMPLATE, ASSET_TYPE_SURVEY]: settings = original_asset.settings.copy() - settings.pop("share-metadata", None) + settings.pop('share-metadata', None) - cloned_data_settings = cloned_data.get("settings", {}) + cloned_data_settings = cloned_data.get('settings', {}) # Depending of the client payload. settings can be JSON or string. # if it's a string. Let's load it to be able to merge it. @@ -932,10 +931,10 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update): # until we get content passed as a dict, transform the content obj to a str # TODO, verify whether `Asset.content.settings.id_string` should be cleared out. - cloned_data["content"] = json.dumps(cloned_data.get("content")) + cloned_data['content'] = json.dumps(cloned_data.get('content')) return cloned_data else: - raise BadAssetTypeException("Destination type is not compatible with source type") + raise BadAssetTypeException('Destination type is not compatible with source type') def _validate_destination_type(self, original_asset_): """ diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py index 9a964b6ea4..2807eeaf77 100644 --- a/kpi/views/v2/asset_snapshot.py +++ b/kpi/views/v2/asset_snapshot.py @@ -3,7 +3,7 @@ import requests from django.conf import settings -from django.http import HttpResponseRedirect, Http404 +from django.http import Http404, HttpResponseRedirect from rest_framework import renderers, serializers, status from rest_framework.decorators import action from rest_framework.response import Response @@ -17,7 +17,7 @@ from kpi.exceptions import SubmissionIntegrityError from kpi.filters import RelatedAssetPermissionsFilter from kpi.highlighters import highlight_xform -from kpi.models import AssetSnapshot, AssetFile, PairedData +from kpi.models import AssetFile, AssetSnapshot, PairedData from kpi.permissions import EditSubmissionPermission from kpi.renderers import ( OpenRosaFormListRenderer, diff --git a/kpi/views/v2/attachment.py b/kpi/views/v2/attachment.py index fb900bbb18..43bbbe3fe9 100644 --- a/kpi/views/v2/attachment.py +++ b/kpi/views/v2/attachment.py @@ -4,7 +4,7 @@ from django.conf import settings from django.shortcuts import Http404 from django.utils.translation import gettext as t -from rest_framework import viewsets, serializers +from rest_framework import serializers, viewsets from rest_framework.decorators import action from rest_framework.response import Response from rest_framework_extensions.mixins import NestedViewSetMixin diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py index 38f473cf56..cb894e3b73 100644 --- a/kpi/views/v2/data.py +++ b/kpi/views/v2/data.py @@ -27,12 +27,12 @@ ) from kpi.authentication import EnketoSessionAuthentication from kpi.constants import ( - SUBMISSION_FORMAT_TYPE_JSON, - SUBMISSION_FORMAT_TYPE_XML, PERM_CHANGE_SUBMISSIONS, PERM_DELETE_SUBMISSIONS, PERM_VALIDATE_SUBMISSIONS, 
PERM_VIEW_SUBMISSIONS, + SUBMISSION_FORMAT_TYPE_JSON, + SUBMISSION_FORMAT_TYPE_XML, ) from kpi.exceptions import ( InvalidXFormException, @@ -52,6 +52,7 @@ SubmissionGeoJsonRenderer, SubmissionXMLRenderer, ) +from kpi.serializers.v2.data import DataBulkActionsValidator from kpi.utils.log import logging from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin from kpi.utils.xml import ( @@ -59,7 +60,6 @@ get_or_create_element, xml_tostring, ) -from kpi.serializers.v2.data import DataBulkActionsValidator class DataViewSet( @@ -687,7 +687,7 @@ def _filter_mongo_query(self, request): """ filters = {} - if request.method == "GET": + if request.method == 'GET': filters = request.GET.dict() # Remove `format` from filters. No need to use it diff --git a/kpi/views/v2/paired_data.py b/kpi/views/v2/paired_data.py index f73e948b48..87b4b6d513 100644 --- a/kpi/views/v2/paired_data.py +++ b/kpi/views/v2/paired_data.py @@ -15,11 +15,11 @@ AssetEditorPermission, XMLExternalDataPermission, ) -from kpi.serializers.v2.paired_data import PairedDataSerializer from kpi.renderers import SubmissionXMLRenderer +from kpi.serializers.v2.paired_data import PairedDataSerializer from kpi.utils.hash import calculate_hash from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin -from kpi.utils.xml import strip_nodes, add_xml_declaration +from kpi.utils.xml import add_xml_declaration, strip_nodes class PairedDataViewset( diff --git a/kpi/views/v2/user.py b/kpi/views/v2/user.py index 933daf49a7..9db21b12cb 100644 --- a/kpi/views/v2/user.py +++ b/kpi/views/v2/user.py @@ -1,13 +1,13 @@ from rest_framework import exceptions, mixins, renderers, status, viewsets from rest_framework.decorators import action +from rest_framework.pagination import LimitOffsetPagination from rest_framework.response import Response from rest_framework.reverse import reverse -from rest_framework.pagination import LimitOffsetPagination from kobo.apps.kobo_auth.shortcuts import User from kpi.filters import SearchFilter from kpi.permissions import IsAuthenticated -from kpi.serializers.v2.user import UserSerializer, UserListSerializer +from kpi.serializers.v2.user import UserListSerializer, UserSerializer from kpi.tasks import sync_kobocat_xforms From 87c2a6c1497cb8a8e3b88cd5e39490820b3a2e9d Mon Sep 17 00:00:00 2001 From: Olivier Leger Date: Mon, 7 Oct 2024 11:57:10 -0400 Subject: [PATCH 118/119] linting: Line too long --- hub/admin/extend_user.py | 42 +- kobo/apps/__init__.py | 4 +- kobo/apps/accounts/mfa/models.py | 16 +- kobo/apps/audit_log/models.py | 14 +- kobo/apps/audit_log/signals.py | 1 - kobo/apps/audit_log/tests/test_models.py | 4 +- .../audit_log/tests/test_one_time_auth.py | 8 +- kobo/apps/audit_log/urls.py | 4 +- kobo/apps/audit_log/views.py | 1 - kobo/apps/hook/exceptions.py | 1 - kobo/apps/hook/models/hook_log.py | 5 +- .../models/service_definition_interface.py | 28 +- kobo/apps/hook/tasks.py | 13 +- kobo/apps/hook/tests/hook_test_case.py | 44 +- kobo/apps/hook/tests/test_api_hook.py | 149 +++-- kobo/apps/hook/tests/test_email.py | 20 +- kobo/apps/hook/tests/test_parser.py | 1 - kobo/apps/hook/tests/test_ssrf.py | 3 +- kobo/apps/hook/tests/test_utils.py | 2 +- kobo/apps/hook/utils/lazy.py | 1 + kobo/apps/hook/views/v2/hook.py | 4 +- kobo/apps/hook/views/v2/hook_log.py | 7 +- kobo/apps/kobo_auth/models.py | 4 +- kobo/apps/kobo_auth/signals.py | 5 +- .../tests/viewsets/test_abstract_viewset.py | 11 +- .../tests/viewsets/test_attachment_viewset.py | 3 +- .../api/tests/viewsets/test_data_viewset.py | 7 +- 
.../tests/viewsets/test_metadata_viewset.py | 8 +- .../apps/api/tests/viewsets/test_user.py | 1 - .../viewsets/test_xform_submission_api.py | 11 +- .../api/tests/viewsets/test_xform_viewset.py | 5 +- kobo/apps/openrosa/apps/api/tools.py | 38 +- .../apps/api/viewsets/connect_viewset.py | 1 - .../apps/api/viewsets/data_viewset.py | 39 +- .../apps/api/viewsets/xform_list_api.py | 5 +- .../apps/api/viewsets/xform_submission_api.py | 1 - .../apps/api/viewsets/xform_viewset.py | 5 +- .../openrosa/apps/logger/models/attachment.py | 22 +- .../openrosa/apps/logger/models/instance.py | 4 +- .../apps/openrosa/apps/logger/models/xform.py | 25 +- kobo/apps/openrosa/apps/logger/signals.py | 6 +- .../apps/logger/tests/test_parsing.py | 81 ++- .../apps/logger/tests/test_publish_xls.py | 37 +- .../logger/tests/test_simple_submission.py | 3 +- .../openrosa/apps/logger/utils/counters.py | 19 +- .../apps/logger/utils/database_query.py | 18 +- .../openrosa/apps/logger/utils/instance.py | 23 +- .../apps/logger/xform_instance_parser.py | 17 +- .../openrosa/apps/main/models/user_profile.py | 4 +- .../openrosa/apps/main/tests/test_base.py | 6 +- .../apps/main/tests/test_past_bugs.py | 12 +- .../openrosa/apps/main/tests/test_process.py | 228 ++++---- .../test_user_id_string_unique_together.py | 5 +- .../apps/viewer/models/parsed_instance.py | 4 +- .../tests/mixins/make_submission_mixin.py | 21 +- kobo/apps/openrosa/libs/utils/logger_tools.py | 74 ++- .../resume_failed_transfers_2_024_25_fix.py | 10 +- .../apps/project_ownership/models/transfer.py | 9 +- kobo/apps/project_ownership/tasks.py | 7 +- .../tests/api/v2/test_api.py | 6 +- .../tests/test_transfer_status.py | 1 - kobo/apps/project_ownership/utils.py | 7 +- .../stripe/tests/test_organization_usage.py | 4 +- .../actions/automatic_transcription.py | 1 - kobo/apps/subsequences/actions/translation.py | 1 - kobo/apps/subsequences/api_view.py | 8 +- .../subsequences/integrations/google/base.py | 1 - .../integrations/google/google_transcribe.py | 1 - .../integrations/google/google_translate.py | 1 - kobo/apps/subsequences/models.py | 5 +- ...add_qual_to_last_question_of_last_asset.py | 9 +- .../subsequences/scripts/repop_known_cols.py | 4 +- .../tests/test_known_cols_utils.py | 76 ++- .../tests/test_submission_extras_api_post.py | 5 +- kobo/apps/subsequences/utils/__init__.py | 5 +- kobo/apps/subsequences/utils/deprecation.py | 1 + .../migrations/0001_initial.py | 7 +- kobo/apps/superuser_stats/tasks.py | 127 ++-- kobo/apps/trackers/tests/submission_utils.py | 19 +- kobo/apps/trash_bin/models/project.py | 13 +- kobo/apps/trash_bin/utils.py | 12 +- kobo/settings/base.py | 6 +- kpi/deployment_backends/base_backend.py | 10 +- kpi/deployment_backends/kc_access/utils.py | 39 +- kpi/deployment_backends/mock_backend.py | 28 +- kpi/deployment_backends/openrosa_backend.py | 291 ++++------ kpi/exceptions.py | 4 +- kpi/management/commands/sync_kobocat_perms.py | 4 +- .../commands/sync_kobocat_xforms.py | 22 +- .../0011_explode_asset_deployments.py | 20 +- .../0012_onetimeauthenticationkey.py | 6 +- kpi/mixins/formpack_xlsform_utils.py | 19 +- kpi/models/asset.py | 25 +- kpi/models/authorized_application.py | 8 +- kpi/models/import_export_task.py | 10 +- kpi/serializers/v2/asset.py | 10 +- kpi/serializers/v2/deployment.py | 8 +- kpi/tasks.py | 6 +- kpi/tests/api/v1/test_api_assets.py | 92 +-- kpi/tests/api/v1/test_api_submissions.py | 19 +- kpi/tests/api/v2/test_api_assets.py | 29 +- kpi/tests/api/v2/test_api_logout_all.py | 8 +- kpi/tests/api/v2/test_api_paired_data.py | 2 +- 
kpi/tests/api/v2/test_api_submissions.py | 139 +++-- kpi/tests/kpi_test_case.py | 4 +- kpi/tests/test_asset_content.py | 21 +- kpi/tests/test_asset_versions.py | 7 +- kpi/tests/test_mock_data.py | 25 +- ...t_mock_data_conflicting_version_exports.py | 4 +- kpi/tests/test_mock_data_exports.py | 547 ++++++++++++++++-- kpi/tests/test_usage_calculator.py | 5 +- kpi/tests/test_utils.py | 52 +- kpi/tests/utils/dicts.py | 3 +- kpi/tests/utils/xml.py | 4 +- kpi/urls/__init__.py | 10 +- kpi/utils/database.py | 24 +- kpi/utils/monkey_patching.py | 17 +- kpi/utils/project_view_exports.py | 6 +- kpi/utils/usage_calculator.py | 74 ++- kpi/views/environment.py | 4 +- kpi/views/v2/asset.py | 35 +- kpi/views/v2/asset_snapshot.py | 8 +- kpi/views/v2/data.py | 57 +- 123 files changed, 1698 insertions(+), 1479 deletions(-) diff --git a/hub/admin/extend_user.py b/hub/admin/extend_user.py index 5552e67887..47116f5ede 100644 --- a/hub/admin/extend_user.py +++ b/hub/admin/extend_user.py @@ -4,12 +4,8 @@ from django.conf import settings from django.contrib import admin, messages from django.contrib.auth.admin import UserAdmin -from django.contrib.auth.forms import ( - UserChangeForm as DjangoUserChangeForm, -) -from django.contrib.auth.forms import ( - UserCreationForm as DjangoUserCreationForm, -) +from django.contrib.auth.forms import UserChangeForm as DjangoUserChangeForm +from django.contrib.auth.forms import UserCreationForm as DjangoUserCreationForm from django.core.exceptions import ValidationError from django.db.models import Count, Sum from django.forms import CharField @@ -29,7 +25,6 @@ from kobo.apps.trash_bin.models.account import AccountTrash from kobo.apps.trash_bin.utils import move_to_trash from kpi.models.asset import AssetDeploymentStatus - from .filters import UserAdvancedSearchFilter from .mixins import AdvancedSearchMixin @@ -57,19 +52,18 @@ class UserChangeForm(DjangoUserChangeForm): def clean(self): cleaned_data = super().clean() is_active = cleaned_data['is_active'] - if ( - is_active - and AccountTrash.objects.filter(user_id=self.instance.pk).exists() - ): + if is_active and AccountTrash.objects.filter(user_id=self.instance.pk).exists(): url = reverse('admin:trash_bin_accounttrash_changelist') - raise ValidationError(mark_safe( - f'User is in trash and cannot be reactivated' - f' from here.' - )) - if cleaned_data.get('is_superuser', False) and not validate_superuser_auth(self.instance): raise ValidationError( - 'Superusers with a usable password must enable MFA.' + mark_safe( + f'User is in trash and cannot be reactivated' + f' from here.' 
+ ) ) + if cleaned_data.get('is_superuser', False) and not validate_superuser_auth( + self.instance + ): + raise ValidationError('Superusers with a usable password must enable MFA.') return cleaned_data @@ -94,13 +88,15 @@ class OrgInline(admin.StackedInline): 'is_admin', ] raw_id_fields = ('user', 'organization') - readonly_fields = ( - settings.STRIPE_ENABLED and ('active_subscription_status',) or [] - ) + readonly_fields = settings.STRIPE_ENABLED and ('active_subscription_status',) or [] def active_subscription_status(self, obj): if settings.STRIPE_ENABLED: - return obj.active_subscription_status if obj.active_subscription_status else 'None' + return ( + obj.active_subscription_status + if obj.active_subscription_status + else 'None' + ) def has_add_permission(self, request, obj=OrganizationUser): return False @@ -262,9 +258,7 @@ def monthly_submission_count(self, obj): user_id=obj.id, year=today.year, month=today.month, - ).aggregate( - counter=Sum('counter') - ) + ).aggregate(counter=Sum('counter')) return instances.get('counter') def _remove_or_delete( diff --git a/kobo/apps/__init__.py b/kobo/apps/__init__.py index 9c7365cede..0629fe2377 100644 --- a/kobo/apps/__init__.py +++ b/kobo/apps/__init__.py @@ -3,9 +3,7 @@ from django.core.checks import Tags, register import kpi.utils.monkey_patching # noqa -from kpi.utils.two_database_configuration_checker import ( - TwoDatabaseConfigurationChecker, -) +from kpi.utils.two_database_configuration_checker import TwoDatabaseConfigurationChecker class KpiConfig(AppConfig): diff --git a/kobo/apps/accounts/mfa/models.py b/kobo/apps/accounts/mfa/models.py index d590a026ab..0eed0464e7 100644 --- a/kobo/apps/accounts/mfa/models.py +++ b/kobo/apps/accounts/mfa/models.py @@ -3,12 +3,8 @@ from django.contrib import admin from django.db import models from django.utils.timezone import now -from trench.admin import ( - MFAMethod as TrenchMFAMethod, -) -from trench.admin import ( - MFAMethodAdmin as TrenchMFAMethodAdmin, -) +from trench.admin import MFAMethod as TrenchMFAMethod +from trench.admin import MFAMethodAdmin as TrenchMFAMethodAdmin from kobo.apps.openrosa.apps.main.models import UserProfile from kpi.models.abstract_models import AbstractTimeStampedModel @@ -70,9 +66,7 @@ def save( Update user's profile in KoBoCAT database. """ if not settings.TESTING and not created: - UserProfile.set_mfa_status( - user_id=self.user.pk, is_active=self.is_active - ) + UserProfile.set_mfa_status(user_id=self.user.pk, is_active=self.is_active) def delete(self, using=None, keep_parents=False): user_id = self.user.pk @@ -82,9 +76,7 @@ def delete(self, using=None, keep_parents=False): Update user's profile in KoboCAT database. 
""" if not settings.TESTING: - UserProfile.set_mfa_status( - user_id=user_id, is_active=False - ) + UserProfile.set_mfa_status(user_id=user_id, is_active=False) class MfaMethodAdmin(TrenchMFAMethodAdmin): diff --git a/kobo/apps/audit_log/models.py b/kobo/apps/audit_log/models.py index 81e1555a25..efb16b2910 100644 --- a/kobo/apps/audit_log/models.py +++ b/kobo/apps/audit_log/models.py @@ -139,14 +139,17 @@ def create_from_request( extra_metadata: dict = None, ): """ - Create an access log for a request, assigned to either the given user or request.user if not supplied + Create an access log for a request, assigned to either the given user or + request.user if not supplied - Note: Data passed in extra_metadata will override default values for the same key + Note: Data passed in extra_metadata will override default values for the + same key """ logged_in_user = user or request.user - # django-loginas will keep the superuser as the _cached_user while request.user is set to the new one - # sometimes there won't be a cached user at all, mostly in tests + # django-loginas will keep the superuser as the _cached_user while request.user + # is set to the new one sometimes there won't be a cached user at all, + # mostly in tests initial_user = getattr(request, '_cached_user', None) is_loginas_url = ( request.resolver_match is not None @@ -165,7 +168,8 @@ def create_from_request( ) is_loginas = is_loginas_url and user_changed if is_submission: - # Submissions are special snowflakes and need to be grouped together, no matter the auth type + # Submissions are special snowflakes and need to be grouped together, + # no matter the auth type auth_type = ACCESS_LOG_SUBMISSION_AUTH_TYPE elif authentication_type and authentication_type != '': # second option: auth type param diff --git a/kobo/apps/audit_log/signals.py b/kobo/apps/audit_log/signals.py index 0d128dea27..046ea518eb 100644 --- a/kobo/apps/audit_log/signals.py +++ b/kobo/apps/audit_log/signals.py @@ -2,7 +2,6 @@ from django.dispatch import receiver from kpi.utils.log import logging - from .models import AccessLog diff --git a/kobo/apps/audit_log/tests/test_models.py b/kobo/apps/audit_log/tests/test_models.py index f60e1b31b9..b01c1d891b 100644 --- a/kobo/apps/audit_log/tests/test_models.py +++ b/kobo/apps/audit_log/tests/test_models.py @@ -167,9 +167,7 @@ def test_create_auth_log_unknown_authenticator( }, ) - def test_create_auth_log_with_extra_metadata( - self, patched_ip, patched_source - ): + def test_create_auth_log_with_extra_metadata(self, patched_ip, patched_source): request = self._create_request( reverse('api_v2:asset-list'), AnonymousUser(), diff --git a/kobo/apps/audit_log/tests/test_one_time_auth.py b/kobo/apps/audit_log/tests/test_one_time_auth.py index fa1e473e93..67425410d8 100644 --- a/kobo/apps/audit_log/tests/test_one_time_auth.py +++ b/kobo/apps/audit_log/tests/test_one_time_auth.py @@ -128,7 +128,8 @@ def side_effect(request): def test_digest_auth_for_submission(self): """ - Test digest authentications for submissions result in an audit log being created with the 'Submission' type + Test digest authentications for submissions result in an audit log being created + with the 'Submission' type """ def side_effect(request): @@ -143,7 +144,7 @@ def side_effect(request): ): # assume the submission works, we don't actually care with patch( - 'kobo.apps.openrosa.apps.api.viewsets.xform_submission_api.XFormSubmissionApi.create', + 'kobo.apps.openrosa.apps.api.viewsets.xform_submission_api.XFormSubmissionApi.create', # noqa: E501 
return_value=HttpResponse(status=200), ): self.client.post(reverse('submissions'), **header) @@ -164,7 +165,8 @@ def test_authorized_application_auth_creates_log(self): **header, data={'username': 'test', 'password': 'test'}, ) - # this log should belong to the user, not the app, and have a bit of extra metadata + # this log should belong to the user, not the app, and have a bit of extra + # metadata access_log_qs = AuditLog.objects.filter( user_uid=TestOneTimeAuthentication.user.extra_details.uid, action=AuditAction.AUTH, diff --git a/kobo/apps/audit_log/urls.py b/kobo/apps/audit_log/urls.py index 91780fbef8..0d742a83e2 100644 --- a/kobo/apps/audit_log/urls.py +++ b/kobo/apps/audit_log/urls.py @@ -5,8 +5,6 @@ router = DefaultRouter() router.register(r'audit-logs', AuditLogViewSet, basename='audit-log') router.register(r'access-logs', AllAccessLogViewSet, basename='all-access-logs') -router.register( - r'access-logs/me', AccessLogViewSet, basename='access-log' -) +router.register(r'access-logs/me', AccessLogViewSet, basename='access-log') urlpatterns = [] diff --git a/kobo/apps/audit_log/views.py b/kobo/apps/audit_log/views.py index a7173bd906..5a81b54765 100644 --- a/kobo/apps/audit_log/views.py +++ b/kobo/apps/audit_log/views.py @@ -3,7 +3,6 @@ from kpi.filters import SearchFilter from kpi.permissions import IsAuthenticated - from .filters import AccessLogPermissionsFilter from .models import AccessLog, AuditAction, AuditLog from .permissions import SuperUserPermission diff --git a/kobo/apps/hook/exceptions.py b/kobo/apps/hook/exceptions.py index 1997f47c2e..885df6bc54 100644 --- a/kobo/apps/hook/exceptions.py +++ b/kobo/apps/hook/exceptions.py @@ -1,3 +1,2 @@ - class HookRemoteServerDownError(Exception): pass diff --git a/kobo/apps/hook/models/hook_log.py b/kobo/apps/hook/models/hook_log.py index 1cd4057d98..8cdd137050 100644 --- a/kobo/apps/hook/models/hook_log.py +++ b/kobo/apps/hook/models/hook_log.py @@ -7,7 +7,6 @@ from kpi.fields import KpiUidField from kpi.models.abstract_models import AbstractTimeStampedModel from kpi.utils.log import logging - from ..constants import ( HOOK_LOG_FAILED, HOOK_LOG_PENDING, @@ -18,9 +17,7 @@ class HookLog(AbstractTimeStampedModel): - hook = models.ForeignKey( - 'Hook', related_name='logs', on_delete=models.CASCADE - ) + hook = models.ForeignKey('Hook', related_name='logs', on_delete=models.CASCADE) uid = KpiUidField(uid_prefix='hl') submission_id = models.IntegerField( # `KoboCAT.logger.Instance.id` default=0, db_index=True diff --git a/kobo/apps/hook/models/service_definition_interface.py b/kobo/apps/hook/models/service_definition_interface.py index a1b8e76e6c..56328fbade 100644 --- a/kobo/apps/hook/models/service_definition_interface.py +++ b/kobo/apps/hook/models/service_definition_interface.py @@ -9,7 +9,6 @@ from ssrf_protect.ssrf_protect import SSRFProtect, SSRFProtectException from kpi.utils.log import logging - from ..constants import ( HOOK_LOG_FAILED, HOOK_LOG_SUCCESS, @@ -85,7 +84,9 @@ def send(self) -> bool: if not self._data: self.save_log( - KOBO_INTERNAL_ERROR_STATUS_CODE, 'Submission has been deleted', allow_retries=False + KOBO_INTERNAL_ERROR_STATUS_CODE, + 'Submission has been deleted', + allow_retries=False, ) return False @@ -128,26 +129,18 @@ def send(self) -> bool: ssrf_protect_options = {} if constance.config.SSRF_ALLOWED_IP_ADDRESS.strip(): - ssrf_protect_options[ - 'allowed_ip_addresses' - ] = constance.config.SSRF_ALLOWED_IP_ADDRESS.strip().split( - '\r\n' + ssrf_protect_options['allowed_ip_addresses'] = ( + 
constance.config.SSRF_ALLOWED_IP_ADDRESS.strip().split('\r\n') ) if constance.config.SSRF_DENIED_IP_ADDRESS.strip(): - ssrf_protect_options[ - 'denied_ip_addresses' - ] = constance.config.SSRF_DENIED_IP_ADDRESS.strip().split( - '\r\n' + ssrf_protect_options['denied_ip_addresses'] = ( + constance.config.SSRF_DENIED_IP_ADDRESS.strip().split('\r\n') ) - SSRFProtect.validate( - self._hook.endpoint, options=ssrf_protect_options - ) + SSRFProtect.validate(self._hook.endpoint, options=ssrf_protect_options) - response = requests.post( - self._hook.endpoint, timeout=30, **request_kwargs - ) + response = requests.post(self._hook.endpoint, timeout=30, **request_kwargs) response.raise_for_status() self.save_log(response.status_code, response.text, success=True) @@ -178,8 +171,7 @@ def send(self) -> bool: exc_info=True, ) self.save_log( - KOBO_INTERNAL_ERROR_STATUS_CODE, - f'{self._hook.endpoint} is not allowed' + KOBO_INTERNAL_ERROR_STATUS_CODE, f'{self._hook.endpoint} is not allowed' ) raise except Exception as e: diff --git a/kobo/apps/hook/tasks.py b/kobo/apps/hook/tasks.py index cf7c4bdbc9..4d2521dcff 100644 --- a/kobo/apps/hook/tasks.py +++ b/kobo/apps/hook/tasks.py @@ -11,7 +11,6 @@ from kobo.celery import celery_app from kpi.utils.log import logging - from .constants import HOOK_LOG_FAILED from .exceptions import HookRemoteServerDownError from .models import Hook, HookLog @@ -58,9 +57,11 @@ def failures_reports(): beat_schedule = settings.CELERY_BEAT_SCHEDULE.get('send-hooks-failures-reports') # Use `.first()` instead of `.get()`, because task can be duplicated in admin section - failures_reports_period_task = PeriodicTask.objects.filter( - enabled=True, - task=beat_schedule.get('task')).order_by('-last_run_at').first() + failures_reports_period_task = ( + PeriodicTask.objects.filter(enabled=True, task=beat_schedule.get('task')) + .order_by('-last_run_at') + .first() + ) if failures_reports_period_task: @@ -71,9 +72,7 @@ def failures_reports(): if last_run_at: queryset = queryset.filter(date_modified__gte=last_run_at) - queryset = queryset.order_by( - 'hook__asset__name', 'hook__uid', '-date_modified' - ) + queryset = queryset.order_by('hook__asset__name', 'hook__uid', '-date_modified') # PeriodicTask are updated every 3 minutes (default). # It means, if this task interval is less than 3 minutes, some data can be duplicated in emails. 
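
For readers skimming the reflowed failures_reports hunk above: the task looks up the enabled PeriodicTask for the send-hooks-failures-reports beat schedule (using .first() because the task can be duplicated in the admin) and, when a last_run_at exists, only includes HookLog entries whose date_modified falls on or after that timestamp, so each report covers the window since the previous run. Below is a minimal, framework-free sketch of that windowing pattern only; the function name and sample data are illustrative and are not part of the project's code or API.

    # Simplified sketch of the "report only what changed since the last run"
    # window used by the failures_reports task; plain Python, no Django ORM.
    from datetime import datetime, timedelta

    def logs_since_last_run(logs, last_run_at):
        """Keep log dicts modified on or after the previous run (all if none)."""
        if last_run_at is None:
            return list(logs)
        return [log for log in logs if log['date_modified'] >= last_run_at]

    if __name__ == '__main__':
        now = datetime.utcnow()
        sample_logs = [
            {'uid': 'hl_old', 'date_modified': now - timedelta(hours=2)},
            {'uid': 'hl_new', 'date_modified': now - timedelta(minutes=1)},
        ]
        last_run = now - timedelta(minutes=30)
        # Only the log modified after the previous run is reported.
        print([log['uid'] for log in logs_since_last_run(sample_logs, last_run)])

As the diff's own comment notes, PeriodicTask rows are refreshed roughly every 3 minutes by default, so a shorter reporting interval can make consecutive windows overlap and duplicate entries in the e-mails.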
diff --git a/kobo/apps/hook/tests/hook_test_case.py b/kobo/apps/hook/tests/hook_test_case.py index 0ec5d7045b..030610ee60 100644 --- a/kobo/apps/hook/tests/hook_test_case.py +++ b/kobo/apps/hook/tests/hook_test_case.py @@ -11,7 +11,6 @@ from kpi.constants import SUBMISSION_FORMAT_TYPE_JSON, SUBMISSION_FORMAT_TYPE_XML from kpi.exceptions import BadFormatException from kpi.tests.kpi_test_case import KpiTestCase - from ..constants import HOOK_LOG_FAILED from ..exceptions import HookRemoteServerDownError from ..models import Hook, HookLog @@ -23,21 +22,30 @@ def setUp(self): self.client.login(username='someuser', password='someuser') self.asset = self.create_asset( 'some_asset', - content=json.dumps({'survey': [ - {'type': 'text', 'label': 'q1', 'name': 'q1'}, - {'type': 'begin_group', 'label': 'group1', 'name': 'group1'}, - {'type': 'text', 'label': 'q2', 'name': 'q2'}, - {'type': 'text', 'label': 'q3', 'name': 'q3'}, - {'type': 'end_group'}, - {'type': 'begin_group', 'label': 'group2', 'name': 'group2'}, - {'type': 'begin_group', 'label': 'subgroup1', 'name': 'subgroup1'}, - {'type': 'text', 'label': 'q4', 'name': 'q4'}, - {'type': 'text', 'label': 'q5', 'name': 'q5'}, - {'type': 'text', 'label': 'q6', 'name': 'q6'}, - {'type': 'end_group'}, - {'type': 'end_group'}, - ]}), - format='json') + content=json.dumps( + { + 'survey': [ + {'type': 'text', 'label': 'q1', 'name': 'q1'}, + {'type': 'begin_group', 'label': 'group1', 'name': 'group1'}, + {'type': 'text', 'label': 'q2', 'name': 'q2'}, + {'type': 'text', 'label': 'q3', 'name': 'q3'}, + {'type': 'end_group'}, + {'type': 'begin_group', 'label': 'group2', 'name': 'group2'}, + { + 'type': 'begin_group', + 'label': 'subgroup1', + 'name': 'subgroup1', + }, + {'type': 'text', 'label': 'q4', 'name': 'q4'}, + {'type': 'text', 'label': 'q5', 'name': 'q5'}, + {'type': 'text', 'label': 'q6', 'name': 'q6'}, + {'type': 'end_group'}, + {'type': 'end_group'}, + ] + } + ), + format='json', + ) self.asset.deploy(backend='mock', active=True) self.asset.save() self.hook = Hook() @@ -95,9 +103,7 @@ def _send_and_fail(self): # Fakes Celery n retries by forcing status to `failed` # (where n is `settings.HOOKLOG_MAX_RETRIES`) - first_hooklog = HookLog.objects.get( - uid=first_hooklog_response.get('uid') - ) + first_hooklog = HookLog.objects.get(uid=first_hooklog_response.get('uid')) first_hooklog.change_status(HOOK_LOG_FAILED) return first_hooklog_response diff --git a/kobo/apps/hook/tests/test_api_hook.py b/kobo/apps/hook/tests/test_api_hook.py index f747bcaa11..fa7190b5ef 100644 --- a/kobo/apps/hook/tests/test_api_hook.py +++ b/kobo/apps/hook/tests/test_api_hook.py @@ -23,7 +23,6 @@ SUBMISSION_FORMAT_TYPE_JSON, ) from kpi.utils.datetime import several_minutes_from_now - from ..exceptions import HookRemoteServerDownError from .hook_test_case import HookTestCase @@ -34,25 +33,29 @@ def test_anonymous_access(self): hook = self._create_hook() self.client.logout() - list_url = reverse('hook-list', kwargs={ - 'parent_lookup_asset': self.asset.uid - }) + list_url = reverse('hook-list', kwargs={'parent_lookup_asset': self.asset.uid}) response = self.client.get(list_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - detail_url = reverse('hook-detail', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'uid': hook.uid, - }) + detail_url = reverse( + 'hook-detail', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'uid': hook.uid, + }, + ) response = self.client.get(detail_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - 
log_list_url = reverse('hook-log-list', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_hook': hook.uid, - }) + log_list_url = reverse( + 'hook-log-list', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': hook.uid, + }, + ) response = self.client.get(log_list_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -120,25 +123,29 @@ def test_non_owner_cannot_access(self): self.client.logout() self.client.login(username='anotheruser', password='anotheruser') - list_url = reverse('hook-list', kwargs={ - 'parent_lookup_asset': self.asset.uid - }) + list_url = reverse('hook-list', kwargs={'parent_lookup_asset': self.asset.uid}) response = self.client.get(list_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - detail_url = reverse('hook-detail', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'uid': hook.uid, - }) + detail_url = reverse( + 'hook-detail', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'uid': hook.uid, + }, + ) response = self.client.get(detail_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - log_list_url = reverse('hook-log-list', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_hook': hook.uid, - }) + log_list_url = reverse( + 'hook-log-list', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': hook.uid, + }, + ) response = self.client.get(log_list_url) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -152,20 +159,18 @@ def test_non_owner_cannot_create(self): def test_anonymous_cannot_create(self): self.client.logout() - response = self._create_hook(return_response_only=True, - name='Hook for asset from anonymous') + response = self._create_hook( + return_response_only=True, name='Hook for asset from anonymous' + ) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) def test_partial_update_hook(self): hook = self._create_hook() - url = reverse('hook-detail', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'uid': hook.uid - }) - data = { - 'name': 'some disabled external service', - 'active': False - } + url = reverse( + 'hook-detail', + kwargs={'parent_lookup_asset': self.asset.uid, 'uid': hook.uid}, + ) + data = {'name': 'some disabled external service', 'active': False} response = self.client.patch(url, data, format=SUBMISSION_FORMAT_TYPE_JSON) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) @@ -175,7 +180,7 @@ def test_partial_update_hook(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_send_and_retry(self): @@ -183,29 +188,35 @@ def test_send_and_retry(self): first_log_response = self._send_and_fail() # Let's retry through API call - retry_url = reverse('hook-log-retry', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_hook': self.hook.uid, - 'uid': first_log_response.get('uid') - }) + retry_url = reverse( + 'hook-log-retry', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid'), + }, + ) # It should be a success response = self.client.patch(retry_url, format=SUBMISSION_FORMAT_TYPE_JSON) self.assertEqual(response.status_code, status.HTTP_200_OK) # Let's check if logs has 2 tries - detail_url = reverse('hook-log-detail', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_hook': 
self.hook.uid, - 'uid': first_log_response.get('uid') - }) + detail_url = reverse( + 'hook-log-detail', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid'), + }, + ) response = self.client.get(detail_url, format=SUBMISSION_FORMAT_TYPE_JSON) self.assertEqual(response.data.get('tries'), 2) @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_send_and_cannot_retry(self): @@ -213,29 +224,35 @@ def test_send_and_cannot_retry(self): first_log_response = self._send_and_wait_for_retry() # Let's retry through API call - retry_url = reverse('hook-log-retry', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_hook': self.hook.uid, - 'uid': first_log_response.get('uid') - }) + retry_url = reverse( + 'hook-log-retry', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid'), + }, + ) # It should be a failure. The hook log is going to be retried response = self.client.patch(retry_url, format=SUBMISSION_FORMAT_TYPE_JSON) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) # Let's check if logs has 2 tries - detail_url = reverse('hook-log-detail', kwargs={ - 'parent_lookup_asset': self.asset.uid, - 'parent_lookup_hook': self.hook.uid, - 'uid': first_log_response.get('uid') - }) + detail_url = reverse( + 'hook-log-detail', + kwargs={ + 'parent_lookup_asset': self.asset.uid, + 'parent_lookup_hook': self.hook.uid, + 'uid': first_log_response.get('uid'), + }, + ) response = self.client.get(detail_url, format=SUBMISSION_FORMAT_TYPE_JSON) self.assertEqual(response.data.get('tries'), 1) @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_payload_template(self): @@ -321,7 +338,7 @@ def test_payload_template_validation(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_hook_log_filter_success(self): @@ -363,7 +380,7 @@ def test_hook_log_filter_success(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_hook_log_filter_failure(self): @@ -427,7 +444,7 @@ def test_hook_log_filter_validation(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_hook_log_filter_date(self): @@ -464,20 +481,26 @@ def test_hook_log_filter_date(self): # There should be a success log around now response = self.client.get( - f'{hook_log_url}?start={five_minutes_ago}&end={in_five_min}', format='json') + f'{hook_log_url}?start={five_minutes_ago}&end={in_five_min}', + format='json', + ) self.assertEqual(response.data.get('count'), 1) # There should be no log before now - response = self.client.get(f'{hook_log_url}?start={in_five_min}', format='json') + response = self.client.get( + f'{hook_log_url}?start={in_five_min}', format='json' + ) self.assertEqual(response.data.get('count'), 0) # There should be no log after 
now - response = self.client.get(f'{hook_log_url}?end={five_minutes_ago}', format='json') + response = self.client.get( + f'{hook_log_url}?end={five_minutes_ago}', format='json' + ) self.assertEqual(response.data.get('count'), 0) # There should be no log around now when expressed in a different time zone response = self.client.get( - f'{hook_log_url}?start={five_minutes_ago}{tzoffset}&end={in_five_min}{tzoffset}', + f'{hook_log_url}?start={five_minutes_ago}{tzoffset}&end={in_five_min}{tzoffset}', # noqa: E501 format='json', ) self.assertEqual(response.data.get('count'), 0) diff --git a/kobo/apps/hook/tests/test_email.py b/kobo/apps/hook/tests/test_email.py index 678f9a40a4..f42b757ee0 100644 --- a/kobo/apps/hook/tests/test_email.py +++ b/kobo/apps/hook/tests/test_email.py @@ -32,7 +32,7 @@ def _create_periodic_task(self): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_notifications(self): @@ -50,13 +50,17 @@ def test_notifications(self): 'name': self.asset.name, 'hook_uid': self.hook.uid, 'max_length': len(self.hook.name), - 'logs': [{ - 'hook_name': self.hook.name, - 'status_code': first_log_response.get('status_code'), - 'message': first_log_response.get('message'), - 'uid': first_log_response.get('uid'), - 'date_modified': dateparse.parse_datetime(first_log_response.get('date_modified')) - }] + 'logs': [ + { + 'hook_name': self.hook.name, + 'status_code': first_log_response.get('status_code'), + 'message': first_log_response.get('message'), + 'uid': first_log_response.get('uid'), + 'date_modified': dateparse.parse_datetime( + first_log_response.get('date_modified') + ), + } + ], } } } diff --git a/kobo/apps/hook/tests/test_parser.py b/kobo/apps/hook/tests/test_parser.py index f90dc6fbbe..0709a0fb1b 100644 --- a/kobo/apps/hook/tests/test_parser.py +++ b/kobo/apps/hook/tests/test_parser.py @@ -7,7 +7,6 @@ from kpi.constants import SUBMISSION_FORMAT_TYPE_XML from kpi.utils.strings import to_str from kpi.utils.xml import check_lxml_fromstring - from .hook_test_case import HookTestCase diff --git a/kobo/apps/hook/tests/test_ssrf.py b/kobo/apps/hook/tests/test_ssrf.py index f4c38155e2..0302d94439 100644 --- a/kobo/apps/hook/tests/test_ssrf.py +++ b/kobo/apps/hook/tests/test_ssrf.py @@ -8,7 +8,6 @@ from ssrf_protect.exceptions import SSRFProtectException from kobo.apps.hook.constants import HOOK_LOG_FAILED, KOBO_INTERNAL_ERROR_STATUS_CODE - from .hook_test_case import HookTestCase @@ -16,7 +15,7 @@ class SSRFHookTestCase(HookTestCase): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @override_config(SSRF_DENIED_IP_ADDRESS='1.2.3.4') @responses.activate diff --git a/kobo/apps/hook/tests/test_utils.py b/kobo/apps/hook/tests/test_utils.py index d05f618564..7f935c94e1 100644 --- a/kobo/apps/hook/tests/test_utils.py +++ b/kobo/apps/hook/tests/test_utils.py @@ -12,7 +12,7 @@ class HookUtilsTestCase(HookTestCase): @patch( 'ssrf_protect.ssrf_protect.SSRFProtect._get_ip_address', - new=MagicMock(return_value=ip_address('1.2.3.4')) + new=MagicMock(return_value=ip_address('1.2.3.4')), ) @responses.activate def test_data_submission(self): diff --git a/kobo/apps/hook/utils/lazy.py b/kobo/apps/hook/utils/lazy.py index 58a64c9d1a..c378b1f6ad 100644 --- a/kobo/apps/hook/utils/lazy.py +++ b/kobo/apps/hook/utils/lazy.py @@ -7,6 +7,7 @@ 
class LazyMaxRetriesInt: This wrapper helps to return the value of `constance.config.HOOK_MAX_RETRIES` on demand. """ + def __call__(self, *args, **kwargs): return constance.config.HOOK_MAX_RETRIES diff --git a/kobo/apps/hook/views/v2/hook.py b/kobo/apps/hook/views/v2/hook.py index 5550717eb4..10f547fdc0 100644 --- a/kobo/apps/hook/views/v2/hook.py +++ b/kobo/apps/hook/views/v2/hook.py @@ -204,9 +204,7 @@ def retry(self, request, uid=None, *args, **kwargs): retry_all_task.apply_async( queue='kpi_low_priority_queue', args=(hooklogs_ids,) ) - response.update({ - 'pending_uids': hooklogs_uids - }) + response.update({'pending_uids': hooklogs_uids}) else: response['detail'] = t('No data to retry') diff --git a/kobo/apps/hook/views/v2/hook_log.py b/kobo/apps/hook/views/v2/hook_log.py index 6e6ec34de9..d27d7d760e 100644 --- a/kobo/apps/hook/views/v2/hook_log.py +++ b/kobo/apps/hook/views/v2/hook_log.py @@ -103,8 +103,7 @@ def retry(self, request, uid=None, *args, **kwargs): :param uid: str :return: Response """ - response = {'detail': '', - 'status_code': KOBO_INTERNAL_ERROR_STATUS_CODE} + response = {'detail': '', 'status_code': KOBO_INTERNAL_ERROR_STATUS_CODE} status_code = status.HTTP_200_OK hook_log = self.get_object() @@ -123,9 +122,7 @@ def retry(self, request, uid=None, *args, **kwargs): ) status_code = status.HTTP_500_INTERNAL_SERVER_ERROR else: - response['detail'] = t( - 'Data is being or has already been processed' - ) + response['detail'] = t('Data is being or has already been processed') status_code = status.HTTP_400_BAD_REQUEST return Response(response, status=status_code) diff --git a/kobo/apps/kobo_auth/models.py b/kobo/apps/kobo_auth/models.py index 5e1a96f1d9..dbdf532f66 100644 --- a/kobo/apps/kobo_auth/models.py +++ b/kobo/apps/kobo_auth/models.py @@ -39,7 +39,7 @@ def has_perm(self, perm, obj=None): return super().has_perm(perm, obj) def sync_to_openrosa_db(self): - User = self.__class__ # noqa + User = self.__class__ # noqa User.objects.using(settings.OPENROSA_DB_ALIAS).bulk_create( [self], update_conflicts=True, @@ -54,6 +54,6 @@ def sync_to_openrosa_db(self): 'is_active', 'date_joined', ], - unique_fields=['pk'] + unique_fields=['pk'], ) update_autofield_sequence(User) diff --git a/kobo/apps/kobo_auth/signals.py b/kobo/apps/kobo_auth/signals.py index bc4a80d21c..38d409330f 100644 --- a/kobo/apps/kobo_auth/signals.py +++ b/kobo/apps/kobo_auth/signals.py @@ -9,10 +9,7 @@ grant_kc_model_level_perms, kc_transaction_atomic, ) -from kpi.utils.permissions import ( - grant_default_model_level_perms, - is_user_anonymous, -) +from kpi.utils.permissions import grant_default_model_level_perms, is_user_anonymous @receiver(post_save, sender=User) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py index 58ed6a9ebd..a48938cd4d 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_abstract_viewset.py @@ -2,17 +2,14 @@ from typing import Union from django.conf import settings -from django.contrib.auth.models import ( - AnonymousUser, - Permission, -) +from django.contrib.auth.models import AnonymousUser, Permission from django.core.files.base import ContentFile from django.test import TestCase +from django_digest.test import DigestAuth from rest_framework import status from rest_framework.reverse import reverse from rest_framework.test import APIRequestFactory -from django_digest.test import DigestAuth from 
kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.api.viewsets.metadata_viewset import MetaDataViewSet from kobo.apps.openrosa.apps.logger.models import Attachment, XForm @@ -101,9 +98,7 @@ def publish_xls_form( self.assertEqual(response.status_code, 201) self.xform = XForm.objects.all().order_by('pk').reverse()[0] - data.update({ - 'url': f'http://testserver/api/v1/forms/{self.xform.pk}' - }) + data.update({'url': f'http://testserver/api/v1/forms/{self.xform.pk}'}) self.assertEqual(dict(response.data, **data), response.data) self.form_data = response.data else: diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py index 04bcc1cbe9..c27a56a352 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_attachment_viewset.py @@ -151,8 +151,7 @@ def test_direct_image_link(self): self.assertEqual(response.status_code, 404) def test_direct_image_link_uppercase(self): - self._submit_transport_instance_w_attachment( - media_file='1335783522564.JPG') + self._submit_transport_instance_w_attachment(media_file='1335783522564.JPG') filename = self.attachment.media_file.name file_base, file_extension = os.path.splitext(filename) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py index dd64fc3b1d..2b091b0184 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_data_viewset.py @@ -207,8 +207,7 @@ def test_add_form_tag_propagates_to_data_tags(self): for i in self.xform.instances.all(): self.assertIn('hello', i.tags.names()) # remove tag "hello" - request = self.factory.delete('/', data={'tags': 'hello'}, - **self.extra) + request = self.factory.delete('/', data={'tags': 'hello'}, **self.extra) response = view(request, pk=pk, label='hello') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, []) @@ -324,9 +323,7 @@ def test_get_enketo_edit_url(self): ) request = self.factory.get( - '/', - data={'return_url': 'http://test.io/test_url'}, - **self.extra + '/', data={'return_url': 'http://test.io/test_url'}, **self.extra ) with HTTMock(enketo_mock): diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py index 0f41357f15..08020161ff 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_metadata_viewset.py @@ -27,8 +27,12 @@ def setUp(self): self.publish_xls_form() self.data_value = 'screenshot.png' self.fixture_dir = os.path.join( - settings.OPENROSA_APP_DIR, 'apps', 'main', 'tests', 'fixtures', - 'transportation' + settings.OPENROSA_APP_DIR, + 'apps', + 'main', + 'tests', + 'fixtures', + 'transportation', ) self.path = os.path.join(self.fixture_dir, self.data_value) diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py index 66e8ee29d1..2c0d6e5b31 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_user.py @@ -7,7 +7,6 @@ from rest_framework.reverse import reverse from kobo.apps.openrosa.apps.logger.models.xform import XForm - from .test_abstract_viewset import TestAbstractViewSet diff --git 
a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py index fa8eb92cfd..cf0319fa7e 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_submission_api.py @@ -3,9 +3,9 @@ import simplejson as json from django.contrib.auth.models import AnonymousUser from django.core.files.uploadedfile import InMemoryUploadedFile +from django_digest.test import DigestAuth from rest_framework import status -from django_digest.test import DigestAuth from kobo.apps.openrosa.apps.api.tests.viewsets.test_abstract_viewset import ( TestAbstractViewSet, ) @@ -19,10 +19,7 @@ class TestXFormSubmissionApi(TestAbstractViewSet): def setUp(self): super().setUp() - self.view = XFormSubmissionApi.as_view({ - 'head': 'create', - 'post': 'create' - }) + self.view = XFormSubmissionApi.as_view({'head': 'create', 'post': 'create'}) self.publish_xls_form() def test_head_response(self): @@ -378,9 +375,7 @@ def test_post_submission_json_without_submission_key(self): auth = DigestAuth('bob', 'bobbob') request.META.update(auth(request.META, response)) response = self.view(request) - self.assertContains( - response, 'No submission key provided.', status_code=400 - ) + self.assertContains(response, 'No submission key provided.', status_code=400) def test_submission_blocking_flag(self): # Set 'submissions_suspended' True in the profile metadata to test if diff --git a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py index 8f5210063b..ad9b7a2c3a 100644 --- a/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/tests/viewsets/test_xform_viewset.py @@ -147,7 +147,7 @@ def test_form_format(self): }) formid = self.xform.pk data = { - 'name': 'transportation_2011_07_25', # Since commit 3c0e17d0b6041ae96b06c3ef4d2f78a2d0739cbc # flake8: noqa + 'name': 'transportation_2011_07_25', # Since commit 3c0e17d0b6041ae96b06c3ef4d2f78a2d0739cbc # noqa: E501 'title': 'transportation_2011_07_25', 'default_language': 'default', 'id_string': 'transportation_2011_07_25', @@ -239,8 +239,7 @@ def test_form_tags(self): self.assertEqual(response.data, []) # remove tag "hello" - request = self.factory.delete('/', data={'tags': 'hello'}, - **self.extra) + request = self.factory.delete('/', data={'tags': 'hello'}, **self.extra) response = view(request, pk=formid, label='hello') self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data, []) diff --git a/kobo/apps/openrosa/apps/api/tools.py b/kobo/apps/openrosa/apps/api/tools.py index 2a2a049a81..e19c9dd7fb 100644 --- a/kobo/apps/openrosa/apps/api/tools.py +++ b/kobo/apps/openrosa/apps/api/tools.py @@ -6,11 +6,7 @@ import rest_framework.views as rest_framework_views from django import forms from django.conf import settings -from django.http import ( - HttpResponse, - HttpResponseNotFound, - HttpResponseRedirect, -) +from django.http import HttpResponse, HttpResponseNotFound, HttpResponseRedirect from django.urls import Resolver404, resolve from django.utils.translation import gettext as t from rest_framework import exceptions @@ -33,9 +29,7 @@ from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) -from kpi.views.v2.paired_data import ( - OpenRosaDynamicDataAttachmentViewset, -) +from kpi.views.v2.paired_data import 
OpenRosaDynamicDataAttachmentViewset DECIMAL_PRECISION = 2 @@ -53,8 +47,11 @@ def _get_id_for_type(record, mongo_field): date_field = datetime_from_str(record[mongo_field]) mongo_str = '$' + mongo_field - return {'$substr': [mongo_str, 0, 10]} if isinstance(date_field, datetime)\ + return ( + {'$substr': [mongo_str, 0, 10]} + if isinstance(date_field, datetime) else mongo_str + ) def publish_xlsform(request, user, existing_xform=None): @@ -69,17 +66,25 @@ def publish_xlsform(request, user, existing_xform=None): ) ): raise exceptions.PermissionDenied( - detail=t('User %(user)s has no permission to add xforms to ' - 'account %(account)s' % {'user': request.user.username, - 'account': user.username})) + detail=t( + 'User %(user)s has no permission to add xforms to ' + 'account %(account)s' + % {'user': request.user.username, 'account': user.username} + ) + ) if ( existing_xform and not request.user.is_superuser and not request.user.has_perm('change_xform', existing_xform) ): raise exceptions.PermissionDenied( - detail=t('User %(user)s has no permission to change this ' - 'form.' % {'user': request.user.username, }) + detail=t( + 'User %(user)s has no permission to change this ' + 'form.' + % { + 'user': request.user.username, + } + ) ) def set_form(): @@ -102,8 +107,9 @@ def get_xform(formid, request, username=None): xform = check_and_set_form_by_id(int(formid), request) if not xform: - raise exceptions.PermissionDenied(t( - 'You do not have permission to view data from this form.')) + raise exceptions.PermissionDenied( + t('You do not have permission to view data from this form.') + ) return xform diff --git a/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py index 7ba1c7f12b..6cba77226c 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/connect_viewset.py @@ -9,7 +9,6 @@ UserProfileWithTokenSerializer, ) from kpi.utils.object_permission import get_database_user - from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet diff --git a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py index 7ce1d73f9d..6ecda8a1b8 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/data_viewset.py @@ -11,29 +11,21 @@ from rest_framework.serializers import ValidationError from rest_framework.settings import api_settings -from kobo.apps.openrosa.apps.api.exceptions import ( - NoConfirmationProvidedAPIException, -) +from kobo.apps.openrosa.apps.api.exceptions import NoConfirmationProvidedAPIException from kobo.apps.openrosa.apps.api.permissions import ( EnketoSubmissionEditPermissions, EnketoSubmissionViewPermissions, XFormDataPermissions, ) -from kobo.apps.openrosa.apps.api.tools import ( - add_tags_to_instance, -) -from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import ( - custom_response_handler, -) +from kobo.apps.openrosa.apps.api.tools import add_tags_to_instance +from kobo.apps.openrosa.apps.api.viewsets.xform_viewset import custom_response_handler from kobo.apps.openrosa.apps.logger.exceptions import ( BuildDbQueriesAttributeError, BuildDbQueriesBadArgumentError, BuildDbQueriesNoConfirmationProvidedError, MissingValidationStatusPayloadError, ) -from kobo.apps.openrosa.apps.logger.models.instance import ( - Instance, -) +from kobo.apps.openrosa.apps.logger.models.instance import Instance from kobo.apps.openrosa.apps.logger.models.xform import XForm from 
kobo.apps.openrosa.apps.logger.utils.instance import ( add_validation_status_to_instance, @@ -56,7 +48,6 @@ get_enketo_submission_url, ) from kpi.utils.object_permission import get_database_user - from ..utils.rest_framework.viewsets import OpenRosaModelViewSet SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS'] @@ -412,9 +403,9 @@ def bulk_delete(self, request, *args, **kwargs): try: deleted_records_count = delete_instances(xform, request.data) except BuildDbQueriesBadArgumentError: - raise ValidationError({ - 'payload': t("`query` and `instance_ids` can't be used together") - }) + raise ValidationError( + {'payload': t("`query` and `instance_ids` can't be used together")} + ) except BuildDbQueriesAttributeError: raise ValidationError( {'payload': t('Invalid `query` or `submission_ids` params')} @@ -441,9 +432,9 @@ def bulk_validation_status(self, request, *args, **kwargs): xform, request.data, real_user.username ) except BuildDbQueriesBadArgumentError: - raise ValidationError({ - 'payload': t("`query` and `instance_ids` can't be used together") - }) + raise ValidationError( + {'payload': t("`query` and `instance_ids` can't be used together")} + ) except BuildDbQueriesAttributeError: raise ValidationError( {'payload': t('Invalid `query` or `submission_ids` params')} @@ -497,8 +488,7 @@ def get_object(self) -> Union[XForm, Instance]: try: int(dataid) except ValueError: - raise ParseError(t('Invalid dataid `%(dataid)s`' - % {'dataid': dataid})) + raise ParseError(t('Invalid dataid `%(dataid)s`' % {'dataid': dataid})) return get_object_or_404(Instance, pk=dataid, xform__pk=pk) @@ -552,11 +542,8 @@ def validation_status(self, request, *args, **kwargs): if request.method != 'GET': username = get_database_user(request.user).username validation_status_uid = request.data.get('validation_status.uid') - if ( - request.method == 'PATCH' - and not add_validation_status_to_instance( - username, validation_status_uid, instance - ) + if request.method == 'PATCH' and not add_validation_status_to_instance( + username, validation_status_uid, instance ): http_status = status.HTTP_400_BAD_REQUEST elif request.method == 'DELETE': diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py index a2f57c0d06..f425451b89 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_list_api.py @@ -28,7 +28,6 @@ XFormManifestSerializer, ) from kpi.authentication import DigestAuthentication - from ..utils.rest_framework.viewsets import OpenRosaReadOnlyModelViewSet @@ -224,9 +223,7 @@ def _is_metadata_expired(obj: MetaData, request: Request) -> bool: # default anymore and the `update()` method is itself # atomic since it does not reference any value previously read # from the database. Is that enough? 
- MetaData.objects.filter(pk=obj.pk).update( - date_modified=timezone.now() - ) + MetaData.objects.filter(pk=obj.pk).update(date_modified=timezone.now()) return True return False diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py index aed4a60467..d23a5fd236 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_submission_api.py @@ -28,7 +28,6 @@ from kobo.apps.openrosa.libs.utils.string import dict_lists2strings from kpi.authentication import DigestAuthentication from kpi.utils.object_permission import get_database_user - from ..utils.rest_framework.viewsets import OpenRosaGenericViewSet xml_error_re = re.compile('>(.*)<') diff --git a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py index 4f785abae6..a4bf7a64e2 100644 --- a/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py +++ b/kobo/apps/openrosa/apps/api/viewsets/xform_viewset.py @@ -38,7 +38,6 @@ default_kobocat_storage as default_storage, ) from kpi.utils.object_permission import get_database_user - from ..utils.rest_framework.viewsets import OpenRosaModelViewSet EXPORT_EXT = { @@ -125,9 +124,7 @@ def _get_owner(request): owner = _get_user(owner) if owner is None: - raise ValidationError( - 'User with username %(owner)s does not exist.' - ) + raise ValidationError('User with username %(owner)s does not exist.') return owner diff --git a/kobo/apps/openrosa/apps/logger/models/attachment.py b/kobo/apps/openrosa/apps/logger/models/attachment.py index 80c38f2c24..35fb3fb256 100644 --- a/kobo/apps/openrosa/apps/logger/models/attachment.py +++ b/kobo/apps/openrosa/apps/logger/models/attachment.py @@ -9,19 +9,13 @@ from django.utils.http import urlencode from kobo.apps.openrosa.libs.utils.hash import get_hash -from kobo.apps.openrosa.libs.utils.image_tools import ( - get_optimized_image_path, - resize, -) -from kpi.deployment_backends.kc_access.storage import ( - KobocatFileSystemStorage, -) +from kobo.apps.openrosa.libs.utils.image_tools import get_optimized_image_path, resize +from kpi.deployment_backends.kc_access.storage import KobocatFileSystemStorage from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) from kpi.fields.file import ExtendedFileField from kpi.mixins.audio_transcoding import AudioTranscodingMixin - from .instance import Instance @@ -149,19 +143,15 @@ def protected_path( # (out of ASCII character set) and must be encoded to let NGINX serve # them if optimized_image_path: - attachment_file_path = default_storage.path( - optimized_image_path - ) - protected_url = urlquote(attachment_file_path.replace( - settings.KOBOCAT_MEDIA_ROOT, '/protected') + attachment_file_path = default_storage.path(optimized_image_path) + protected_url = urlquote( + attachment_file_path.replace(settings.KOBOCAT_MEDIA_ROOT, '/protected') ) else: # Double-encode the S3 URL to take advantage of NGINX's # otherwise troublesome automatic decoding if optimized_image_path: - attachment_file_path = default_storage.url( - optimized_image_path - ) + attachment_file_path = default_storage.url(optimized_image_path) protected_url = f'/protected-s3/{urlquote(attachment_file_path)}' return protected_url diff --git a/kobo/apps/openrosa/apps/logger/models/instance.py b/kobo/apps/openrosa/apps/logger/models/instance.py index 640507046c..6a6737078a 100644 --- 
a/kobo/apps/openrosa/apps/logger/models/instance.py +++ b/kobo/apps/openrosa/apps/logger/models/instance.py @@ -130,7 +130,9 @@ def check_active(self, force): if self.xform and not self.xform.downloadable: raise FormInactiveError() - UserProfile = apps.get_model('main', 'UserProfile') # noqa - Avoid circular imports + UserProfile = apps.get_model( + 'main', 'UserProfile' + ) # noqa - Avoid circular imports profile, created = UserProfile.objects.get_or_create(user=self.xform.user) if not created and profile.metadata.get('submissions_suspended', False): raise TemporarilyUnavailableError() diff --git a/kobo/apps/openrosa/apps/logger/models/xform.py b/kobo/apps/openrosa/apps/logger/models/xform.py index f18e976f03..f366ce156d 100644 --- a/kobo/apps/openrosa/apps/logger/models/xform.py +++ b/kobo/apps/openrosa/apps/logger/models/xform.py @@ -56,9 +56,7 @@ class XForm(AbstractTimeStampedModel): CLONED_SUFFIX = '_cloned' MAX_ID_LENGTH = 100 - xls = ExtendedFileField( - storage=default_storage, upload_to=upload_to, null=True - ) + xls = ExtendedFileField(storage=default_storage, upload_to=upload_to, null=True) json = models.TextField(default='') description = models.TextField(default='', null=True) xml = models.TextField() @@ -74,9 +72,7 @@ class XForm(AbstractTimeStampedModel): encrypted = models.BooleanField(default=False) id_string = models.SlugField( - editable=False, - verbose_name=t('ID'), - max_length=MAX_ID_LENGTH + editable=False, verbose_name=t('ID'), max_length=MAX_ID_LENGTH ) title = models.CharField(editable=False, max_length=XFORM_TITLE_LENGTH) last_submission_time = models.DateTimeField(blank=True, null=True) @@ -194,8 +190,7 @@ def _set_title(self): if self.title and title_xml != self.title: title_xml = self.title[:XFORM_TITLE_LENGTH] title_xml = xml_escape(title_xml) - self.xml = title_pattern.sub( - '%s' % title_xml, self.xml) + self.xml = title_pattern.sub('%s' % title_xml, self.xml) self.title = title_xml @@ -228,10 +223,15 @@ def save(self, *args, **kwargs): "the existing forms' id_string '%(old_id)s'." % {'new_id': self.id_string, 'old_id': old_id_string})) - if getattr(settings, 'STRICT', True) and \ - not re.search(r'^[\w-]+$', self.id_string): - raise XLSFormError(t('In strict mode, the XForm ID must be a ' - 'valid slug and contain no spaces.')) + if getattr(settings, 'STRICT', True) and not re.search( + r'^[\w-]+$', self.id_string + ): + raise XLSFormError( + t( + 'In strict mode, the XForm ID must be a ' + 'valid slug and contain no spaces.' 
+ ) + ) super().save(*args, **kwargs) @@ -244,6 +244,7 @@ def submission_count(self, force_update=False): self.num_of_submissions = count self.save(update_fields=['num_of_submissions']) return self.num_of_submissions + submission_count.short_description = t('Submission Count') def geocoded_submission_count(self): diff --git a/kobo/apps/openrosa/apps/logger/signals.py b/kobo/apps/openrosa/apps/logger/signals.py index 64c2884e68..16d228f4ca 100644 --- a/kobo/apps/openrosa/apps/logger/signals.py +++ b/kobo/apps/openrosa/apps/logger/signals.py @@ -5,11 +5,7 @@ from django.core.exceptions import ObjectDoesNotExist from django.db import transaction from django.db.models import Case, F, When -from django.db.models.signals import ( - post_delete, - post_save, - pre_delete, -) +from django.db.models.signals import post_delete, post_save, pre_delete from django.dispatch import receiver from kobo.apps.kobo_auth.shortcuts import User diff --git a/kobo/apps/openrosa/apps/logger/tests/test_parsing.py b/kobo/apps/openrosa/apps/logger/tests/test_parsing.py index f322f9bcc4..f5cd23dbfa 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_parsing.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_parsing.py @@ -28,15 +28,14 @@ def _publish_and_submit_new_repeats(self): # publish our form which contains some some repeats xls_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - '../fixtures/new_repeats/new_repeats.xls' + '../fixtures/new_repeats/new_repeats.xls', ) self._publish_xls_file_and_set_xform(xls_file_path) # submit an instance xml_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - '../fixtures/new_repeats/instances/' - 'new_repeats_2012-07-05-14-33-53.xml' + '../fixtures/new_repeats/instances/' 'new_repeats_2012-07-05-14-33-53.xml', ) self._make_submission(xml_submission_file_path) @@ -91,13 +90,15 @@ def test_parse_xform_nested_repeats(self): self.assertEqual(flat_dict, expected_flat_dict) def test_xpath_from_xml_node(self): - xml_str = "' \ - 'c911d71ce1ac48478e5f8bac99addc4e' \ - '-1.2625149 36.7924478 0.0 30.0' \ - 'Yo' \ - '-1.2625072 36.7924328 0.0 30.0' \ - 'What' + xml_str = ( + "' + 'c911d71ce1ac48478e5f8bac99addc4e' + '-1.2625149 36.7924478 0.0 30.0' + 'Yo' + '-1.2625072 36.7924328 0.0 30.0' + 'What' + ) clean_xml_str = xml_str.strip() clean_xml_str = re.sub(r'>\s+<', '><', clean_xml_str) root_node = minidom.parseString(clean_xml_str).documentElement @@ -113,22 +114,33 @@ def test_xpath_from_xml_node(self): def test_get_meta_from_xml(self): with open( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'tutorial', - 'instances', 'tutorial_2012-06-27_11-27-53_w_uuid_edited.xml'), - 'r') as xml_file: + os.path.dirname(__file__), + '..', + 'fixtures', + 'tutorial', + 'instances', + 'tutorial_2012-06-27_11-27-53_w_uuid_edited.xml', + ), + 'r', + ) as xml_file: xml_str = xml_file.read() instanceID = get_meta_from_xml(xml_str, 'instanceID') - self.assertEqual(instanceID, - 'uuid:2d8c59eb-94e9-485d-a679-b28ffe2e9b98') + self.assertEqual(instanceID, 'uuid:2d8c59eb-94e9-485d-a679-b28ffe2e9b98') deprecatedID = get_meta_from_xml(xml_str, 'deprecatedID') self.assertEqual(deprecatedID, 'uuid:729f173c688e482486a48661700455ff') def test_get_meta_from_xml_without_uuid_returns_none(self): with open( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'tutorial', - 'instances', 'tutorial_2012-06-27_11-27-53.xml'), - 'r') as xml_file: + os.path.dirname(__file__), + '..', + 'fixtures', + 'tutorial', + 'instances', + 
'tutorial_2012-06-27_11-27-53.xml', + ), + 'r', + ) as xml_file: xml_str = xml_file.read() instanceID = get_meta_from_xml(xml_str, 'instanceID') self.assertIsNone(instanceID) @@ -136,9 +148,15 @@ def test_get_meta_from_xml_without_uuid_returns_none(self): def test_get_uuid_from_xml(self): with open( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'tutorial', - 'instances', 'tutorial_2012-06-27_11-27-53_w_uuid.xml'), - 'r') as xml_file: + os.path.dirname(__file__), + '..', + 'fixtures', + 'tutorial', + 'instances', + 'tutorial_2012-06-27_11-27-53_w_uuid.xml', + ), + 'r', + ) as xml_file: xml_str = xml_file.read() instanceID = get_uuid_from_xml(xml_str) self.assertEqual(instanceID, '729f173c688e482486a48661700455ff') @@ -146,9 +164,15 @@ def test_get_uuid_from_xml(self): def test_get_deprecated_uuid_from_xml(self): with open( os.path.join( - os.path.dirname(__file__), '..', 'fixtures', 'tutorial', - 'instances', 'tutorial_2012-06-27_11-27-53_w_uuid_edited.xml'), - 'r') as xml_file: + os.path.dirname(__file__), + '..', + 'fixtures', + 'tutorial', + 'instances', + 'tutorial_2012-06-27_11-27-53_w_uuid_edited.xml', + ), + 'r', + ) as xml_file: xml_str = xml_file.read() deprecatedID = get_deprecated_uuid_from_xml(xml_str) self.assertEqual(deprecatedID, '729f173c688e482486a48661700455ff') @@ -158,26 +182,25 @@ def test_parse_xform_nested_repeats_multiple_nodes(self): # publish our form which contains some some repeats xls_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - '../fixtures/new_repeats/new_repeats.xls' + '../fixtures/new_repeats/new_repeats.xls', ) self._publish_xls_file_and_set_xform(xls_file_path) # submit an instance xml_submission_file_path = os.path.join( os.path.dirname(os.path.abspath(__file__)), - '../fixtures/new_repeats/instances/' - 'multiple_nodes_error.xml' + '../fixtures/new_repeats/instances/' 'multiple_nodes_error.xml', ) self._make_submission(xml_submission_file_path) def test_xml_repeated_group_to_dict(self): xml_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), - '../fixtures/repeated_group/repeated_group.xml' + '../fixtures/repeated_group/repeated_group.xml', ) json_file = os.path.join( os.path.dirname(os.path.abspath(__file__)), - '../fixtures/repeated_group/repeated_group.json' + '../fixtures/repeated_group/repeated_group.json', ) with open(xml_file) as file: dict_ = _xml_node_to_dict(clean_and_parse_xml(file.read())) diff --git a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py index 82805834f4..ad15f2ce8d 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_publish_xls.py @@ -18,8 +18,8 @@ class TestPublishXLS(TestBase): def test_publish_xls(self): xls_file_path = os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'transportation.xls') + self.this_directory, 'fixtures', 'transportation', 'transportation.xls' + ) count = XForm.objects.count() call_command('publish_xls', xls_file_path, self.user.username) self.assertEqual(XForm.objects.count(), count + 1) @@ -29,14 +29,17 @@ def test_publish_xls(self): def test_publish_xls_replacement(self): count = XForm.objects.count() xls_file_path = os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'transportation.xls') + self.this_directory, 'fixtures', 'transportation', 'transportation.xls' + ) call_command('publish_xls', xls_file_path, self.user.username) self.assertEqual(XForm.objects.count(), count + 1) count 
= XForm.objects.count() xls_file_path = os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'transportation_updated.xls') + self.this_directory, + 'fixtures', + 'transportation', + 'transportation_updated.xls', + ) # call command without replace param with self.assertRaises(CommandError): call_command('publish_xls', xls_file_path, self.user.username) @@ -55,11 +58,14 @@ def test_publish_xls_replacement(self): @unittest.skip('Fails under Django 1.6') def test_line_break_in_variables(self): xls_file_path = os.path.join( - self.this_directory, 'fixtures', 'exp_line_break.xlsx') + self.this_directory, 'fixtures', 'exp_line_break.xlsx' + ) xml_file_path = os.path.join( - self.this_directory, 'fixtures', 'exp_line_break.xml') + self.this_directory, 'fixtures', 'exp_line_break.xml' + ) test_xml_file_path = os.path.join( - self.this_directory, 'fixtures', 'test_exp_line_break.xml') + self.this_directory, 'fixtures', 'test_exp_line_break.xml' + ) self._publish_xls_file(xls_file_path) xforms = XForm.objects.filter(id_string='exp_line_break') self.assertTrue(xforms.count() > 0) @@ -83,8 +89,9 @@ def test_report_exception_with_exc_info(self): except Exception as e: exc_info = sys.exc_info() try: - report_exception(subject='Test report exception', info=e, - exc_info=exc_info) + report_exception( + subject='Test report exception', info=e, exc_info=exc_info + ) except Exception as e: raise AssertionError('%s' % e) @@ -124,9 +131,11 @@ def test_publish_invalid_xls_form_no_choices(self): with pytest.raises(PyXFormError) as e: self._publish_xls_file(path) + # Intermediate variable `columns` is just here to lure linter about Q000 + columns = "'list_name', 'name', and 'label'" error_msg = ( - "There should be a choices sheet in this xlsform. " - "Please ensure that the choices sheet has the mandatory " - "columns 'list_name', 'name', and 'label'." + f'There should be a choices sheet in this xlsform. ' + f'Please ensure that the choices sheet has the mandatory ' + f'columns {columns}.' ) assert error_msg in str(e) diff --git a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py index c9a54958e9..371ee04f30 100644 --- a/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py +++ b/kobo/apps/openrosa/apps/logger/tests/test_simple_submission.py @@ -50,8 +50,7 @@ def _submit_simple_yes(self): '/yesno>'), []) def setUp(self): - self.user = User.objects.create( - username='admin', email='sample@example.com') + self.user = User.objects.create(username='admin', email='sample@example.com') self.user.set_password('pass') UserProfile.objects.get_or_create(user=self.user) diff --git a/kobo/apps/openrosa/apps/logger/utils/counters.py b/kobo/apps/openrosa/apps/logger/utils/counters.py index c76dbf42d4..c75e8f65a3 100644 --- a/kobo/apps/openrosa/apps/logger/utils/counters.py +++ b/kobo/apps/openrosa/apps/logger/utils/counters.py @@ -1,11 +1,13 @@ - def delete_null_user_daily_counters(apps, *args): """ - Find any DailyXFormCounters without a user, assign them to a user if we can, otherwise delete them + Find any DailyXFormCounters without a user, assign them to a user if we can, + otherwise delete them. This function is reused between two migrations, logger.0030 and logger.0031. 
If/when those migrations get squashed, please delete this function """ - DailyXFormSubmissionCounter = apps.get_model('logger', 'DailyXFormSubmissionCounter') # noqa + DailyXFormSubmissionCounter = apps.get_model( + 'logger', 'DailyXFormSubmissionCounter' + ) # noqa counters_without_users = DailyXFormSubmissionCounter.objects.filter(user=None) @@ -21,10 +23,15 @@ def delete_null_user_daily_counters(apps, *args): .iterator(chunk_size=batch_size) ): counter.user = counter.xform.user - # don't add a user to duplicate counters, so they get deleted when we're done looping - if DailyXFormSubmissionCounter.objects.filter( + # don't add a user to duplicate counters, so they get deleted when we're + # done looping + if ( + DailyXFormSubmissionCounter.objects.filter( date=counter.date, xform=counter.xform - ).exclude(user=None).exists(): + ) + .exclude(user=None) + .exists() + ): continue batch.append(counter) if len(batch) >= batch_size: diff --git a/kobo/apps/openrosa/apps/logger/utils/database_query.py b/kobo/apps/openrosa/apps/logger/utils/database_query.py index c47dbae4ac..0e3be5393b 100644 --- a/kobo/apps/openrosa/apps/logger/utils/database_query.py +++ b/kobo/apps/openrosa/apps/logger/utils/database_query.py @@ -3,7 +3,6 @@ import json from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance - from ..exceptions import ( BuildDbQueriesAttributeError, BuildDbQueriesBadArgumentError, @@ -13,7 +12,6 @@ def build_db_queries(xform: XForm, request_data: dict) -> tuple[dict, dict]: - """ Gets instance ids based on the request payload. Useful to narrow down set of instances for bulk actions @@ -29,15 +27,11 @@ def build_db_queries(xform: XForm, request_data: dict) -> tuple[dict, dict]: """ - mongo_query = ParsedInstance.get_base_query( - xform.user.username, xform.id_string - ) + mongo_query = ParsedInstance.get_base_query(xform.user.username, xform.id_string) postgres_query = {'xform_id': xform.id} instance_ids = None # Remove empty values - payload = { - key_: value_ for key_, value_ in request_data.items() if value_ - } + payload = {key_: value_ for key_, value_ in request_data.items() if value_} ################################################### # Submissions can be retrieve in 3 different ways # ################################################### @@ -58,10 +52,7 @@ def build_db_queries(xform: XForm, request_data: dict) -> tuple[dict, dict]: except AttributeError: raise BuildDbQueriesAttributeError - query_kwargs = { - 'query': json.dumps(query), - 'fields': '["_id"]' - } + query_kwargs = {'query': json.dumps(query), 'fields': '["_id"]'} cursor = ParsedInstance.query_mongo_no_paging(**query_kwargs) instance_ids = [record.get('_id') for record in list(cursor)] @@ -74,8 +65,7 @@ def build_db_queries(xform: XForm, request_data: dict) -> tuple[dict, dict]: else: try: # Use int() to test if list of integers is valid. 
- instance_ids = [int(submission_id) - for submission_id in submission_ids] + instance_ids = [int(submission_id) for submission_id in submission_ids] except ValueError: raise BuildDbQueriesAttributeError diff --git a/kobo/apps/openrosa/apps/logger/utils/instance.py b/kobo/apps/openrosa/apps/logger/utils/instance.py index 1c6ec30b9c..0d2ab73698 100644 --- a/kobo/apps/openrosa/apps/logger/utils/instance.py +++ b/kobo/apps/openrosa/apps/logger/utils/instance.py @@ -10,7 +10,6 @@ ) from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance from kobo.apps.openrosa.apps.viewer.signals import remove_from_mongo - from ..exceptions import MissingValidationStatusPayloadError from ..models.instance import Instance from ..models.xform import XForm @@ -29,9 +28,7 @@ def add_validation_status_to_instance( # Payload must contain validation_status property. if validation_status_uid: - validation_status = get_validation_status( - validation_status_uid, username - ) + validation_status = get_validation_status(validation_status_uid, username) if validation_status: instance.validation_status = validation_status instance.save(update_fields=['validation_status']) @@ -48,11 +45,13 @@ def delete_instances(xform: XForm, request_data: dict) -> int: # Disconnect signals to speed-up bulk deletion pre_delete.disconnect(remove_from_mongo, sender=ParsedInstance) post_delete.disconnect( - nullify_exports_time_of_last_submission, sender=Instance, + nullify_exports_time_of_last_submission, + sender=Instance, dispatch_uid='nullify_exports_time_of_last_submission', ) post_delete.disconnect( - update_xform_submission_count_delete, sender=Instance, + update_xform_submission_count_delete, + sender=Instance, dispatch_uid='update_xform_submission_count_delete', ) @@ -72,9 +71,7 @@ def delete_instances(xform: XForm, request_data: dict) -> int: # Update xform like signals would do if it was as single object deletion nullify_exports_time_of_last_submission(sender=Instance, instance=xform) update_xform_submission_count_delete( - sender=Instance, - instance=xform, - value=deleted_records_count + sender=Instance, instance=xform, value=deleted_records_count ) finally: # Pre_delete signal needs to be re-enabled for parsed instance @@ -129,10 +126,8 @@ def set_instance_validation_statuses( postgres_query, mongo_query = build_db_queries(xform, request_data) # Update Postgres & Mongo - updated_records_count = Instance.objects.filter( - **postgres_query - ).update(validation_status=new_validation_status) - ParsedInstance.bulk_update_validation_statuses( - mongo_query, new_validation_status + updated_records_count = Instance.objects.filter(**postgres_query).update( + validation_status=new_validation_status ) + ParsedInstance.bulk_update_validation_statuses(mongo_query, new_validation_status) return updated_records_count diff --git a/kobo/apps/openrosa/apps/logger/xform_instance_parser.py b/kobo/apps/openrosa/apps/logger/xform_instance_parser.py index 1deea34511..afcda3e4da 100644 --- a/kobo/apps/openrosa/apps/logger/xform_instance_parser.py +++ b/kobo/apps/openrosa/apps/logger/xform_instance_parser.py @@ -52,10 +52,12 @@ def get_meta_from_xml(xml_str, meta_name): if children.length == 0: raise ValueError(t('XML string must have a survey element.')) survey_node = children[0] - meta_tags = [n for n in survey_node.childNodes if - n.nodeType == Node.ELEMENT_NODE and - (n.tagName.lower() == 'meta' or - n.tagName.lower() == 'orx:meta')] + meta_tags = [ + n + for n in survey_node.childNodes + if n.nodeType == Node.ELEMENT_NODE 
+ and (n.tagName.lower() == 'meta' or n.tagName.lower() == 'orx:meta') + ] if len(meta_tags) == 0: return None @@ -80,6 +82,7 @@ def _uuid_only(uuid, regex): if matches and len(matches.groups()) > 0: return matches.groups()[0] return None + uuid = get_meta_from_xml(xml, 'instanceID') regex = re.compile(r'uuid:(.*)') if uuid: @@ -299,8 +302,10 @@ def __init__(self, xml_str, data_dictionary): def parse(self, xml_str): self._xml_obj = clean_and_parse_xml(xml_str) self._root_node = self._xml_obj.documentElement - repeats = [e.get_abbreviated_xpath() - for e in self.dd.get_survey_elements_of_type('repeat')] + repeats = [ + e.get_abbreviated_xpath() + for e in self.dd.get_survey_elements_of_type('repeat') + ] self._dict = _xml_node_to_dict(self._root_node, repeats) if self._dict is None: raise InstanceEmptyError diff --git a/kobo/apps/openrosa/apps/main/models/user_profile.py b/kobo/apps/openrosa/apps/main/models/user_profile.py index 5eb49da58a..934a1651fb 100644 --- a/kobo/apps/openrosa/apps/main/models/user_profile.py +++ b/kobo/apps/openrosa/apps/main/models/user_profile.py @@ -111,9 +111,7 @@ def set_password_details( """ user_profile, created = cls.objects.get_or_create(user_id=user_id) user_profile.validated_password = validated - user_profile.save( - update_fields=['validated_password'] - ) + user_profile.save(update_fields=['validated_password']) # TODO, remove this in favor of `kpi.utils.object_permission.get_anonymous_user()` diff --git a/kobo/apps/openrosa/apps/main/tests/test_base.py b/kobo/apps/openrosa/apps/main/tests/test_base.py index 679e4fa332..89246ed523 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_base.py +++ b/kobo/apps/openrosa/apps/main/tests/test_base.py @@ -11,9 +11,9 @@ from django.test import TestCase from django.test.client import Client from django.utils import timezone +from django_digest.test import Client as DigestClient from rest_framework.test import APIRequestFactory -from django_digest.test import Client as DigestClient from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.models import Attachment, XForm from kobo.apps.openrosa.apps.main.models import UserProfile @@ -21,9 +21,7 @@ MakeSubmissionMixin, ) from kobo.apps.openrosa.libs.tests.mixins.request_mixin import RequestMixin -from kobo.apps.openrosa.libs.utils.logger_tools import ( - publish_xls_form, -) +from kobo.apps.openrosa.libs.utils.logger_tools import publish_xls_form from kobo.apps.openrosa.libs.utils.string import base64_encodestring diff --git a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py index 46873a27bc..00cb7f0200 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py +++ b/kobo/apps/openrosa/apps/main/tests/test_past_bugs.py @@ -5,7 +5,6 @@ from pyxform.errors import PyXFormError from kobo.apps.openrosa.apps.logger.models import Instance, XForm - from .test_base import TestBase @@ -20,17 +19,16 @@ def test_uniqueness_of_group_names_enforced(self): self._create_user_and_login() with pytest.raises(PyXFormError) as e: - self._publish_xls_file( - 'fixtures/group_names_must_be_unique.xls' + self._publish_xls_file('fixtures/group_names_must_be_unique.xls') + assert ( + 'The name "group_names_must_be_unique" is the same as the form name' + in str(e) ) - assert 'The name "group_names_must_be_unique" is the same as the form name' in str(e) assert XForm.objects.count() == pre_count def test_mch(self): - self._publish_xls_file( - os.path.join('fixtures/bug_fixes/MCH_v1.xls') - ) + 
self._publish_xls_file(os.path.join('fixtures/bug_fixes/MCH_v1.xls')) def test_buggy_files(self): message = 'Duplicate column header: label' diff --git a/kobo/apps/openrosa/apps/main/tests/test_process.py b/kobo/apps/openrosa/apps/main/tests/test_process.py index eba5974323..ae77d659e3 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_process.py +++ b/kobo/apps/openrosa/apps/main/tests/test_process.py @@ -11,8 +11,8 @@ from django.conf import settings from django.core.files.uploadedfile import UploadedFile from django.urls import reverse - from django_digest.test import Client as DigestClient + from kobo.apps.openrosa.apps.logger.models import XForm from kobo.apps.openrosa.apps.logger.models.xform import XFORM_TITLE_LENGTH from kobo.apps.openrosa.apps.logger.xform_instance_parser import clean_and_parse_xml @@ -20,7 +20,6 @@ from kobo.apps.openrosa.apps.viewer.models.data_dictionary import DataDictionary from kobo.apps.openrosa.libs.utils.common_tags import SUBMISSION_TIME, UUID from kobo.apps.openrosa.libs.utils.hash import get_hash - from .test_base import TestBase uuid_regex = re.compile( @@ -135,7 +134,7 @@ def _check_formList(self): % (self.user.username, self.xform.pk) md5_hash = get_hash(self.xform.xml) expected_content = """ -transportation_2011_07_25transportation_2011_07_25md5:%(hash)stransportation_2011_07_25%(download_url)s%(manifest_url)s""" # noqa +transportation_2011_07_25transportation_2011_07_25md5:%(hash)stransportation_2011_07_25%(download_url)s%(manifest_url)s""" # noqa: E501 expected_content = expected_content % { 'download_url': self.download_url, 'manifest_url': self.manifest_url, @@ -151,22 +150,25 @@ def _download_xform(self): response = client.get(self.download_url) response_doc = minidom.parseString(response.content) - xml_path = os.path.join(self.this_directory, 'fixtures', - 'transportation', 'transportation.xml') + xml_path = os.path.join( + self.this_directory, 'fixtures', 'transportation', 'transportation.xml' + ) with open(xml_path) as xml_file: expected_doc = minidom.parse(xml_file) model_node = [ - n for n in - response_doc.getElementsByTagName('h:head')[0].childNodes - if n.nodeType == Node.ELEMENT_NODE and - n.tagName == 'model'][0] + n + for n in response_doc.getElementsByTagName('h:head')[0].childNodes + if n.nodeType == Node.ELEMENT_NODE and n.tagName == 'model' + ][0] # check for UUID and remove - uuid_nodes = [node for node in model_node.childNodes - if node.nodeType == Node.ELEMENT_NODE and - node.getAttribute('nodeset') == - '/transportation/formhub/uuid'] + uuid_nodes = [ + node + for node in model_node.childNodes + if node.nodeType == Node.ELEMENT_NODE + and node.getAttribute('nodeset') == '/transportation/formhub/uuid' + ] self.assertEqual(len(uuid_nodes), 1) uuid_node = uuid_nodes[0] uuid_node.setAttribute('calculate', "''") @@ -186,8 +188,11 @@ def _check_data_dictionary(self): qs = DataDictionary.objects.filter(user=self.user) self.assertEqual(qs.count(), 1) self.data_dictionary = DataDictionary.objects.all()[0] - with open(os.path.join(self.this_directory, 'fixtures', - 'transportation', 'headers.json')) as f: + with open( + os.path.join( + self.this_directory, 'fixtures', 'transportation', 'headers.json' + ) + ) as f: expected_list = json.load(f) self.assertEqual(self.data_dictionary.get_headers(), expected_list) @@ -198,25 +203,24 @@ def _check_data_dictionary(self): def _check_data_for_csv_export(self): data = [ - {'available_transportation_types_to_referral_facility/ambulance': - True, - 
'available_transportation_types_to_referral_facility/bicycle': - True, - self.ambulance_key: 'daily', - self.bicycle_key: 'weekly' - }, + { + 'available_transportation_types_to_referral_facility/ambulance': True, + 'available_transportation_types_to_referral_facility/bicycle': True, + self.ambulance_key: 'daily', + self.bicycle_key: 'weekly', + }, {}, - {'available_transportation_types_to_referral_facility/ambulance': - True, - self.ambulance_key: 'weekly', - }, - {'available_transportation_types_to_referral_facility/taxi': True, - 'available_transportation_types_to_referral_facility/other': True, - 'available_transportation_types_to_referral_facility_other': - 'camel', - self.taxi_key: 'daily', - self.other_key: 'other', - } + { + 'available_transportation_types_to_referral_facility/ambulance': True, + self.ambulance_key: 'weekly', + }, + { + 'available_transportation_types_to_referral_facility/taxi': True, + 'available_transportation_types_to_referral_facility/other': True, + 'available_transportation_types_to_referral_facility_other': 'camel', + self.taxi_key: 'daily', + self.other_key: 'other', + }, ] for d_from_db in self.data_dictionary.get_data_for_excel(): d_from_db_iter = dict(d_from_db) @@ -240,30 +244,29 @@ def _check_group_xpaths_do_not_appear_in_dicts_for_export(self): expected_dict = { 'transportation': { - 'meta': { - 'instanceID': uuid - }, + 'meta': {'instanceID': uuid}, 'transport': { - 'loop_over_transport_types_frequency': {'bicycle': { - 'frequency_to_referral_facility': 'weekly' - }, + 'loop_over_transport_types_frequency': { + 'bicycle': {'frequency_to_referral_facility': 'weekly'}, 'ambulance': { 'frequency_to_referral_facility': 'daily' - } + }, }, - 'available_transportation_types_to_referral_facility': - 'ambulance bicycle', - } + 'available_transportation_types_to_referral_facility': ( + 'ambulance bicycle' + ), + }, } } self.assertEqual(instance.get_dict(flat=False), expected_dict) expected_dict = { - 'transport/available_transportation_types_to_referral_facility': - 'ambulance bicycle', + 'transport/available_transportation_types_to_referral_facility': ( + 'ambulance bicycle' + ), self.transport_ambulance_key: 'daily', self.transport_bicycle_key: 'weekly', '_xform_id_string': 'transportation_2011_07_25', - 'meta/instanceID': uuid + 'meta/instanceID': uuid, } self.assertEqual(instance.get_dict(), expected_dict) @@ -280,9 +283,15 @@ def _get_csv_(self): def _check_csv_export_first_pass(self): actual_csv = self._get_csv_() - f = open(os.path.join( - self.this_directory, 'fixtures', - 'transportation', 'transportation.csv'), 'r') + f = open( + os.path.join( + self.this_directory, + 'fixtures', + 'transportation', + 'transportation.csv', + ), + 'r', + ) expected_csv = csv.reader(f) for actual_row, expected_row in zip(actual_csv, expected_csv): for actual_cell, expected_cell in zip(actual_row, expected_row): @@ -299,42 +308,44 @@ def _check_csv_export_second_pass(self): actual_csv = csv.reader(actual_lines) headers = next(actual_csv) data = [ - {'meta/instanceID': 'uuid:5b2cc313-fc09-437e-8149-fcd32f695d41', - '_uuid': '5b2cc313-fc09-437e-8149-fcd32f695d41', - '_submission_time': '2013-02-14T15:37:21', - '_tags': '', '_notes': '' - }, - {'available_transportation_types_to_referral_facility/ambulance': - 'True', - 'available_transportation_types_to_referral_facility/bicycle': - 'True', - self.ambulance_key: 'daily', - self.bicycle_key: 'weekly', - 'meta/instanceID': 'uuid:f3d8dc65-91a6-4d0f-9e97-802128083390', - '_uuid': 'f3d8dc65-91a6-4d0f-9e97-802128083390', - 
'_submission_time': '2013-02-14T15:37:22', - '_tags': '', '_notes': '' - }, - {'available_transportation_types_to_referral_facility/ambulance': - 'True', - self.ambulance_key: 'weekly', - 'meta/instanceID': 'uuid:9c6f3468-cfda-46e8-84c1-75458e72805d', - '_uuid': '9c6f3468-cfda-46e8-84c1-75458e72805d', - '_submission_time': '2013-02-14T15:37:23', - '_tags': '', '_notes': '' - }, - {'available_transportation_types_to_referral_facility/taxi': - 'True', - 'available_transportation_types_to_referral_facility/other': - 'True', - 'available_transportation_types_to_referral_facility_other': - 'camel', - self.taxi_key: 'daily', - 'meta/instanceID': 'uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf', - '_uuid': '9f0a1508-c3b7-4c99-be00-9b237c26bcbf', - '_submission_time': '2013-02-14T15:37:24', - '_tags': '', '_notes': '' - } + { + 'meta/instanceID': 'uuid:5b2cc313-fc09-437e-8149-fcd32f695d41', + '_uuid': '5b2cc313-fc09-437e-8149-fcd32f695d41', + '_submission_time': '2013-02-14T15:37:21', + '_tags': '', + '_notes': '', + }, + { + 'available_transportation_types_to_referral_facility/ambulance': 'True', + 'available_transportation_types_to_referral_facility/bicycle': 'True', + self.ambulance_key: 'daily', + self.bicycle_key: 'weekly', + 'meta/instanceID': 'uuid:f3d8dc65-91a6-4d0f-9e97-802128083390', + '_uuid': 'f3d8dc65-91a6-4d0f-9e97-802128083390', + '_submission_time': '2013-02-14T15:37:22', + '_tags': '', + '_notes': '', + }, + { + 'available_transportation_types_to_referral_facility/ambulance': 'True', + self.ambulance_key: 'weekly', + 'meta/instanceID': 'uuid:9c6f3468-cfda-46e8-84c1-75458e72805d', + '_uuid': '9c6f3468-cfda-46e8-84c1-75458e72805d', + '_submission_time': '2013-02-14T15:37:23', + '_tags': '', + '_notes': '', + }, + { + 'available_transportation_types_to_referral_facility/taxi': 'True', + 'available_transportation_types_to_referral_facility/other': 'True', + 'available_transportation_types_to_referral_facility_other': 'camel', + self.taxi_key: 'daily', + 'meta/instanceID': 'uuid:9f0a1508-c3b7-4c99-be00-9b237c26bcbf', + '_uuid': '9f0a1508-c3b7-4c99-be00-9b237c26bcbf', + '_submission_time': '2013-02-14T15:37:24', + '_tags': '', + '_notes': '', + }, ] dd = DataDictionary.objects.get(pk=self.xform.pk) @@ -378,8 +389,9 @@ def test_publish_bad_xls_with_unicode_in_error(self): def test_metadata_file_hash(self): self._publish_transportation_form() - src = os.path.join(self.this_directory, 'fixtures', - 'transportation', 'screenshot.png') + src = os.path.join( + self.this_directory, 'fixtures', 'transportation', 'screenshot.png' + ) uf = UploadedFile(file=open(src, 'rb'), content_type='image/png') count = MetaData.objects.count() MetaData.media_upload(self.xform, uf) @@ -413,18 +425,24 @@ def test_uuid_injection_in_cascading_select(self): # check for instance nodes that are direct children of the model node model_node = xml.getElementsByTagName('model')[0] - instance_nodes = [node for node in model_node.childNodes if - node.nodeType == Node.ELEMENT_NODE and - node.tagName.lower() == 'instance' and - not node.hasAttribute('id')] + instance_nodes = [ + node + for node in model_node.childNodes + if node.nodeType == Node.ELEMENT_NODE + and node.tagName.lower() == 'instance' + and not node.hasAttribute('id') + ] self.assertEqual(len(instance_nodes), 1) instance_node = instance_nodes[0] # get the first element whose id attribute is equal to our form's # id_string - form_nodes = [node for node in instance_node.childNodes if - node.nodeType == Node.ELEMENT_NODE and - node.getAttribute('id') == 
xform.id_string] + form_nodes = [ + node + for node in instance_node.childNodes + if node.nodeType == Node.ELEMENT_NODE + and node.getAttribute('id') == xform.id_string + ] form_node = form_nodes[0] # find the formhub node that has a uuid child node @@ -434,24 +452,26 @@ def test_uuid_injection_in_cascading_select(self): self.assertEqual(len(uuid_nodes), 1) # check for the calculate bind - calculate_bind_nodes = [node for node in model_node.childNodes if - node.nodeType == Node.ELEMENT_NODE and - node.tagName == 'bind' and - node.getAttribute('nodeset') == - '/%s/formhub/uuid' % xform.id_string] + calculate_bind_nodes = [ + node + for node in model_node.childNodes + if node.nodeType == Node.ELEMENT_NODE + and node.tagName == 'bind' + and node.getAttribute('nodeset') == '/%s/formhub/uuid' % xform.id_string + ] self.assertEqual(len(calculate_bind_nodes), 1) calculate_bind_node = calculate_bind_nodes[0] self.assertEqual( - calculate_bind_node.getAttribute('calculate'), "'%s'" % xform.uuid) + calculate_bind_node.getAttribute('calculate'), "'%s'" % xform.uuid + ) def test_truncate_xform_title_to_255(self): self._publish_transportation_form() title = 'a' * (XFORM_TITLE_LENGTH + 1) groups = re.match( - r'(.+)([^<]+)(.*)', - self.xform.xml, re.DOTALL).groups() - self.xform.xml = '{0}{1}{2}'.format( - groups[0], title, groups[2]) + r'(.+)([^<]+)(.*)', self.xform.xml, re.DOTALL + ).groups() + self.xform.xml = '{0}{1}{2}'.format(groups[0], title, groups[2]) self.xform.title = title self.xform.save() self.assertEqual(self.xform.title, 'a' * XFORM_TITLE_LENGTH) diff --git a/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py b/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py index a3fede2ed2..de313cff64 100644 --- a/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py +++ b/kobo/apps/openrosa/apps/main/tests/test_user_id_string_unique_together.py @@ -5,7 +5,6 @@ from django.db.utils import IntegrityError from kobo.apps.openrosa.apps.logger.models import XForm - from .test_base import TestBase @@ -18,9 +17,7 @@ def test_unique_together(self): """ self._create_user_and_login() self.this_directory = os.path.dirname(__file__) - xls_path = os.path.join( - self.this_directory, 'fixtures', 'gps', 'gps.xls' - ) + xls_path = os.path.join(self.this_directory, 'fixtures', 'gps', 'gps.xls') # first time self._publish_xls_file(xls_path) diff --git a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py index 10728d5d15..63122f43cf 100644 --- a/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py +++ b/kobo/apps/openrosa/apps/viewer/models/parsed_instance.py @@ -67,7 +67,9 @@ class ParsedInstance(models.Model): DEFAULT_LIMIT = 30000 DEFAULT_BATCHSIZE = 1000 - instance = models.OneToOneField(Instance, related_name='parsed_instance', on_delete=models.CASCADE) + instance = models.OneToOneField( + Instance, related_name='parsed_instance', on_delete=models.CASCADE + ) start_time = models.DateTimeField(null=True) end_time = models.DateTimeField(null=True) # TODO: decide if decimal field is better than float field. 
diff --git a/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py b/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py index d5412b6a44..c2d14aadcb 100644 --- a/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py +++ b/kobo/apps/openrosa/libs/tests/mixins/make_submission_mixin.py @@ -5,15 +5,13 @@ from typing import Union from django.contrib.auth import authenticate +from django_digest.test import DigestAuth from rest_framework import status from rest_framework.test import APIRequestFactory -from django_digest.test import DigestAuth from kobo.apps.openrosa.apps.api.viewsets.xform_submission_api import XFormSubmissionApi from kobo.apps.openrosa.apps.logger.models import Instance, XForm -from kobo.apps.openrosa.libs.utils.logger_tools import ( - safe_create_instance, -) +from kobo.apps.openrosa.libs.utils.logger_tools import safe_create_instance class MakeSubmissionMixin: @@ -21,10 +19,11 @@ class MakeSubmissionMixin: @property def submission_view(self): if not hasattr(self, '_submission_view'): - setattr(self, '_submission_view', XFormSubmissionApi.as_view({ - 'head': 'create', - 'post': 'create' - })) + setattr( + self, + '_submission_view', + XFormSubmissionApi.as_view({'head': 'create', 'post': 'create'}), + ) return self._submission_view def _add_uuid_to_submission_xml(self, path, xform): @@ -62,6 +61,7 @@ def _make_submission( path = self._add_uuid_to_submission_xml(path, self.xform) if not use_api: + class FakeRequest: pass @@ -101,8 +101,9 @@ class FakeRequest: url = f'/{url_prefix}submission' request = self.factory.post(url, post_data) if auth: - request.user = authenticate(username=auth.username, - password=auth.password) + request.user = authenticate( + username=auth.username, password=auth.password + ) self.response = None # Reset in case error in viewset below self.response = self.submission_view(request, username=username) diff --git a/kobo/apps/openrosa/libs/utils/logger_tools.py b/kobo/apps/openrosa/libs/utils/logger_tools.py index 454a8ef920..a27194ef45 100644 --- a/kobo/apps/openrosa/libs/utils/logger_tools.py +++ b/kobo/apps/openrosa/libs/utils/logger_tools.py @@ -78,10 +78,7 @@ from kobo.apps.openrosa.apps.viewer.models.data_dictionary import DataDictionary from kobo.apps.openrosa.apps.viewer.models.parsed_instance import ParsedInstance from kobo.apps.openrosa.libs.utils import common_tags -from kobo.apps.openrosa.libs.utils.model_tools import ( - queryset_iterator, - set_uuid, -) +from kobo.apps.openrosa.libs.utils.model_tools import queryset_iterator, set_uuid from kpi.deployment_backends.kc_access.storage import ( default_kobocat_storage as default_storage, ) @@ -353,9 +350,10 @@ def inject_instanceid(xml_str, uuid): # check if we have a meta tag survey_node = children.item(0) meta_tags = [ - n for n in survey_node.childNodes - if n.nodeType == Node.ELEMENT_NODE - and n.tagName.lower() == 'meta'] + n + for n in survey_node.childNodes + if n.nodeType == Node.ELEMENT_NODE and n.tagName.lower() == 'meta' + ] if len(meta_tags) == 0: meta_tag = xml.createElement('meta') xml.documentElement.appendChild(meta_tag) @@ -364,9 +362,10 @@ def inject_instanceid(xml_str, uuid): # check if we have an instanceID tag uuid_tags = [ - n for n in meta_tag.childNodes - if n.nodeType == Node.ELEMENT_NODE - and n.tagName == 'instanceID'] + n + for n in meta_tag.childNodes + if n.nodeType == Node.ELEMENT_NODE and n.tagName == 'instanceID' + ] if len(uuid_tags) == 0: uuid_tag = xml.createElement('instanceID') meta_tag.appendChild(uuid_tag) @@ -420,11 +419,12 @@ 
def mongo_sync_status(remongo=False, update_all=False, user=None, xform=None): ) if instance_count != mongo_count or update_all: - line = 'user: %s, id_string: %s\nInstance count: %d\t' \ - 'Mongo count: %d\n---------------------------------' \ - '-----\n' % ( - user.username, xform.id_string, instance_count, - mongo_count) + line = ( + 'user: %s, id_string: %s\nInstance count: %d\t' + 'Mongo count: %d\n---------------------------------' + '-----\n' + % (user.username, xform.id_string, instance_count, mongo_count) + ) report_string += line found += 1 total_to_remongo += (instance_count - mongo_count) @@ -434,23 +434,23 @@ def mongo_sync_status(remongo=False, update_all=False, user=None, xform=None): if update_all: sys.stdout.write( 'Updating all records for %s\n--------------------' - '---------------------------\n' % xform.id_string) + '---------------------------\n' % xform.id_string + ) else: sys.stdout.write( 'Updating missing records for %s\n----------------' - '-------------------------------\n' - % xform.id_string) - _update_mongo_for_xform( - xform, only_update_missing=not update_all - ) + '-------------------------------\n' % xform.id_string + ) + _update_mongo_for_xform(xform, only_update_missing=not update_all) done += 1 - sys.stdout.write( - '%.2f %% done ...\r' % ((float(done) / float(total)) * 100)) + sys.stdout.write('%.2f %% done ...\r' % ((float(done) / float(total)) * 100)) # only show stats if we are not updating mongo, the update function # will show progress if not remongo: - line = 'Total # of forms out of sync: %d\n' \ - 'Total # of records to remongo: %d\n' % (found, total_to_remongo) + line = ( + 'Total # of forms out of sync: %d\n' + 'Total # of records to remongo: %d\n' % (found, total_to_remongo) + ) report_string += line return report_string @@ -553,9 +553,10 @@ def report_exception(subject, info, exc_info=None): # TODO: replace with standard logging (i.e. 
`import logging`) if exc_info: cls, err = exc_info[:2] - message = t('Exception in request:' - ' %(class)s: %(error)s')\ - % {'class': cls.__name__, 'error': err} + message = t('Exception in request:' ' %(class)s: %(error)s') % { + 'class': cls.__name__, + 'error': err, + } message += ''.join(traceback.format_exception(*exc_info)) else: message = '%s' % info @@ -585,8 +586,7 @@ def response_with_mimetype_and_name( response = StreamingHttpResponse(wrapper, content_type=mimetype) response['Content-Length'] = os.path.getsize(file_path) except IOError: - response = HttpResponseNotFound( - t('The requested file could not be found.')) + response = HttpResponseNotFound(t('The requested file could not be found.')) else: response = HttpResponse(content_type=mimetype) response['Content-Disposition'] = disposition_ext_and_date( @@ -715,9 +715,7 @@ def save_submission( instance.save(update_fields=['date_created']) if instance.xform is not None: - pi, created = ParsedInstance.objects.get_or_create( - instance=instance - ) + pi, created = ParsedInstance.objects.get_or_create(instance=instance) if not created: pi.save(asynchronous=False) @@ -899,10 +897,8 @@ def _update_mongo_for_xform(xform, only_update_missing=True): mongo_instances.delete_many({common_tags.USERFORM_ID: userform_id}) # get instances - sys.stdout.write( - 'Total no of instances to update: %d\n' % len(instance_ids)) - instances = Instance.objects.only('id').in_bulk( - [id_ for id_ in instance_ids]) + sys.stdout.write('Total no of instances to update: %d\n' % len(instance_ids)) + instances = Instance.objects.only('id').in_bulk([id_ for id_ in instance_ids]) total = len(instances) done = 0 for id_, instance in instances.items(): @@ -927,8 +923,8 @@ def _update_mongo_for_xform(xform, only_update_missing=True): sys.stdout.write(progress) sys.stdout.flush() sys.stdout.write( - '\nUpdated %s\n------------------------------------------\n' - % xform.id_string) + '\nUpdated %s\n------------------------------------------\n' % xform.id_string + ) class BaseOpenRosaResponse(HttpResponse): diff --git a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py index 50c7951eb3..dcdb17117b 100644 --- a/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py +++ b/kobo/apps/project_ownership/management/commands/resume_failed_transfers_2_024_25_fix.py @@ -7,11 +7,7 @@ TransferStatusChoices, TransferStatusTypeChoices, ) -from ...utils import ( - move_attachments, - move_media_files, - rewrite_mongo_userform_id, -) +from ...utils import move_attachments, move_media_files, rewrite_mongo_userform_id class Command(BaseCommand): @@ -50,9 +46,7 @@ def handle(self, *args, **options): continue if verbosity: - self.stdout.write( - f'Resuming `{transfer.asset}` transfer…' - ) + self.stdout.write(f'Resuming `{transfer.asset}` transfer…') self._move_data(transfer) move_attachments(transfer) move_media_files(transfer) diff --git a/kobo/apps/project_ownership/models/transfer.py b/kobo/apps/project_ownership/models/transfer.py index 7e0655fda4..fd3505734c 100644 --- a/kobo/apps/project_ownership/models/transfer.py +++ b/kobo/apps/project_ownership/models/transfer.py @@ -19,7 +19,6 @@ from kpi.fields import KpiUidField from kpi.models import Asset, ObjectPermission from kpi.models.abstract_models import AbstractTimeStampedModel - from ..exceptions import TransferAlreadyProcessedException from ..tasks import async_task, 
send_email_to_admins from ..utils import get_target_folder @@ -258,14 +257,10 @@ def _start_async_jobs(self, rewrite_mongo: bool = True): # tasks because it can take a while to complete on big projects if rewrite_mongo: # 1) Rewrite `_userform_id` in MongoDB - async_task.delay( - self.pk, TransferStatusTypeChoices.SUBMISSIONS - ) + async_task.delay(self.pk, TransferStatusTypeChoices.SUBMISSIONS) # 2) Move media files to new owner's home directory - async_task.delay( - self.pk, TransferStatusTypeChoices.MEDIA_FILES - ) + async_task.delay(self.pk, TransferStatusTypeChoices.MEDIA_FILES) def _update_invite_status(self): """ diff --git a/kobo/apps/project_ownership/tasks.py b/kobo/apps/project_ownership/tasks.py index a88f9308e6..f69d674471 100644 --- a/kobo/apps/project_ownership/tasks.py +++ b/kobo/apps/project_ownership/tasks.py @@ -11,18 +11,13 @@ from kobo.celery import celery_app from kpi.utils.mailer import EmailMessage, Mailer - from .exceptions import AsyncTaskException, TransferStillPendingException from .models.choices import ( InviteStatusChoices, TransferStatusChoices, TransferStatusTypeChoices, ) -from .utils import ( - move_attachments, - move_media_files, - rewrite_mongo_userform_id, -) +from .utils import move_attachments, move_media_files, rewrite_mongo_userform_id @celery_app.task( diff --git a/kobo/apps/project_ownership/tests/api/v2/test_api.py b/kobo/apps/project_ownership/tests/api/v2/test_api.py index 2a4cbe0847..d8144a66ac 100644 --- a/kobo/apps/project_ownership/tests/api/v2/test_api.py +++ b/kobo/apps/project_ownership/tests/api/v2/test_api.py @@ -8,11 +8,7 @@ from rest_framework import status from rest_framework.reverse import reverse -from kobo.apps.project_ownership.models import ( - Invite, - InviteStatusChoices, - Transfer, -) +from kobo.apps.project_ownership.models import Invite, InviteStatusChoices, Transfer from kobo.apps.trackers.utils import update_nlp_counter from kpi.constants import PERM_VIEW_ASSET from kpi.models import Asset diff --git a/kobo/apps/project_ownership/tests/test_transfer_status.py b/kobo/apps/project_ownership/tests/test_transfer_status.py index 8c2641209f..2853083e66 100644 --- a/kobo/apps/project_ownership/tests/test_transfer_status.py +++ b/kobo/apps/project_ownership/tests/test_transfer_status.py @@ -3,7 +3,6 @@ from kpi.models import Asset from kpi.tests.utils.transaction import immediate_on_commit - from ..models import ( Invite, InviteStatusChoices, diff --git a/kobo/apps/project_ownership/utils.py b/kobo/apps/project_ownership/utils.py index 716f20e15f..33bca9b75d 100644 --- a/kobo/apps/project_ownership/utils.py +++ b/kobo/apps/project_ownership/utils.py @@ -7,7 +7,6 @@ from kobo.apps.openrosa.apps.logger.models import Attachment from kobo.apps.openrosa.apps.main.models import MetaData from kpi.models.asset import AssetFile - from .exceptions import AsyncTaskException from .models.choices import TransferStatusChoices, TransferStatusTypeChoices @@ -54,9 +53,9 @@ def move_attachments(transfer: 'project_ownership.Transfer'): _mark_task_as_successful(transfer, async_task_type) return - attachments = Attachment.all_objects.filter( - instance_id__in=submission_ids - ).exclude(media_file__startswith=f'{transfer.asset.owner.username}/') + attachments = Attachment.all_objects.filter(instance_id__in=submission_ids).exclude( + media_file__startswith=f'{transfer.asset.owner.username}/' + ) for attachment in attachments.iterator(): # Pretty slow but it should run in celery task. 
We want to be the diff --git a/kobo/apps/stripe/tests/test_organization_usage.py b/kobo/apps/stripe/tests/test_organization_usage.py index d40eb0ab69..fb0d62e907 100644 --- a/kobo/apps/stripe/tests/test_organization_usage.py +++ b/kobo/apps/stripe/tests/test_organization_usage.py @@ -338,9 +338,7 @@ def test_plan_canceled_edge_date(self): current_month_start = datetime.fromisoformat( response.data['current_month_start'] ) - current_month_end = datetime.fromisoformat( - response.data['current_month_end'] - ) + current_month_end = datetime.fromisoformat(response.data['current_month_end']) assert current_month_start.month == cancel_date.month assert current_month_start.day == cancel_date.day diff --git a/kobo/apps/subsequences/actions/automatic_transcription.py b/kobo/apps/subsequences/actions/automatic_transcription.py index 6981ae6c93..11de866180 100644 --- a/kobo/apps/subsequences/actions/automatic_transcription.py +++ b/kobo/apps/subsequences/actions/automatic_transcription.py @@ -1,5 +1,4 @@ from kobo.apps.subsequences.constants import GOOGLETS - from ..actions.base import ACTION_NEEDED, PASSES, BaseAction NOT_REQUESTED = 'NOT_REQUESTED' diff --git a/kobo/apps/subsequences/actions/translation.py b/kobo/apps/subsequences/actions/translation.py index 34e0568a80..535dc37196 100644 --- a/kobo/apps/subsequences/actions/translation.py +++ b/kobo/apps/subsequences/actions/translation.py @@ -1,6 +1,5 @@ from kobo.apps.subsequences.constants import GOOGLETX - from ..actions.base import BaseAction TRANSLATED = 'translation' diff --git a/kobo/apps/subsequences/api_view.py b/kobo/apps/subsequences/api_view.py index e744cd7102..d38c9372ab 100644 --- a/kobo/apps/subsequences/api_view.py +++ b/kobo/apps/subsequences/api_view.py @@ -8,9 +8,7 @@ from rest_framework.views import APIView from kobo.apps.subsequences.models import SubmissionExtras -from kobo.apps.subsequences.utils.deprecation import ( - get_sanitized_dict_keys, -) +from kobo.apps.subsequences.utils.deprecation import get_sanitized_dict_keys from kpi.models import Asset from kpi.permissions import SubmissionPermission from kpi.views.environment import check_asr_mt_access_for_user @@ -104,9 +102,7 @@ def get_submission_processing(asset, s_uuid): # TODO delete "if" statement below when every asset is repopulated with # `xpath` instead of `qpath`. 
- if content := get_sanitized_dict_keys( - submission_extra.content, asset - ): + if content := get_sanitized_dict_keys(submission_extra.content, asset): submission_extra.content = content return Response(submission_extra.content) diff --git a/kobo/apps/subsequences/integrations/google/base.py b/kobo/apps/subsequences/integrations/google/base.py index 41156d8a10..36400617a8 100644 --- a/kobo/apps/subsequences/integrations/google/base.py +++ b/kobo/apps/subsequences/integrations/google/base.py @@ -12,7 +12,6 @@ from googleapiclient import discovery from kobo.apps.trackers.utils import update_nlp_counter - from ...constants import GOOGLE_CACHE_TIMEOUT, make_nlp_async_cache_key from ...exceptions import SubsequenceTimeoutError from ...models import SubmissionExtras diff --git a/kobo/apps/subsequences/integrations/google/google_transcribe.py b/kobo/apps/subsequences/integrations/google/google_transcribe.py index eb55fd1d17..726d5984c7 100644 --- a/kobo/apps/subsequences/integrations/google/google_transcribe.py +++ b/kobo/apps/subsequences/integrations/google/google_transcribe.py @@ -11,7 +11,6 @@ from google.cloud import speech from kpi.utils.log import logging - from ...constants import GOOGLETS from ...exceptions import ( AudioTooLongError, diff --git a/kobo/apps/subsequences/integrations/google/google_translate.py b/kobo/apps/subsequences/integrations/google/google_translate.py index 01432b13cf..09468d97ef 100644 --- a/kobo/apps/subsequences/integrations/google/google_translate.py +++ b/kobo/apps/subsequences/integrations/google/google_translate.py @@ -12,7 +12,6 @@ from kobo.apps.languages.models.translation import TranslationService from kpi.utils.log import logging - from ...constants import GOOGLE_CODE, GOOGLETX from ...exceptions import ( SubsequenceTimeoutError, diff --git a/kobo/apps/subsequences/models.py b/kobo/apps/subsequences/models.py index 5c371f5884..5bf7622bc8 100644 --- a/kobo/apps/subsequences/models.py +++ b/kobo/apps/subsequences/models.py @@ -3,11 +3,8 @@ from kpi.models import Asset from kpi.models.abstract_models import AbstractTimeStampedModel - from .constants import GOOGLETS, GOOGLETX -from .utils.determine_export_cols_with_values import ( - determine_export_cols_indiv, -) +from .utils.determine_export_cols_with_values import determine_export_cols_indiv class SubmissionExtras(AbstractTimeStampedModel): diff --git a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py index 1d7360479e..429d5e1435 100644 --- a/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py +++ b/kobo/apps/subsequences/scripts/add_qual_to_last_question_of_last_asset.py @@ -21,8 +21,7 @@ }, { 'labels': { - '_default': 'How many people are heard speaking in this ' - 'response?' + '_default': 'How many people are heard speaking in this ' 'response?' }, 'xpath': '', 'scope': 'by_question#survey', @@ -41,8 +40,7 @@ }, ], 'labels': { - '_default': 'Do they describe the facility as being well ' - 'maintained?' + '_default': 'Do they describe the facility as being well ' 'maintained?' }, 'xpath': '', 'scope': 'by_question#survey', @@ -72,8 +70,7 @@ }, { 'labels': { - '_default': 'Please respect the confidentiality of our ' - 'respondents.' + '_default': 'Please respect the confidentiality of our ' 'respondents.' 
}, 'xpath': '', 'scope': 'by_question#survey', diff --git a/kobo/apps/subsequences/scripts/repop_known_cols.py b/kobo/apps/subsequences/scripts/repop_known_cols.py index ef6ff9b387..a1fbd40e82 100644 --- a/kobo/apps/subsequences/scripts/repop_known_cols.py +++ b/kobo/apps/subsequences/scripts/repop_known_cols.py @@ -38,9 +38,7 @@ def migrate_subex_content( def migrate_subex_content_for_asset(asset, save=True): submission_extras = [] for sub_ex in asset.submission_extras.all(): - if updated_sub_ex := migrate_subex_content( - sub_ex, asset=asset, save=save - ): + if updated_sub_ex := migrate_subex_content(sub_ex, asset=asset, save=save): submission_extras.append(updated_sub_ex) return submission_extras diff --git a/kobo/apps/subsequences/tests/test_known_cols_utils.py b/kobo/apps/subsequences/tests/test_known_cols_utils.py index 291ac4283c..4ed79fb7b7 100644 --- a/kobo/apps/subsequences/tests/test_known_cols_utils.py +++ b/kobo/apps/subsequences/tests/test_known_cols_utils.py @@ -2,29 +2,35 @@ def test_known_cols_transc_duplicates(): - results = parse_known_cols([ - 'col/xpath:transc_a:en', - 'col/xpath:transc_a:en', - ]) + results = parse_known_cols( + [ + 'col/xpath:transc_a:en', + 'col/xpath:transc_a:en', + ] + ) assert len(results) == 1 assert results[0]['language'] == 'en' def test_known_cols_transl_duplicates(): - results = parse_known_cols([ - 'col/xpath:transl_a:fr', - 'col/xpath:transl_a:fr', - ]) + results = parse_known_cols( + [ + 'col/xpath:transl_a:fr', + 'col/xpath:transl_a:fr', + ] + ) assert len(results) == 1 def test_known_cols_transc_uniqs(): - results = parse_known_cols([ - 'col/xpath1:transc_a:en', - 'col/xpath1:transc_b:fr', - 'col/xpath2:transc_a:en', - 'col/xpath2:transc_b:fr', - ]) + results = parse_known_cols( + [ + 'col/xpath1:transc_a:en', + 'col/xpath1:transc_b:fr', + 'col/xpath2:transc_a:en', + 'col/xpath2:transc_b:fr', + ] + ) assert len(results) == 4 rs = {} for prop in ['language', 'label', 'xpath']: @@ -45,12 +51,14 @@ def test_known_cols_transc_uniqs(): def test_known_cols_transl_uniqs(): - results = parse_known_cols([ - 'col/xpath1:transl_a:en', - 'col/xpath1:transl_b:fr', - 'col/xpath2:transl_a:en', - 'col/xpath2:transl_b:fr', - ]) + results = parse_known_cols( + [ + 'col/xpath1:transl_a:en', + 'col/xpath1:transl_b:fr', + 'col/xpath2:transl_a:en', + 'col/xpath2:transl_b:fr', + ] + ) assert len(results) == 4 xpaths = [r['xpath'] for r in results] assert xpaths == [ @@ -62,12 +70,14 @@ def test_known_cols_transl_uniqs(): def test_known_cols_combos(): - results = parse_known_cols([ - 'col/xpath1:transl_a:en', - 'col/xpath1:transl_b:fr', - 'col/xpath2:transl_a:en', - 'col/xpath2:transl_b:fr', - ]) + results = parse_known_cols( + [ + 'col/xpath1:transl_a:en', + 'col/xpath1:transl_b:fr', + 'col/xpath2:transl_a:en', + 'col/xpath2:transl_b:fr', + ] + ) langs = [r['language'] for r in results] assert langs == ['en', 'fr', 'en', 'fr'] assert len(results) == 4 @@ -76,12 +86,14 @@ def test_known_cols_combos(): def test_known_cols_grouped_source(): # TODO: refer to commit d013bfe0f5 and `extend_col_deets()` to figure out # how this should behave - results = parse_known_cols([ - # `group` is the group name - # `question` is the (source) question name - 'group/question:transcript:en', - 'group/question:translation:es', - ]) + results = parse_known_cols( + [ + # `group` is the group name + # `question` is the (source) question name + 'group/question:transcript:en', + 'group/question:translation:es', + ] + ) sources = [r['source'] for r in results] xpaths = [r['xpath'] for 
r in results] names = [r['name'] for r in results] diff --git a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py index e990d240a9..4b9de37062 100644 --- a/kobo/apps/subsequences/tests/test_submission_extras_api_post.py +++ b/kobo/apps/subsequences/tests/test_submission_extras_api_post.py @@ -27,7 +27,6 @@ ) from kpi.models.asset import Asset from kpi.utils.fuzzy_int import FuzzyInt - from ..constants import GOOGLETS, GOOGLETX from ..models import SubmissionExtras @@ -37,9 +36,7 @@ def setUp(self): user = User.objects.create_user(username='someuser', email='user@example.com') self.asset = Asset( owner=user, - content={ - 'survey': [{'type': 'audio', 'label': 'q1', 'name': 'q1'}] - }, + content={'survey': [{'type': 'audio', 'label': 'q1', 'name': 'q1'}]}, ) self.asset.advanced_features = {} self.asset.save() diff --git a/kobo/apps/subsequences/utils/__init__.py b/kobo/apps/subsequences/utils/__init__.py index 15f3bb574d..8d17a77129 100644 --- a/kobo/apps/subsequences/utils/__init__.py +++ b/kobo/apps/subsequences/utils/__init__.py @@ -4,10 +4,7 @@ from ..actions.automatic_transcription import AutomaticTranscriptionAction from ..actions.qual import QualAction from ..actions.translation import TranslationAction -from .deprecation import ( - get_sanitized_advanced_features, - get_sanitized_dict_keys, -) +from .deprecation import get_sanitized_advanced_features, get_sanitized_dict_keys AVAILABLE_ACTIONS = ( AutomaticTranscriptionAction, diff --git a/kobo/apps/subsequences/utils/deprecation.py b/kobo/apps/subsequences/utils/deprecation.py index e63abc9ccc..df1cc4889f 100644 --- a/kobo/apps/subsequences/utils/deprecation.py +++ b/kobo/apps/subsequences/utils/deprecation.py @@ -97,6 +97,7 @@ class WritableAdvancedFeaturesField(WritableJSONField): It should be deleted and replaced with WritableJSONField when all assets are repopulated. """ + def __init__(self, **kwargs): super().__init__(**kwargs) self._model_instance = None diff --git a/kobo/apps/superuser_stats/migrations/0001_initial.py b/kobo/apps/superuser_stats/migrations/0001_initial.py index 7b2d303df9..506c69bf3f 100644 --- a/kobo/apps/superuser_stats/migrations/0001_initial.py +++ b/kobo/apps/superuser_stats/migrations/0001_initial.py @@ -1,3 +1,4 @@ +# flake8: noqa # Generated by Django 4.2.11 on 2024-07-03 19:37 from django.db import migrations @@ -9,12 +10,12 @@ class Migration(migrations.Migration): model without raising an error. > django.db.migrations.exceptions.InvalidBasesError: Cannot resolve bases for [] - > This can happen if you are inheriting models from an app with migrations (e.g. 
contrib.auth) + > This can happen if you are inheriting models from an app with migrations (e.g.: contrib.auth) > in an app with no migrations; see https://docs.djangoproject.com/en/4.2/topics/migrations/#dependencies for more details """ + dependencies = [ ('logger', '0034_set_require_auth_at_project_level'), ] - operations = [ - ] + operations = [] diff --git a/kobo/apps/superuser_stats/tasks.py b/kobo/apps/superuser_stats/tasks.py index 157a0fdc13..17db487a36 100644 --- a/kobo/apps/superuser_stats/tasks.py +++ b/kobo/apps/superuser_stats/tasks.py @@ -10,16 +10,7 @@ from dateutil.relativedelta import relativedelta from django.conf import settings from django.core.files.storage import default_storage -from django.db.models import ( - CharField, - Count, - DateField, - F, - IntegerField, - Q, - Sum, - Value, -) +from django.db.models import CharField, Count, DateField, F, IntegerField, Q, Sum, Value from django.db.models.functions import Cast, Concat from hub.models import ExtraUserDetail @@ -40,9 +31,7 @@ @shared_task -def generate_country_report( - output_filename: str, start_date: str, end_date: str -): +def generate_country_report(output_filename: str, start_date: str, end_date: str): def get_row_for_country(code_: str, label_: str): row_ = [] @@ -104,16 +93,14 @@ def generate_continued_usage_report(output_filename: str, end_date: str): assets = user.assets.values('pk', 'date_created').filter( date_created__date__range=(twelve_months_time, end_date), ) - submissions_count = ( - MonthlyXFormSubmissionCounter.objects.annotate( - date=Cast( - Concat(F('year'), Value('-'), F('month'), Value('-'), 1), - DateField(), - ) - ).filter( - user_id=user.id, - date__range=(twelve_months_time, end_date), + submissions_count = MonthlyXFormSubmissionCounter.objects.annotate( + date=Cast( + Concat(F('year'), Value('-'), F('month'), Value('-'), 1), + DateField(), ) + ).filter( + user_id=user.id, + date__range=(twelve_months_time, end_date), ) twelve_asset_count = assets.aggregate(asset_count=Count('pk')) twelve_submission_count = submissions_count.aggregate( @@ -185,28 +172,30 @@ def generate_domain_report(output_filename: str, start_date: str, end_date: str) # get a count of the assets domain_assets = { - domain: - Asset.objects.filter( - owner__email__endswith='@' + domain, - date_created__date__range=(start_date, end_date), - ).count() + domain: Asset.objects.filter( + owner__email__endswith='@' + domain, + date_created__date__range=(start_date, end_date), + ).count() for domain in domain_users.keys() } # get a count of the submissions domain_submissions = { - domain: MonthlyXFormSubmissionCounter.objects.annotate( - date=Cast( - Concat(F('year'), Value('-'), F('month'), Value('-'), 1), - DateField(), + domain: ( + MonthlyXFormSubmissionCounter.objects.annotate( + date=Cast( + Concat(F('year'), Value('-'), F('month'), Value('-'), 1), + DateField(), + ) ) - ).filter( - user__email__endswith='@' + domain, - date__range=(start_date, end_date), - ).aggregate( - Sum('counter') - )['counter__sum'] - if domain_assets[domain] else 0 + .filter( + user__email__endswith='@' + domain, + date__range=(start_date, end_date), + ) + .aggregate(Sum('counter'))['counter__sum'] + if domain_assets[domain] + else 0 + ) for domain in domain_users.keys() } @@ -234,30 +223,12 @@ def generate_domain_report(output_filename: str, start_date: str, end_date: str) def generate_forms_count_by_submission_range(output_filename: str): # List of submissions count ranges ranges = [ - { - 'label': '0', - 'orm_criteria': {'count': 0} - }, 
- { - 'label': '1 - 500', - 'orm_criteria': {'count__range': (1, 500)} - }, - { - 'label': '501 - 1000', - 'orm_criteria': {'count__range': (501, 1000)} - }, - { - 'label': '1001 - 10000', - 'orm_criteria': {'count__range': (1001, 10000)} - }, - { - 'label': '10001 - 50000', - 'orm_criteria': {'count__range': (10001, 50000)} - }, - { - 'label': '50001 and more', - 'orm_criteria': {'count__gte': 50001} - }, + {'label': '0', 'orm_criteria': {'count': 0}}, + {'label': '1 - 500', 'orm_criteria': {'count__range': (1, 500)}}, + {'label': '501 - 1000', 'orm_criteria': {'count__range': (501, 1000)}}, + {'label': '1001 - 10000', 'orm_criteria': {'count__range': (1001, 10000)}}, + {'label': '10001 - 50000', 'orm_criteria': {'count__range': (10001, 50000)}}, + {'label': '50001 and more', 'orm_criteria': {'count__gte': 50001}}, ] # store data for csv @@ -266,14 +237,15 @@ def generate_forms_count_by_submission_range(output_filename: str): today = datetime.today() date_ = today - relativedelta(years=1) no_submissions = XForm.objects.filter( - date_created__date__gte=date_, - num_of_submissions=0 + date_created__date__gte=date_, num_of_submissions=0 + ) + queryset = ( + Instance.objects.values('xform_id') + .filter( + date_created__date__gte=date_, + ) + .annotate(count=Count('xform_id')) ) - queryset = Instance.objects.values( - 'xform_id' - ).filter( - date_created__date__gte=date_, - ).annotate(count=Count('xform_id')) for r in ranges: if r['label'] == '0': @@ -301,10 +273,12 @@ def generate_media_storage_report(output_filename: str): data = [] for attachment_count in attachments.iterator(): - data.append([ - attachment_count['user__username'], - attachment_count['attachment_storage_bytes'], - ]) + data.append( + [ + attachment_count['user__username'], + attachment_count['attachment_storage_bytes'], + ] + ) headers = ['Username', 'Storage Used (Bytes)'] @@ -464,8 +438,7 @@ def generate_user_statistics_report( ) records = asset_queryset.annotate(deployment_count=Count('pk')).order_by() deployment_count = { - record['owner_id']: record['deployment_count'] - for record in records.iterator() + record['owner_id']: record['deployment_count'] for record in records.iterator() } # Get records from SubmissionCounter @@ -475,7 +448,9 @@ def generate_user_statistics_report( Concat(F('year'), Value('-'), F('month'), Value('-'), 1), DateField(), ) - ).filter(date__range=(start_date, end_date)).values( + ) + .filter(date__range=(start_date, end_date)) + .values( 'user_id', 'user__username', 'user__email', diff --git a/kobo/apps/trackers/tests/submission_utils.py b/kobo/apps/trackers/tests/submission_utils.py index 5c761f2823..91c1d2f288 100644 --- a/kobo/apps/trackers/tests/submission_utils.py +++ b/kobo/apps/trackers/tests/submission_utils.py @@ -60,11 +60,16 @@ def expected_file_size(submissions: int = 1): """ Calculate the expected combined file size for the test audio clip and image """ - return (os.path.getsize( - settings.BASE_DIR + '/kpi/fixtures/attachments/audio_conversion_test_clip.3gp' - ) + os.path.getsize( - settings.BASE_DIR + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg' - )) * submissions + return ( + os.path.getsize( + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_clip.3gp' + ) + + os.path.getsize( + settings.BASE_DIR + + '/kpi/fixtures/attachments/audio_conversion_test_image.jpg' + ) + ) * submissions def add_mock_submissions( @@ -102,7 +107,9 @@ def add_mock_submissions( } if age_days > 0: submission_time = timezone.now() - relativedelta(days=age_days) - 
submission['_submission_time'] = submission_time.strftime('%Y-%m-%dT%H:%M:%S') + submission['_submission_time'] = submission_time.strftime( + '%Y-%m-%dT%H:%M:%S' + ) asset_submissions.append(submission) asset.deployment.mock_submissions(asset_submissions) diff --git a/kobo/apps/trash_bin/models/project.py b/kobo/apps/trash_bin/models/project.py index 1c4360a1d9..d9529b7cd2 100644 --- a/kobo/apps/trash_bin/models/project.py +++ b/kobo/apps/trash_bin/models/project.py @@ -5,16 +5,11 @@ from django.utils.timezone import now from kobo.apps.openrosa.apps.logger.models import XForm -from kobo.apps.project_ownership.models import ( - Invite, - InviteStatusChoices, - Transfer, -) +from kobo.apps.project_ownership.models import Invite, InviteStatusChoices, Transfer from kpi.deployment_backends.kc_access.utils import kc_transaction_atomic from kpi.fields import KpiUidField from kpi.models.asset import Asset, AssetDeploymentStatus from kpi.utils.django_orm_helper import UpdateJSONFieldAttributes - from . import BaseTrash @@ -91,9 +86,9 @@ def toggle_asset_statuses( ).update(status=InviteStatusChoices.CANCELLED) if not settings.TESTING: - kc_updated = XForm.objects.filter( - **kc_filter_params - ).update(**kc_update_params) + kc_updated = XForm.objects.filter(**kc_filter_params).update( + **kc_update_params + ) assert updated >= kc_updated return queryset, updated diff --git a/kobo/apps/trash_bin/utils.py b/kobo/apps/trash_bin/utils.py index 89e86e8552..f7344c3932 100644 --- a/kobo/apps/trash_bin/utils.py +++ b/kobo/apps/trash_bin/utils.py @@ -10,21 +10,13 @@ from django.db.models import F, Q from django.db.models.signals import pre_delete from django.utils.timezone import now -from django_celery_beat.models import ( - ClockedSchedule, - PeriodicTask, - PeriodicTasks, -) +from django_celery_beat.models import ClockedSchedule, PeriodicTask, PeriodicTasks from kobo.apps.audit_log.models import AuditAction, AuditLog, AuditType -from kpi.exceptions import ( - InvalidXFormException, - MissingXFormException, -) +from kpi.exceptions import InvalidXFormException, MissingXFormException from kpi.models import Asset, ExportTask, ImportTask from kpi.utils.mongo_helper import MongoHelper from kpi.utils.storage import rmdir - from .constants import DELETE_PROJECT_STR_PREFIX, DELETE_USER_STR_PREFIX from .exceptions import ( TrashIntegrityError, diff --git a/kobo/settings/base.py b/kobo/settings/base.py index 234c45bdce..df835708b2 100644 --- a/kobo/settings/base.py +++ b/kobo/settings/base.py @@ -17,12 +17,8 @@ from django.utils.translation import gettext_lazy as t from pymongo import MongoClient -from kobo.apps.stripe.constants import ( - FREE_TIER_EMPTY_DISPLAY, - FREE_TIER_NO_THRESHOLDS, -) +from kobo.apps.stripe.constants import FREE_TIER_EMPTY_DISPLAY, FREE_TIER_NO_THRESHOLDS from kpi.utils.json import LazyJSONSerializable - from ..static_lists import EXTRA_LANG_INFO, SECTOR_CHOICE_DEFAULTS env = environ.Env() diff --git a/kpi/deployment_backends/base_backend.py b/kpi/deployment_backends/base_backend.py index 36692634e0..1b03a6dfcd 100644 --- a/kpi/deployment_backends/base_backend.py +++ b/kpi/deployment_backends/base_backend.py @@ -22,9 +22,7 @@ from rest_framework.reverse import reverse from shortuuid import ShortUUID -from kobo.apps.openrosa.libs.utils.logger_tools import ( - http_open_rosa_error_handler, -) +from kobo.apps.openrosa.libs.utils.logger_tools import http_open_rosa_error_handler from kpi.constants import ( PERM_CHANGE_SUBMISSIONS, PERM_PARTIAL_SUBMISSIONS, @@ -194,7 +192,7 @@ def 
bulk_update_submissions( { 'uuid': _uuid, 'error': handler.error, - 'result': handler.func_return + 'result': handler.func_return, } ) return self.prepare_bulk_update_response(backend_results) @@ -238,7 +236,9 @@ def delete_submissions( @abc.abstractmethod def duplicate_submission( - self, submission_id: int, request: 'rest_framework.request.Request', + self, + submission_id: int, + request: 'rest_framework.request.Request', ) -> dict: pass diff --git a/kpi/deployment_backends/kc_access/utils.py b/kpi/deployment_backends/kc_access/utils.py index 55b3aed6ae..036ef343ca 100644 --- a/kpi/deployment_backends/kc_access/utils.py +++ b/kpi/deployment_backends/kc_access/utils.py @@ -23,6 +23,7 @@ def _wrapper(*args, **kwargs): raise ProgrammingError( 'kc_access error accessing KoboCAT tables: {}'.format(str(e)) ) + return _wrapper @@ -116,13 +117,11 @@ def grant_kc_model_level_perms(user: 'kobo_auth.User'): # without KC should change # `KOBOCAT_DEFAULT_PERMISSION_CONTENT_TYPES` appropriately in their # settings - logging.error( - 'Could not find KoboCAT content type for {}.{}'.format(*pair) - ) + logging.error('Could not find KoboCAT content type for {}.{}'.format(*pair)) - permissions_to_assign = Permission.objects.using( - settings.OPENROSA_DB_ALIAS - ).filter(content_type__in=content_types) + permissions_to_assign = Permission.objects.using(settings.OPENROSA_DB_ALIAS).filter( + content_type__in=content_types + ) if content_types and not permissions_to_assign.exists(): raise RuntimeError( @@ -138,17 +137,17 @@ def set_kc_anonymous_permissions_xform_flags( obj, kpi_codenames, xform_id, remove=False ): r""" - Given a KPI object, one or more KPI permission codenames and the PK of - a KC `XForm`, assume the KPI permissions have been assigned to or - removed from the anonymous user. Then, modify any corresponding flags - on the `XForm` accordingly. - :param obj: Object with `KC_ANONYMOUS_PERMISSIONS_XFORM_FLAGS` - dictionary attribute - :param kpi_codenames: One or more codenames for KPI permissions - :type kpi_codenames: str or list(str) - :param xform_id: PK of the KC `XForm` associated with `obj` - :param remove: If `True`, apply the Boolean `not` operator to each - value in `KC_ANONYMOUS_PERMISSIONS_XFORM_FLAGS` + Given a KPI object, one or more KPI permission codenames and the PK of + a KC `XForm`, assume the KPI permissions have been assigned to or + removed from the anonymous user. Then, modify any corresponding flags + on the `XForm` accordingly. 
+ :param obj: Object with `KC_ANONYMOUS_PERMISSIONS_XFORM_FLAGS` + dictionary attribute + :param kpi_codenames: One or more codenames for KPI permissions + :type kpi_codenames: str or list(str) + :param xform_id: PK of the KC `XForm` associated with `obj` + :param remove: If `True`, apply the Boolean `not` operator to each + value in `KC_ANONYMOUS_PERMISSIONS_XFORM_FLAGS` """ if not settings.KOBOCAT_URL or not settings.KOBOCAT_INTERNAL_URL: return @@ -213,9 +212,9 @@ def assign_applicable_kc_permissions( obj, kpi_codenames, xform_id ) - xform_content_type = ContentType.objects.using( - settings.OPENROSA_DB_ALIAS - ).get(**obj.KC_CONTENT_TYPE_KWARGS) + xform_content_type = ContentType.objects.using(settings.OPENROSA_DB_ALIAS).get( + **obj.KC_CONTENT_TYPE_KWARGS + ) kc_permissions_already_assigned = ( UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS) diff --git a/kpi/deployment_backends/mock_backend.py b/kpi/deployment_backends/mock_backend.py index ff2183416b..a22cbff1e0 100644 --- a/kpi/deployment_backends/mock_backend.py +++ b/kpi/deployment_backends/mock_backend.py @@ -10,13 +10,9 @@ from django.utils.dateparse import parse_datetime from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.libs.utils.logger_tools import ( - create_instance, - dict2xform, -) +from kobo.apps.openrosa.libs.utils.logger_tools import create_instance, dict2xform from kpi.constants import PERM_ADD_SUBMISSIONS, SUBMISSION_FORMAT_TYPE_JSON from kpi.tests.utils.dicts import convert_hierarchical_keys_to_nested_dict - from ..utils.files import ExtendedContentFile from .openrosa_backend import OpenRosaDeploymentBackend @@ -41,17 +37,17 @@ def get_submissions( format_type: str = SUBMISSION_FORMAT_TYPE_JSON, submission_ids: list = None, request: Optional['rest_framework.request.Request'] = None, - **mongo_query_params + **mongo_query_params, ) -> list: # Overload parent to cast generator to a list. Many tests are expecting # a list - return list(super().get_submissions( - user, format_type, submission_ids, request, **mongo_query_params - )) + return list( + super().get_submissions( + user, format_type, submission_ids, request, **mongo_query_params + ) + ) - def mock_submissions( - self, submissions, create_uuids: bool = True - ): + def mock_submissions(self, submissions, create_uuids: bool = True): """ Simulate client (i.e.: Enketo or Collect) data submission. 
@@ -94,7 +90,7 @@ class FakeRequest: else: uuid_ = submission['meta/instanceID'].replace('uuid:', '') - sub_copy['meta'] = {'instanceID': f'uuid:{uuid_}'} + sub_copy['meta'] = {'instanceID': f'uuid:{uuid_}'} submission['_uuid'] = uuid_ assign_perm = False @@ -171,11 +167,7 @@ def _get_media_files(self, submission): basename = os.path.basename(filename) file_ = os.path.join( - settings.BASE_DIR, - 'kpi', - 'fixtures', - 'attachments', - basename + settings.BASE_DIR, 'kpi', 'fixtures', 'attachments', basename ) if not os.path.isfile(file_): raise Exception( diff --git a/kpi/deployment_backends/openrosa_backend.py b/kpi/deployment_backends/openrosa_backend.py index a10ca69d96..c55a62ca3f 100644 --- a/kpi/deployment_backends/openrosa_backend.py +++ b/kpi/deployment_backends/openrosa_backend.py @@ -39,10 +39,7 @@ set_instance_validation_statuses, ) from kobo.apps.openrosa.apps.main.models import MetaData, UserProfile -from kobo.apps.openrosa.libs.utils.logger_tools import ( - create_instance, - publish_xls_form, -) +from kobo.apps.openrosa.libs.utils.logger_tools import create_instance, publish_xls_form from kobo.apps.subsequences.utils import stream_with_extras from kobo.apps.trackers.models import NLPUsageCounter from kpi.constants import ( @@ -74,15 +71,9 @@ from kpi.utils.mongo_helper import MongoHelper from kpi.utils.object_permission import get_database_user from kpi.utils.xml import fromstring_preserve_root_xmlns, xml_tostring - -from ..exceptions import ( - BadFormatException, -) +from ..exceptions import BadFormatException from .base_backend import BaseDeploymentBackend -from .kc_access.utils import ( - assign_applicable_kc_permissions, - kc_transaction_atomic, -) +from .kc_access.utils import assign_applicable_kc_permissions, kc_transaction_atomic class OpenRosaDeploymentBackend(BaseDeploymentBackend): @@ -142,12 +133,13 @@ def connect(self, active=False): CAUTION: Does not save deployment data to the database! """ xlsx_io = self.asset.to_xlsx_io( - versioned=True, append={ + versioned=True, + append={ 'settings': { 'id_string': self.asset.uid, 'form_title': self.asset.name, } - } + }, ) xlsx_file = ContentFile(xlsx_io.read(), name=f'{self.asset.uid}.xlsx') @@ -155,9 +147,7 @@ def connect(self, active=False): self._xform = publish_xls_form(xlsx_file, self.asset.owner) self._xform.downloadable = active self._xform.kpi_asset_uid = self.asset.uid - self._xform.save( - update_fields=['downloadable', 'kpi_asset_uid'] - ) + self._xform.save(update_fields=['downloadable', 'kpi_asset_uid']) self.store_data( { @@ -179,9 +169,7 @@ def form_uuid(self): try: return self.backend_response['uuid'] except KeyError: - logging.warning( - 'OpenRosa backend response has no `uuid`', exc_info=True - ) + logging.warning('OpenRosa backend response has no `uuid`', exc_info=True) return None @staticmethod @@ -220,9 +208,7 @@ def delete_submission( """ self.validate_access_with_partial_perms( - user=user, - perm=PERM_DELETE_SUBMISSIONS, - submission_ids=[submission_id] + user=user, perm=PERM_DELETE_SUBMISSIONS, submission_ids=[submission_id] ) count, _ = Instance.objects.filter(pk=submission_id).delete() @@ -259,7 +245,9 @@ def delete_submissions( return delete_instances(self.xform, data) def duplicate_submission( - self, submission_id: int, request: 'rest_framework.request.Request', + self, + submission_id: int, + request: 'rest_framework.request.Request', ) -> dict: """ Duplicates a single submission. 
The submission with the given @@ -285,9 +273,7 @@ def duplicate_submission( # Get attachments for the duplicated submission if there are any attachments = [] - if attachment_objects := Attachment.objects.filter( - instance_id=submission_id - ): + if attachment_objects := Attachment.objects.filter(instance_id=submission_id): attachments = ( ExtendedContentFile(a.media_file.read(), name=a.media_file_basename) for a in attachment_objects @@ -309,9 +295,7 @@ def duplicate_submission( # Rely on `meta/instanceID` being present. If it's absent, something is # fishy enough to warrant raising an exception instead of continuing # silently - xml_parsed.find(self.SUBMISSION_CURRENT_UUID_XPATH).text = ( - uuid_formatted - ) + xml_parsed.find(self.SUBMISSION_CURRENT_UUID_XPATH).text = uuid_formatted # create_instance uses `username` argument to identify the XForm object # (when nothing else worked). `_submitted_by` is populated by `request.user` @@ -345,13 +329,9 @@ def edit_submission( try: xml_root = fromstring_preserve_root_xmlns(submission_xml) except DET.ParseError: - raise SubmissionIntegrityError( - t('Your submission XML is malformed.') - ) + raise SubmissionIntegrityError(t('Your submission XML is malformed.')) try: - deprecated_uuid = xml_root.find( - self.SUBMISSION_DEPRECATED_UUID_XPATH - ).text + deprecated_uuid = xml_root.find(self.SUBMISSION_DEPRECATED_UUID_XPATH).text xform_uuid = xml_root.find(self.FORM_UUID_XPATH).text except AttributeError: raise SubmissionIntegrityError( @@ -376,9 +356,7 @@ def edit_submission( # Validate write access for users with partial permissions self.validate_access_with_partial_perms( - user=user, - perm=PERM_CHANGE_SUBMISSIONS, - submission_ids=[instance.pk] + user=user, perm=PERM_CHANGE_SUBMISSIONS, submission_ids=[instance.pk] ) # Set the In-Memory file’s current position to 0 before passing it to @@ -531,63 +509,63 @@ def get_daily_counts( '_userform_id': self.mongo_userform_id, '_submission_time': { '$gte': f'{timeframe[0]}', - '$lte': f'{timeframe[1]}T23:59:59' - } + '$lte': f'{timeframe[1]}T23:59:59', + }, } - query = MongoHelper.get_permission_filters_query( - query, permission_filters - ) + query = MongoHelper.get_permission_filters_query(query, permission_filters) - documents = settings.MONGO_DB.instances.aggregate([ - { - '$match': query, - }, - { - '$group': { - '_id': { - '$dateToString': { - 'format': '%Y-%m-%d', - 'date': { - '$dateFromString': { - 'format': '%Y-%m-%dT%H:%M:%S', - 'dateString': '$_submission_time' - } + documents = settings.MONGO_DB.instances.aggregate( + [ + { + '$match': query, + }, + { + '$group': { + '_id': { + '$dateToString': { + 'format': '%Y-%m-%d', + 'date': { + '$dateFromString': { + 'format': '%Y-%m-%dT%H:%M:%S', + 'dateString': '$_submission_time', + } + }, } - } - }, - 'count': {'$sum': 1} - } - } - ]) + }, + 'count': {'$sum': 1}, + } + }, + ] + ) return {doc['_id']: doc['count'] for doc in documents} # Trivial case, user has 'view_permissions' - daily_counts = ( - DailyXFormSubmissionCounter.objects.values( - 'date', 'counter' - ).filter( - xform_id=self.xform_id, - date__range=timeframe, - ) + daily_counts = DailyXFormSubmissionCounter.objects.values( + 'date', 'counter' + ).filter( + xform_id=self.xform_id, + date__range=timeframe, ) - return { - str(count['date']): count['counter'] for count in daily_counts - } + return {str(count['date']): count['counter'] for count in daily_counts} def get_data_download_links(self): - exports_base_url = '/'.join(( - settings.KOBOCAT_URL.rstrip('/'), - 
self.asset.owner.username, - 'exports', - self.xform.id_string - )) - reports_base_url = '/'.join(( - settings.KOBOCAT_URL.rstrip('/'), - self.asset.owner.username, - 'reports', - self.xform.id_string - )) + exports_base_url = '/'.join( + ( + settings.KOBOCAT_URL.rstrip('/'), + self.asset.owner.username, + 'exports', + self.xform.id_string, + ) + ) + reports_base_url = '/'.join( + ( + settings.KOBOCAT_URL.rstrip('/'), + self.asset.owner.username, + 'reports', + self.xform.id_string, + ) + ) links = { # To be displayed in iframes 'xls_legacy': '/'.join((exports_base_url, 'xls/')), @@ -606,10 +584,9 @@ def get_enketo_survey_links(self): data = { 'server_url': '{}/{}'.format( - settings.KOBOCAT_URL.rstrip('/'), - self.asset.owner.username + settings.KOBOCAT_URL.rstrip('/'), self.asset.owner.username ), - 'form_id': self.xform.id_string + 'form_id': self.xform.id_string, } try: @@ -617,13 +594,12 @@ def get_enketo_survey_links(self): f'{settings.ENKETO_URL}/{settings.ENKETO_SURVEY_ENDPOINT}', # bare tuple implies basic auth auth=(settings.ENKETO_API_KEY, ''), - data=data + data=data, ) response.raise_for_status() except requests.exceptions.RequestException: # Don't 500 the entire asset view if Enketo is unreachable - logging.error( - 'Failed to retrieve links from Enketo', exc_info=True) + logging.error('Failed to retrieve links from Enketo', exc_info=True) return {} try: links = response.json() @@ -653,9 +629,7 @@ def get_enketo_survey_links(self): # Thus, we need to always generated the ID with the same URL # (i.e.: with username) to be retro-compatible and then, # overwrite the OpenRosa server URL again. - self.set_enketo_open_rosa_server( - require_auth=True, enketo_id=enketo_id - ) + self.set_enketo_open_rosa_server(require_auth=True, enketo_id=enketo_id) for discard in ('enketo_id', 'code', 'preview_iframe_url'): try: @@ -695,7 +669,7 @@ def get_submissions( format_type: str = SUBMISSION_FORMAT_TYPE_JSON, submission_ids: list = None, request: Optional['rest_framework.request.Request'] = None, - **mongo_query_params + **mongo_query_params, ) -> Union[Generator[dict, None, None], list]: """ Retrieve submissions that `user` is allowed to access. 
@@ -718,9 +692,7 @@ def get_submissions( See `BaseDeploymentBackend._rewrite_json_attachment_urls()` """ - mongo_query_params['submission_ids'] = ( - submission_ids if submission_ids else [] - ) + mongo_query_params['submission_ids'] = submission_ids if submission_ids else [] params = self.validate_submission_list_params( user, format_type=format_type, **mongo_query_params ) @@ -748,9 +720,7 @@ def get_validation_status( return { 'content_type': 'application/json', 'status': status.HTTP_404_NOT_FOUND, - 'data': { - 'detail': f'No submission found with ID: {submission_id}' - } + 'data': {'detail': f'No submission found with ID: {submission_id}'}, } return { @@ -777,10 +747,8 @@ def nlp_tracking_data( try: nlp_tracking = ( NLPUsageCounter.objects.only('total_asr_seconds', 'total_mt_characters') - .filter( - asset_id__in=asset_ids, - **filter_args - ).aggregate( + .filter(asset_id__in=asset_ids, **filter_args) + .aggregate( total_nlp_asr_seconds=Coalesce(Sum('total_asr_seconds'), 0), total_nlp_mt_characters=Coalesce(Sum('total_mt_characters'), 0), ) @@ -804,12 +772,13 @@ def redeploy(self, active=None): id_string = self.xform.id_string xlsx_io = self.asset.to_xlsx_io( - versioned=True, append={ + versioned=True, + append={ 'settings': { 'id_string': id_string, 'form_title': self.asset.name, } - } + }, ) xlsx_file = ContentFile(xlsx_io.read(), name=f'{self.asset.uid}.xlsx') @@ -882,7 +851,7 @@ def rename_enketo_id_key(self, previous_owner_username: str): try: enketo_redis_client.rename( src=f'or:{domain_name}/{previous_owner_username},{asset_uid}', - dst=f'or:{domain_name}/{self.asset.owner.username},{asset_uid}' + dst=f'or:{domain_name}/{self.asset.owner.username},{asset_uid}', ) except redis.exceptions.ResponseError: # original does not exist, weird but don't raise a 500 for that @@ -954,8 +923,7 @@ def set_asset_uid(self, force: bool = False) -> bool: during this call, otherwise `False`. """ is_synchronized = not ( - force or - self.backend_response.get('kpi_asset_uid', None) is None + force or self.backend_response.get('kpi_asset_uid', None) is None ) if is_synchronized: return False @@ -963,17 +931,13 @@ def set_asset_uid(self, force: bool = False) -> bool: # Use `queryset.update()` over `model.save()` because we don't need to # run the logic of the `model.save()` method and we don't need signals # to be called. - XForm.objects.filter(pk=self.xform_id).update( - kpi_asset_uid=self.asset.uid - ) + XForm.objects.filter(pk=self.xform_id).update(kpi_asset_uid=self.asset.uid) self.xform.kpi_asset_uid = self.asset.uid self.backend_response['kpi_asset_uid'] = self.asset.uid self.store_data({'backend_response': self.backend_response}) return True - def set_enketo_open_rosa_server( - self, require_auth: bool, enketo_id: str = None - ): + def set_enketo_open_rosa_server(self, require_auth: bool, enketo_id: str = None): # Kobocat handles Open Rosa requests with different accesses. 
# - Authenticated access, https://[kc] # - Anonymous access, https://[kc]/username @@ -1018,16 +982,14 @@ def set_validation_status( # TODO simplify response when KobocatDeploymentBackend # and MockDeploymentBackend are gone try: - instance = Instance.objects.only( - 'validation_status', 'date_modified' - ).get(pk=submission_id) + instance = Instance.objects.only('validation_status', 'date_modified').get( + pk=submission_id + ) except Instance.DoesNotExist: return { 'content_type': 'application/json', 'status': status.HTTP_404_NOT_FOUND, - 'data': { - 'detail': f'No submission found with ID: {submission_id}' - } + 'data': {'detail': f'No submission found with ID: {submission_id}'}, } if method == 'DELETE': @@ -1040,9 +1002,7 @@ def set_validation_status( return { 'content_type': 'application/json', 'status': status.HTTP_500_INTERNAL_SERVER_ERROR, - 'data': { - 'detail': 'Could not update MongoDB' - } + 'data': {'detail': 'Could not update MongoDB'}, } validation_status_uid = data.get('validation_status.uid') @@ -1055,7 +1015,7 @@ def set_validation_status( 'status': status.HTTP_400_BAD_REQUEST, 'data': { 'detail': f'Invalid validation status: `{validation_status_uid}`' - } + }, } return { 'data': instance.validation_status, @@ -1107,9 +1067,7 @@ def store_submission( ): media_files = [] if attachments: - media_files = ( - media_file for media_file in attachments.values() - ) + media_files = (media_file for media_file in attachments.values()) # create_instance uses `username` argument to identify the XForm object # (when nothing else worked). `_submitted_by` is populated by `request.user` @@ -1144,9 +1102,11 @@ def submission_count_since_date(self, start_date=None): # Note: this is replicating the functionality that was formerly in # `current_month_submission_count`. 
`current_month_submission_count` # didn't account for partial permissions, and this doesn't either - total_submissions = DailyXFormSubmissionCounter.objects.only( - 'date', 'counter' - ).filter(**filter_args).aggregate(count_sum=Coalesce(Sum('counter'), 0)) + total_submissions = ( + DailyXFormSubmissionCounter.objects.only('date', 'counter') + .filter(**filter_args) + .aggregate(count_sum=Coalesce(Sum('counter'), 0)) + ) except DailyXFormSubmissionCounter.DoesNotExist: return 0 else: @@ -1259,9 +1219,7 @@ def xform(self): 'require_auth', 'uuid', ) - .select_related( - 'user' - ) # Avoid extra query to validate username below + .select_related('user') # Avoid extra query to validate username below .first() ) @@ -1287,9 +1245,7 @@ def xform_id_string(self): @staticmethod @contextmanager def suspend_submissions(user_ids: list[int]): - UserProfile.objects.filter( - user_id__in=user_ids - ).update( + UserProfile.objects.filter(user_id__in=user_ids).update( metadata=UpdateJSONFieldAttributes( 'metadata', updates={'submissions_suspended': True}, @@ -1298,41 +1254,30 @@ def suspend_submissions(user_ids: list[int]): try: yield finally: - UserProfile.objects.filter( - user_id__in=user_ids - ).update( + UserProfile.objects.filter(user_id__in=user_ids).update( metadata=UpdateJSONFieldAttributes( 'metadata', updates={'submissions_suspended': False}, ), ) - def transfer_submissions_ownership( - self, previous_owner_username: str - ) -> bool: + def transfer_submissions_ownership(self, previous_owner_username: str) -> bool: results = settings.MONGO_DB.instances.update_many( {'_userform_id': f'{previous_owner_username}_{self.xform_id_string}'}, - { - '$set': { - '_userform_id': self.mongo_userform_id - } - }, + {'$set': {'_userform_id': self.mongo_userform_id}}, ) - return ( - results.matched_count == 0 or - ( - results.matched_count > 0 - and results.matched_count == results.modified_count - ) + return results.matched_count == 0 or ( + results.matched_count > 0 + and results.matched_count == results.modified_count ) def transfer_counters_ownership(self, new_owner: 'kobo_auth.User'): - NLPUsageCounter.objects.filter( - asset=self.asset, user=self.asset.owner - ).update(user=new_owner) + NLPUsageCounter.objects.filter(asset=self.asset, user=self.asset.owner).update( + user=new_owner + ) DailyXFormSubmissionCounter.objects.filter( xform=self.xform, user_id=self.asset.owner.pk ).update(user=new_owner) @@ -1415,9 +1360,7 @@ def _update_kc_metadata_hash( file_.save(update_fields=['synced_with_backend']) def __get_submissions_in_json( - self, - request: Optional['rest_framework.request.Request'] = None, - **params + self, request: Optional['rest_framework.request.Request'] = None, **params ) -> Generator[dict, None, None]: """ Retrieve submissions directly from Mongo. @@ -1427,7 +1370,8 @@ def __get_submissions_in_json( if not params.get('sort'): params['sort'] = {'_id': 1} mongo_cursor, total_count = MongoHelper.get_instances( - self.mongo_userform_id, **params) + self.mongo_userform_id, **params + ) # Python-only attribute used by `kpi.views.v2.data.DataViewSet.list()` self.current_submission_count = total_count @@ -1450,32 +1394,29 @@ def __get_submissions_in_json( for submission in mongo_cursor ) - def __get_submissions_in_xml( - self, - **params - ) -> Generator[str, None, None]: + def __get_submissions_in_xml(self, **params) -> Generator[str, None, None]: """ Retrieve submissions directly from PostgreSQL. Submissions can be filtered with `params`. 
""" mongo_filters = ['query', 'permission_filters'] - use_mongo = any(mongo_filter in mongo_filters for mongo_filter in params - if params.get(mongo_filter) is not None) + use_mongo = any( + mongo_filter in mongo_filters + for mongo_filter in params + if params.get(mongo_filter) is not None + ) if use_mongo: # We use Mongo to retrieve matching instances. params['fields'] = ['_id'] # Force `sort` by `_id` for Mongo - # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` + # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` # noqa: E501 params['sort'] = {'_id': 1} submissions, count = MongoHelper.get_instances( self.mongo_userform_id, **params ) - submission_ids = [ - submission.get('_id') - for submission in submissions - ] + submission_ids = [submission.get('_id') for submission in submissions] self.current_submission_count = count queryset = Instance.objects.filter(xform_id=self.xform_id) @@ -1488,7 +1429,7 @@ def __get_submissions_in_xml( self.current_submission_count = queryset.count() # Force Sort by id - # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` + # See FIXME about sort in `BaseDeploymentBackend.validate_submission_list_params()` # noqa: E501 queryset = queryset.order_by('id') # When using Mongo, data is already paginated, diff --git a/kpi/exceptions.py b/kpi/exceptions.py index 42fd198231..909c7dd13c 100644 --- a/kpi/exceptions.py +++ b/kpi/exceptions.py @@ -113,9 +113,7 @@ class InvalidSearchException(exceptions.APIException): class InvalidXFormException(Exception): - def __init__( - self, message=t('Deployment links to an unexpected KoboCAT XForm') - ): + def __init__(self, message=t('Deployment links to an unexpected KoboCAT XForm')): super().__init__(message) diff --git a/kpi/management/commands/sync_kobocat_perms.py b/kpi/management/commands/sync_kobocat_perms.py index c20b2aa014..9f5961b05b 100644 --- a/kpi/management/commands/sync_kobocat_perms.py +++ b/kpi/management/commands/sync_kobocat_perms.py @@ -92,9 +92,7 @@ def _sync_perms(self, **options): with kc_transaction_atomic(): kc_user_obj_perm_qs = ( - UserObjectPermission.objects.using( - settings.OPENROSA_DB_ALIAS - ) + UserObjectPermission.objects.using(settings.OPENROSA_DB_ALIAS) .filter(object_pk=asset.deployment.xform_id) .exclude(user_id=asset.owner_id) ) diff --git a/kpi/management/commands/sync_kobocat_xforms.py b/kpi/management/commands/sync_kobocat_xforms.py index 9307759b52..43581d87d0 100644 --- a/kpi/management/commands/sync_kobocat_xforms.py +++ b/kpi/management/commands/sync_kobocat_xforms.py @@ -235,9 +235,7 @@ def _sync_form_content(asset, xform, changes): if modified: # It's important to update `deployment_data` with the new hash from KC; # otherwise, we'll be re-syncing the same content forever (issue #1302) - asset.deployment.store_data( - {'backend_response': _get_backend_response(xform)} - ) + asset.deployment.store_data({'backend_response': _get_backend_response(xform)}) return modified @@ -253,12 +251,14 @@ def _sync_form_metadata(asset, xform, changes): # A brand-new asset asset.date_created = xform.date_created backend_deployment = OpenRosaDeploymentBackend(asset) - backend_deployment.store_data({ - 'backend': 'openrosa', - 'active': xform.downloadable, - 'backend_response': _get_backend_response(xform), - 'version': asset.version_id - }) + backend_deployment.store_data( + { + 'backend': 'openrosa', + 'active': xform.downloadable, + 'backend_response': _get_backend_response(xform), + 'version': 
asset.version_id, + } + ) changes.append('CREATE METADATA') asset.set_deployment(kc_deployment) # `_sync_permissions()` will save `asset` if it has no `pk` @@ -288,9 +288,7 @@ def _sync_form_metadata(asset, xform, changes): changes.append('NAME') if fetch_backend_response: - asset.deployment.store_data({ - 'backend_response': _get_backend_response(xform) - }) + asset.deployment.store_data({'backend_response': _get_backend_response(xform)}) modified = True affected_users = _sync_permissions(asset, xform) diff --git a/kpi/migrations/0011_explode_asset_deployments.py b/kpi/migrations/0011_explode_asset_deployments.py index 98193cb2ad..e779eb2468 100644 --- a/kpi/migrations/0011_explode_asset_deployments.py +++ b/kpi/migrations/0011_explode_asset_deployments.py @@ -22,15 +22,17 @@ def explode_assets(apps, schema_editor): deployment = asset.assetdeployment_set.last() # Copy the deployment-related data backend_deployment = OpenRosaDeploymentBackend(asset) - backend_deployment.store_data({ - 'backend': 'kobocat', - 'active': deployment.data['downloadable'], - 'backend_response': deployment.data, - # deployment.asset_version_id was mistakenly set to the id of the - # _oldest_ version of the asset, making it useless, so we use zero - # as a placeholder - 'version': 0 - }) + backend_deployment.store_data( + { + 'backend': 'kobocat', + 'active': deployment.data['downloadable'], + 'backend_response': deployment.data, + # deployment.asset_version_id was mistakenly set to the id of the + # _oldest_ version of the asset, making it useless, so we use zero + # as a placeholder + 'version': 0, + } + ) asset.save() assets_done += 1 if assets_done % asset_progress_interval == 0: diff --git a/kpi/migrations/0012_onetimeauthenticationkey.py b/kpi/migrations/0012_onetimeauthenticationkey.py index ad0a4f0c3f..52c3e50160 100644 --- a/kpi/migrations/0012_onetimeauthenticationkey.py +++ b/kpi/migrations/0012_onetimeauthenticationkey.py @@ -2,8 +2,8 @@ from secrets import token_urlsafe import django.core.validators -from django.db import migrations, models from django.conf import settings +from django.db import migrations, models from kpi.utils.datetime import ten_minutes_from_now @@ -33,9 +33,7 @@ class Migration(migrations.Migration): models.CharField( default=partial(token_urlsafe, nbytes=45), max_length=60, - validators=[ - django.core.validators.MinLengthValidator(60) - ], + validators=[django.core.validators.MinLengthValidator(60)], ), ), ('expiry', models.DateTimeField(default=ten_minutes_from_now)), diff --git a/kpi/mixins/formpack_xlsform_utils.py b/kpi/mixins/formpack_xlsform_utils.py index 6eb20a2a44..14ac123b03 100644 --- a/kpi/mixins/formpack_xlsform_utils.py +++ b/kpi/mixins/formpack_xlsform_utils.py @@ -9,23 +9,17 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.spreadsheet_content import flatten_to_spreadsheet_content from kobo.apps.reports.constants import FUZZY_VERSION_PATTERN -from kpi.utils.absolute_paths import ( - insert_full_paths_in_place, -) -from kpi.utils.asset_translation_utils import ( +from kpi.utils.absolute_paths import insert_full_paths_in_place +from kpi.utils.asset_translation_utils import ( # TRANSLATIONS_EQUAL, TRANSLATION_ADDED, TRANSLATION_CHANGE_UNSUPPORTED, TRANSLATION_DELETED, TRANSLATION_RENAMED, TRANSLATIONS_MULTIPLE_CHANGES, - # TRANSLATIONS_EQUAL, TRANSLATIONS_OUT_OF_ORDER, compare_translations, ) -from kpi.utils.autoname import ( - autoname_fields_in_place, - autovalue_choices_in_place, -) +from kpi.utils.autoname import 
autoname_fields_in_place, autovalue_choices_in_place from kpi.utils.kobo_to_xlsform import ( expand_rank_and_score_in_place, remove_empty_expressions_in_place, @@ -270,14 +264,15 @@ def _prioritize_translation(self, content, translation_name, is_new=False): # just ignore the translation `translation_name` if len(_translations) == 1 and _translations[0] is None: return - else: # Otherwise raise an error. + else: # Otherwise, raise an error. # Remove None from translations we want to display to users valid_translations = [t for t in _translations if t is not None] raise ValueError( '`{translation_name}` is specified as the default language, ' - 'but only these translations are present in the form: `{translations}`'.format( + 'but only these translations are present in the form: ' + '`{translations}`'.format( translation_name=translation_name, - translations='`, `'.join(valid_translations) + translations='`, `'.join(valid_translations), ) ) diff --git a/kpi/models/asset.py b/kpi/models/asset.py index 99c0b20dbc..fff9eafeeb 100644 --- a/kpi/models/asset.py +++ b/kpi/models/asset.py @@ -18,10 +18,7 @@ from formpack.utils.flatten_content import flatten_content from formpack.utils.json_hash import json_hash from formpack.utils.kobo_locking import strip_kobo_locking_profile -from kobo.apps.reports.constants import ( - DEFAULT_REPORTS_KEY, - SPECIFIC_REPORTS_KEY, -) +from kobo.apps.reports.constants import DEFAULT_REPORTS_KEY, SPECIFIC_REPORTS_KEY from kobo.apps.subsequences.advanced_features_params_schema import ( ADVANCED_FEATURES_PARAMS_SCHEMA, ) @@ -67,10 +64,7 @@ BadPermissionsException, DeploymentDataException, ) -from kpi.fields import ( - KpiUidField, - LazyDefaultJSONBField, -) +from kpi.fields import KpiUidField, LazyDefaultJSONBField from kpi.mixins import ( FormpackXLSFormUtilsMixin, ObjectPermissionMixin, @@ -745,8 +739,11 @@ def get_partial_perms( If user doesn't have any partial permissions, it returns `None`. 
""" - perms = self.asset_partial_permissions.filter(user_id=user_id)\ - .values_list('permissions', flat=True).first() + perms = ( + self.asset_partial_permissions.filter(user_id=user_id) + .values_list('permissions', flat=True) + .first() + ) if perms: if with_filters: @@ -1136,9 +1133,7 @@ def update_languages(self, children=None): else: children_languages = list( self.children.values_list('summary__languages', flat=True) - .exclude( - Q(summary__languages=[]) | Q(summary__languages=[None]) - ) + .exclude(Q(summary__languages=[]) | Q(summary__languages=[None])) .order_by() ) @@ -1165,8 +1160,8 @@ def validate_advanced_features(self): self.advanced_features = advanced_features jsonschema.validate( - instance=self.advanced_features, - schema=ADVANCED_FEATURES_PARAMS_SCHEMA, + instance=self.advanced_features, + schema=ADVANCED_FEATURES_PARAMS_SCHEMA, ) @property diff --git a/kpi/models/authorized_application.py b/kpi/models/authorized_application.py index 2886df2469..d4bb9ab63c 100644 --- a/kpi/models/authorized_application.py +++ b/kpi/models/authorized_application.py @@ -30,11 +30,13 @@ class ApplicationTokenAuthentication(TokenAuthentication): model = AuthorizedApplication def authenticate_credentials(self, key): - """ Mostly duplicated from TokenAuthentication, except that we return + """ + Mostly duplicated from TokenAuthentication, except that we return an AnonymousUser - We also do not create an AuditLog here because we only want to do so for certain endpoints, - and only after we get the user being accessed""" + We also do not create an AuditLog here because we only want to do so for + certain endpoints, and only after we get the user being accessed + """ try: token = self.model.objects.get(key=key) except self.model.DoesNotExist: diff --git a/kpi/models/import_export_task.py b/kpi/models/import_export_task.py index 362bb15e18..322f2cad76 100644 --- a/kpi/models/import_export_task.py +++ b/kpi/models/import_export_task.py @@ -33,9 +33,7 @@ from werkzeug.http import parse_options_header import formpack -from formpack.constants import ( - KOBO_LOCK_SHEET, -) +from formpack.constants import KOBO_LOCK_SHEET from formpack.schema.fields import ( IdCopyField, NotesCopyField, @@ -60,11 +58,7 @@ from kpi.fields import KpiUidField from kpi.models import Asset from kpi.utils.log import logging -from kpi.utils.models import ( - _load_library_content, - create_assets, - resolve_url_to_asset, -) +from kpi.utils.models import _load_library_content, create_assets, resolve_url_to_asset from kpi.utils.project_view_exports import create_project_view_export from kpi.utils.rename_xls_sheet import ( ConflictSheetError, diff --git a/kpi/serializers/v2/asset.py b/kpi/serializers/v2/asset.py index b608c27ce5..2db03ec6ef 100644 --- a/kpi/serializers/v2/asset.py +++ b/kpi/serializers/v2/asset.py @@ -19,13 +19,8 @@ from kobo.apps.reports.constants import FUZZY_VERSION_PATTERN from kobo.apps.reports.report_data import build_formpack -from kobo.apps.subsequences.utils.deprecation import ( - WritableAdvancedFeaturesField, -) -from kobo.apps.trash_bin.exceptions import ( - TrashIntegrityError, - TrashTaskInProgressError, -) +from kobo.apps.subsequences.utils.deprecation import WritableAdvancedFeaturesField +from kobo.apps.trash_bin.exceptions import TrashIntegrityError, TrashTaskInProgressError from kobo.apps.trash_bin.models.project import ProjectTrash from kobo.apps.trash_bin.utils import move_to_trash, put_back from kpi.constants import ( @@ -67,7 +62,6 @@ user_has_project_view_asset_perm, view_has_perm, ) - 
from .asset_export_settings import AssetExportSettingsSerializer from .asset_file import AssetFileSerializer from .asset_permission_assignment import AssetPermissionAssignmentSerializer diff --git a/kpi/serializers/v2/deployment.py b/kpi/serializers/v2/deployment.py index e140800ca9..c82325039c 100644 --- a/kpi/serializers/v2/deployment.py +++ b/kpi/serializers/v2/deployment.py @@ -28,15 +28,11 @@ def create(self, validated_data): asset = self.context['asset'] self._raise_unless_current_version(asset, validated_data) # if no backend is provided, use the installation's default backend - backend_id = validated_data.get( - 'backend', settings.DEFAULT_DEPLOYMENT_BACKEND - ) + backend_id = validated_data.get('backend', settings.DEFAULT_DEPLOYMENT_BACKEND) # `asset.deploy()` deploys the latest version and updates that versions' # 'deployed' boolean value - asset.deploy( - backend=backend_id, active=validated_data.get('active', False) - ) + asset.deploy(backend=backend_id, active=validated_data.get('active', False)) return asset.deployment def update(self, instance, validated_data): diff --git a/kpi/tasks.py b/kpi/tasks.py index 9ed84ddb19..380b6fe939 100644 --- a/kpi/tasks.py +++ b/kpi/tasks.py @@ -13,11 +13,7 @@ from kpi.constants import LIMIT_HOURS_23 from kpi.maintenance_tasks import remove_old_asset_snapshots, remove_old_import_tasks from kpi.models.asset import Asset -from kpi.models.import_export_task import ( - ExportTask, - ImportTask, - ProjectViewExportTask, -) +from kpi.models.import_export_task import ExportTask, ImportTask, ProjectViewExportTask @celery_app.task diff --git a/kpi/tests/api/v1/test_api_assets.py b/kpi/tests/api/v1/test_api_assets.py index 6be5ad05b5..2a788c087e 100644 --- a/kpi/tests/api/v1/test_api_assets.py +++ b/kpi/tests/api/v1/test_api_assets.py @@ -66,8 +66,9 @@ def test_xml_export_title_retained(self): 'survey': [{'label': 'Q1 Label.', 'type': 'decimal'}]} self.login('someuser', 'someuser') asset = self.create_asset(asset_title, json.dumps(content), format='json') - response = self.client.get(reverse('asset-detail', - kwargs={'uid': asset.uid, 'format': 'xml'})) + response = self.client.get( + reverse('asset-detail', kwargs={'uid': asset.uid, 'format': 'xml'}) + ) self.assertEqual(response.status_code, status.HTTP_200_OK) xml = check_lxml_fromstring(response.content) title_elts = xml.xpath('./*[local-name()="head"]/*[local-name()="title"]') @@ -80,8 +81,9 @@ def test_xml_export_name_as_title(self): 'survey': [{'label': 'Q1 Label.', 'type': 'decimal'}]} self.login('someuser', 'someuser') asset = self.create_asset(asset_name, json.dumps(content), format='json') - response = self.client.get(reverse('asset-detail', - kwargs={'uid': asset.uid, 'format': 'xml'})) + response = self.client.get( + reverse('asset-detail', kwargs={'uid': asset.uid, 'format': 'xml'}) + ) self.assertEqual(response.status_code, status.HTTP_200_OK) xml = check_lxml_fromstring(response.content) title_elts = xml.xpath('./*[local-name()="head"]/*[local-name()="title"]') @@ -93,8 +95,9 @@ def test_api_xml_export_auto_title(self): 'survey': [{'label': 'Q1 Label.', 'type': 'decimal'}]} self.login('someuser', 'someuser') asset = self.create_asset('', json.dumps(content), format='json') - response = self.client.get(reverse('asset-detail', - kwargs={'uid': asset.uid, 'format': 'xml'})) + response = self.client.get( + reverse('asset-detail', kwargs={'uid': asset.uid, 'format': 'xml'}) + ) self.assertEqual(response.status_code, status.HTTP_200_OK) xml = check_lxml_fromstring(response.content) title_elts 
= xml.xpath('./*[local-name()="head"]/*[local-name()="title"]') @@ -102,22 +105,28 @@ def test_api_xml_export_auto_title(self): self.assertNotEqual(title_elts[0].text, '') def test_xml_export_group(self): - example_formbuilder_output = {'survey': [{'type': 'begin_group', - 'relevant': '', - 'appearance': '', - 'name': 'group_hl3hw45', - 'label': 'Group 1 Label'}, - {'required': 'true', - 'type': 'decimal', - 'label': 'Question 1 Label'}, - {'type': 'end_group'}], - 'settings': [{'form_title': '', - 'form_id': 'group_form'}]} + example_formbuilder_output = { + 'survey': [ + { + 'type': 'begin_group', + 'relevant': '', + 'appearance': '', + 'name': 'group_hl3hw45', + 'label': 'Group 1 Label', + }, + {'required': 'true', 'type': 'decimal', 'label': 'Question 1 Label'}, + {'type': 'end_group'}, + ], + 'settings': [{'form_title': '', 'form_id': 'group_form'}], + } self.login('someuser', 'someuser') - asset = self.create_asset('', json.dumps(example_formbuilder_output), format='json') - response = self.client.get(reverse('asset-detail', - kwargs={'uid': asset.uid, 'format': 'xml'})) + asset = self.create_asset( + '', json.dumps(example_formbuilder_output), format='json' + ) + response = self.client.get( + reverse('asset-detail', kwargs={'uid': asset.uid, 'format': 'xml'}) + ) self.assertEqual(response.status_code, status.HTTP_200_OK) xml = check_lxml_fromstring(response.content) group_elts = xml.xpath('./*[local-name()="body"]/*[local-name()="group"]') @@ -131,9 +140,11 @@ class ObjectRelationshipsTests(BaseTestCase): def setUp(self): self.client.login(username='someuser', password='someuser') self.user = User.objects.get(username='someuser') - self.surv = Asset.objects.create(content={'survey': [{'type': 'text', 'name': 'q1'}]}, - owner=self.user, - asset_type='survey') + self.surv = Asset.objects.create( + content={'survey': [{'type': 'text', 'name': 'q1'}]}, + owner=self.user, + asset_type='survey', + ) self.coll = Asset.objects.create( asset_type=ASSET_TYPE_COLLECTION, name='sample collection', owner=self.user @@ -156,28 +167,23 @@ def test_collection_can_have_asset(self): the asset is now listed in the collection's list of assets. 
""" _ = self.client.get(reverse('asset-detail', args=[self.surv.uid])) - coll_req1 = self.client.get( - reverse('asset-detail', args=[self.coll.uid]) + coll_req1 = self.client.get(reverse('asset-detail', args=[self.coll.uid])) + self.assertEqual( + self._count_children_by_kind(coll_req1.data['children'], self.surv.kind), 0 ) - self.assertEqual(self._count_children_by_kind( - coll_req1.data['children'], self.surv.kind), 0) self.surv.parent = self.coll self.surv.save() - surv_req2 = self.client.get( - reverse('asset-detail', args=[self.surv.uid]) - ) + surv_req2 = self.client.get(reverse('asset-detail', args=[self.surv.uid])) self.assertIn('parent', surv_req2.data) self.assertIn(self.coll.uid, surv_req2.data['parent']) - coll_req2 = self.client.get( - reverse('asset-detail', args=[self.coll.uid]) - ) - self.assertEqual(self._count_children_by_kind( - coll_req2.data['children'], self.surv.kind), 1) + coll_req2 = self.client.get(reverse('asset-detail', args=[self.coll.uid])) self.assertEqual( - self.surv.uid, coll_req2.data['children']['results'][0]['uid']) + self._count_children_by_kind(coll_req2.data['children'], self.surv.kind), 1 + ) + self.assertEqual(self.surv.uid, coll_req2.data['children']['results'][0]['uid']) def test_add_asset_to_collection(self): """ @@ -269,7 +275,9 @@ def setUp(self): 'q1': '¿Qué tal?', '_submission_time': '2024-08-07T23:42:21', } - self.asset.deployment.mock_submissions([self.submission], ) + self.asset.deployment.mock_submissions( + [self.submission], + ) def test_owner_can_create_export(self): post_url = reverse('exporttask-list') @@ -291,10 +299,12 @@ def test_owner_can_create_export(self): result_content = result_response.getvalue().decode('utf-8') self.assertEqual(result_response.status_code, status.HTTP_200_OK) version_uid = self.asset.latest_deployed_version_uid - expected_content = ''.join([ - '"q1";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"\r\n', - f'"¿Qué tal?";"{self.submission["_id"]}";"{self.submission["_uuid"]}";"2024-08-07T23:42:21";"";"";"submitted_via_web";"someuser";"{version_uid}";"";"1"\r\n', - ]) + expected_content = ''.join( + [ + '"q1";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"\r\n', # noqa: E501 + f'"¿Qué tal?";"{self.submission["_id"]}";"{self.submission["_uuid"]}";"2024-08-07T23:42:21";"";"";"submitted_via_web";"someuser";"{version_uid}";"";"1"\r\n', # noqa: E501 + ] + ) self.assertEqual(result_content, expected_content) return detail_response diff --git a/kpi/tests/api/v1/test_api_submissions.py b/kpi/tests/api/v1/test_api_submissions.py index 44f7cee41d..906a0ef144 100644 --- a/kpi/tests/api/v1/test_api_submissions.py +++ b/kpi/tests/api/v1/test_api_submissions.py @@ -1,4 +1,3 @@ -# coding: utf-8 import pytest from django.conf import settings from django.urls import reverse @@ -74,24 +73,26 @@ def test_list_submissions_limit(self): self._get_endpoint('submission-list'), kwargs={'parent_lookup_asset': asset.uid, 'format': 'json'}, ) - response = self.client.get( - url, - {'limit': limit + excess, 'format': 'json'} - ) + response = self.client.get(url, {'limit': limit + excess, 'format': 'json'}) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.data), limit) def test_list_submissions_as_owner_with_params(self): response = self.client.get( - self.submission_list_url, { + self.submission_list_url, + { 'format': 'json', 'start': 1, 'limit': 5, 
'sort': '{"q1": -1}', 'fields': '["q1", "_submitted_by"]', - 'query': '{"_submitted_by": {"$in": ["unknownuser", "someuser", "anotheruser"]}}', - } + 'query': ( + '{"_submitted_by": {"$in":' + ' ["unknownuser", "someuser", "anotheruser"]' + '}}' + ), + }, ) # ToDo add more assertions. E.g. test whether sort, limit, start really work self.assertEqual(len(response.data), 5) @@ -174,6 +175,6 @@ def test_edit_submission_snapshot_missing_unauthenticated(self): pass -class SubmissionValidationStatusApiTests(test_api_submissions.SubmissionValidationStatusApiTests): +class SubmissionValidationStatusApiTests(test_api_submissions.SubmissionValidationStatusApiTests): # noqa: E501 URL_NAMESPACE = None diff --git a/kpi/tests/api/v2/test_api_assets.py b/kpi/tests/api/v2/test_api_assets.py index 2abee0542f..26932cb780 100644 --- a/kpi/tests/api/v2/test_api_assets.py +++ b/kpi/tests/api/v2/test_api_assets.py @@ -32,9 +32,7 @@ from kpi.urls.router_api_v2 import URL_NAMESPACE as ROUTER_URL_NAMESPACE from kpi.utils.hash import calculate_hash from kpi.utils.object_permission import get_anonymous_user -from kpi.utils.project_views import ( - get_region_for_view, -) +from kpi.utils.project_views import get_region_for_view class AssetListApiTests(BaseAssetTestCase): @@ -1056,15 +1054,15 @@ def test_submission_count(self): 'q1': 'a1', 'q2': 'a2', '_id': 1, - '_submitted_by': '' + '_submitted_by': '', }, { '__version__': self.asset.latest_deployed_version.uid, 'q1': 'a3', 'q2': 'a4', '_id': 2, - '_submitted_by': anotheruser.username - } + '_submitted_by': anotheruser.username, + }, ] self.asset.deployment.mock_submissions(submissions) @@ -1339,18 +1337,15 @@ def get_asset_file_content(self, url): @property def asset_file_payload(self): - geojson_ = StringIO(json.dumps( - { - 'type': 'Feature', - 'geometry': { - 'type': 'Point', - 'coordinates': [125.6, 10.1] - }, - 'properties': { - 'name': 'Dinagat Islands' + geojson_ = StringIO( + json.dumps( + { + 'type': 'Feature', + 'geometry': {'type': 'Point', 'coordinates': [125.6, 10.1]}, + 'properties': {'name': 'Dinagat Islands'}, } - } - )) + ) + ) geojson_.name = 'dingagat_island.geojson' return { 'file_type': AssetFile.MAP_LAYER, diff --git a/kpi/tests/api/v2/test_api_logout_all.py b/kpi/tests/api/v2/test_api_logout_all.py index c4c04ee2ea..86df1f7c7c 100644 --- a/kpi/tests/api/v2/test_api_logout_all.py +++ b/kpi/tests/api/v2/test_api_logout_all.py @@ -28,12 +28,8 @@ def test_logout_all_sessions_does_not_affect_other_users(self): user1 = User.objects.get(username='someuser') user2 = User.objects.get(username='anotheruser') # create sessions for user1 - UserSession.objects.create( - user=user1, session_key='12345', ip='1.2.3.4' - ) - UserSession.objects.create( - user=user1, session_key='56789', ip='5.6.7.8' - ) + UserSession.objects.create(user=user1, session_key='12345', ip='1.2.3.4') + UserSession.objects.create(user=user1, session_key='56789', ip='5.6.7.8') count = UserSession.objects.count() self.assertEqual(count, 2) diff --git a/kpi/tests/api/v2/test_api_paired_data.py b/kpi/tests/api/v2/test_api_paired_data.py index 6546a7ed00..4b02a4da04 100644 --- a/kpi/tests/api/v2/test_api_paired_data.py +++ b/kpi/tests/api/v2/test_api_paired_data.py @@ -37,7 +37,7 @@ def setUp(self): { 'name': 'group_restaurant', 'type': 'begin_group', - 'label': 'Restaurant' + 'label': 'Restaurant', }, { 'name': 'favourite_restaurant', diff --git a/kpi/tests/api/v2/test_api_submissions.py b/kpi/tests/api/v2/test_api_submissions.py index 93928269a0..b659b4d28f 100644 --- 
a/kpi/tests/api/v2/test_api_submissions.py +++ b/kpi/tests/api/v2/test_api_submissions.py @@ -1,11 +1,10 @@ -# coding: utf-8 import copy import json +import os import random import string import uuid from datetime import datetime - try: from zoneinfo import ZoneInfo except ImportError: @@ -18,9 +17,9 @@ import responses from django.conf import settings from django.urls import reverse +from django_digest.test import Client as DigestClient from rest_framework import status -from django_digest.test import Client as DigestClient from kobo.apps.audit_log.models import AuditLog, AuditType from kobo.apps.kobo_auth.shortcuts import User from kobo.apps.openrosa.apps.logger.models.instance import Instance @@ -113,7 +112,7 @@ def _add_submissions(self, other_fields: dict = None): 'q2': ''.join(random.choice(letters) for letter in range(10)), 'meta/instanceID': f'uuid:{uuid_}', '_uuid': str(uuid_), - '_submitted_by': submitted_by + '_submitted_by': submitted_by, } if other_fields is not None: submission.update(**other_fields) @@ -181,9 +180,9 @@ def test_audit_log_on_bulk_delete(self): self.test_delete_submissions_as_owner() # All submissions have been deleted and should be logged - deleted_submission_ids = AuditLog.objects.values_list( - 'pk', flat=True - ).filter(user=self.someuser, app_label='logger', model_name='instance') + deleted_submission_ids = AuditLog.objects.values_list('pk', flat=True).filter( + user=self.someuser, app_label='logger', model_name='instance' + ) assert len(expected_submission_ids) > 0 assert sorted(expected_submission_ids), sorted(deleted_submission_ids) @@ -308,11 +307,7 @@ def test_delete_some_allowed_submissions_with_partial_perms_as_anotheruser(self) # Try first submission submitted by unknown submissions = self.submissions_submitted_by_unknownuser - data = { - 'payload': { - 'submission_ids': [submissions[0]['_id']] - } - } + data = {'payload': {'submission_ids': [submissions[0]['_id']]}} response = self.client.delete( self.submission_bulk_url, data=data, format='json' ) @@ -334,7 +329,7 @@ def test_delete_some_allowed_submissions_with_partial_perms_as_anotheruser(self) response = self.client.get(self.submission_list_url, {'format': 'json'}) self.assertEqual(response.data['count'], count - 1) - def test_cannot_delete_view_only_submissions_with_partial_perms_as_anotheruser(self): + def test_cant_delete_view_only_submissions_with_partial_perms_as_anotheruser(self): """ someuser is the owner of the project anotheruser is allowed to view someuser's data and delete their own data @@ -344,7 +339,8 @@ def test_cannot_delete_view_only_submissions_with_partial_perms_as_anotheruser(s self.client.force_login(self.anotheruser) partial_perms = { PERM_VIEW_SUBMISSIONS: [{'_submitted_by': 'someuser'}], - PERM_DELETE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] # view_submission is implied + # view_submission is implied + PERM_DELETE_SUBMISSIONS: [{'_submitted_by': 'anotheruser'}] } # Allow anotheruser to delete their own data @@ -448,7 +444,11 @@ def test_list_submissions_as_owner_with_params(self): 'limit': 5, 'sort': '{"q1": -1}', 'fields': '["q1", "_submitted_by"]', - 'query': '{"_submitted_by": {"$in": ["unknownuser", "someuser", "anotheruser"]}}', + 'query': '{"_submitted_by": {' + ' "$in": ' + ' ["unknownuser", "someuser", "anotheruser"]' + ' }' + '}', }, ) # ToDo add more assertions. E.g. 
test whether sort, limit, start really work @@ -489,8 +489,7 @@ def test_list_submissions_limit(self): # Limit specified in query parameters should not be able to exceed # server-wide limit response = self.client.get( - submission_list_url, - {'limit': limit + excess, 'format': 'json'} + submission_list_url, {'limit': limit + excess, 'format': 'json'} ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -593,7 +592,7 @@ def test_list_submissions_asset_publicly_shared_as_authenticated_user(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.asset.remove_perm(anonymous_user, PERM_VIEW_SUBMISSIONS) - def test_list_submissions_asset_publicly_shared_and_shared_with_user_as_anotheruser(self): + def test_list_subs_asset_publicly_shared_and_shared_with_user_as_anotheruser(self): """ Running through behaviour described in issue kpi/#2870 where an asset that has been publicly shared and then explicity shared with a user, the @@ -652,13 +651,13 @@ def test_list_query_elem_match(self): }, { f'{group}/{question}': 'whop.gif', - } + }, ] self.asset.deployment.mock_submissions([submission]) data = { - 'query': f'{{"{group}":{{"$elemMatch":{{"{group}/{question}":{{"$exists":true}}}}}}}}', + 'query': f'{{"{group}":{{"$elemMatch":{{"{group}/{question}":{{"$exists":true}}}}}}}}', # noqa: E501 'format': 'json', } response = self.client.get(self.submission_list_url, data) @@ -1034,17 +1033,20 @@ def test_attachments_rewrite(self): { 'group_ec9yq67/group_dq8as25/group_xt0za80': [ { - 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': 'IMG_4266-11_38_22.jpg' + 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': + 'IMG_4266-11_38_22.jpg' }, { - 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': 'كوبو-رائع-10_7_41.jpg' + 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': + 'كوبو-رائع-10_7_41.jpg' }, ] }, { 'group_ec9yq67/group_dq8as25/group_xt0za80': [ { - 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': 'Screenshot 2024-02-14 at 18.31.39-11_38_35.jpg' + 'group_ec9yq67/group_dq8as25/group_xt0za80/my_attachment': + 'Screenshot 2024-02-14 at 18.31.39-11_38_35.jpg' } ] }, @@ -1056,7 +1058,13 @@ def test_attachments_rewrite(self): 'download_medium_url': 'http://kc.testserver/1.jpg', 'download_small_url': 'http://kc.testserver/1.jpg', 'mimetype': 'image/jpeg', - 'filename': 'anotheruser/attachments/formhub-uuid/submission-uuid/IMG_4266-11_38_22.jpg', + 'filename': os.path.join( + 'anotheruser', + 'attachments', + 'formhub-uuid', + 'submission-uuid', + 'IMG_4266-11_38_22.jpg' + ), 'instance': 1, 'xform': 1, 'id': 1, @@ -1067,7 +1075,13 @@ def test_attachments_rewrite(self): 'download_medium_url': 'http://kc.testserver/2.jpg', 'download_small_url': 'http://kc.testserver/2.jpg', 'mimetype': 'image/jpeg', - 'filename': 'anotheruser/attachments/formhub-uuid/submission-uuid/كوبو-رايع-10_7_41.jpg', + 'filename': os.path.join( + 'anotheruser', + 'attachments', + 'formhub-uuid', + 'submission-uuid', + 'كوبو-رايع-10_7_41.jpg' + ), 'instance': 1, 'xform': 1, 'id': 2, @@ -1078,7 +1092,13 @@ def test_attachments_rewrite(self): 'download_medium_url': 'http://kc.testserver/3.jpg', 'download_small_url': 'http://kc.testserver/3.jpg', 'mimetype': 'image/jpeg', - 'filename': 'anotheruser/attachments/formhub-uuid/submission-uuid/Screenshot_2024-02-14_at_18.31.39-11_38_35.jpg', + 'filename': os.path.join( + 'anotheruser', + 'attachments', + 'formhub-uuid', + 'submission-uuid', + 'Screenshot_2024-02-14_at_18.31.39-11_38_35.jpg' + ), 'instance': 1, 'xform': 1, 'id': 3, @@ 
-1135,8 +1155,8 @@ class SubmissionEditApiTests(BaseSubmissionTestCase):
     """
     Tests for editin submissions.
 
-    WARNING: Tests in this class must work in v1 as well, or else be added to the skipped tests
-    in kpi/tests/api/v1/test_api_submissions.py
+    WARNING: Tests in this class must work in v1 as well, or else be added to the
+    skipped tests in kpi/tests/api/v1/test_api_submissions.py
     """
 
     def setUp(self):
@@ -1535,9 +1555,7 @@ def test_edit_submission_with_xml_encoding_declaration(self):
             format_type=SUBMISSION_FORMAT_TYPE_XML,
             submission_ids=[submission['_id']],
         )[0]
-        assert submission_xml.startswith(
-            '<?xml version="1.0" encoding="utf-8"?>'
-        )
+        assert submission_xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
 
         # Get edit endpoint
         edit_url = reverse(
@@ -1893,9 +1911,7 @@ def setUp(self):
             }
         }
         current_time = datetime.now(tz=ZoneInfo('UTC')).isoformat('T', 'milliseconds')
-        self._add_submissions(
-            other_fields={'start': current_time, 'end': current_time}
-        )
+        self._add_submissions(other_fields={'start': current_time, 'end': current_time})
 
         self.submission = self.submissions_submitted_by_someuser[0]
         self.submission_url = reverse(
@@ -1924,9 +1940,7 @@ def _check_duplicate(self, response, submission: dict = None):
         assert submission['end'] != duplicate_submission['end']
 
 
-class SubmissionDuplicateWithXMLNamespaceApiTests(
-    SubmissionDuplicateBaseApiTests
-):
+class SubmissionDuplicateWithXMLNamespaceApiTests(SubmissionDuplicateBaseApiTests):
 
     def setUp(self):
         with mock.patch(
@@ -1942,9 +1956,7 @@ def test_duplicate_submission_with_xml_namespace(self):
             format_type=SUBMISSION_FORMAT_TYPE_XML,
             submission_ids=[self.submission['_id']],
         )[0]
-        assert (
-            'xmlns="http://opendatakit.org/submissions"' in submission_xml
-        )
+        assert 'xmlns="http://opendatakit.org/submissions"' in submission_xml
         response = self.client.post(self.submission_url, {'format': 'json'})
         assert response.status_code == status.HTTP_201_CREATED
         self._check_duplicate(response)
@@ -1971,9 +1983,7 @@ def test_duplicate_submission_with_xml_encoding(self):
             format_type=SUBMISSION_FORMAT_TYPE_XML,
             submission_ids=[self.submission['_id']],
         )[0]
-        assert submission_xml.startswith(
-            '<?xml version="1.0" encoding="utf-8"?>'
-        )
+        assert submission_xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
         self.test_duplicate_submission_as_owner_allowed()
 
     def test_duplicate_submission_without_xml_encoding(self):
@@ -1982,13 +1992,9 @@ def test_duplicate_submission_without_xml_encoding(self):
             format_type=SUBMISSION_FORMAT_TYPE_XML,
             submission_ids=[self.submission['_id']],
         )[0]
-        assert submission_xml.startswith(
-            '<?xml version="1.0" encoding="utf-8"?>'
-        )
+        assert submission_xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
         Instance.objects.filter(pk=self.submission['_id']).update(
-            xml=submission_xml.replace(
-                '<?xml version="1.0" encoding="utf-8"?>', ''
-            )
+            xml=submission_xml.replace('<?xml version="1.0" encoding="utf-8"?>', '')
         )
         self.test_duplicate_submission_as_owner_allowed()
 
@@ -2186,9 +2192,7 @@ def test_bulk_update_submissions_with_xml_encoding(self):
             format_type=SUBMISSION_FORMAT_TYPE_XML,
             submission_ids=[submission['_id']],
         )[0]
-        assert submission_xml.startswith(
-            '<?xml version="1.0" encoding="utf-8"?>'
-        )
+        assert submission_xml.startswith('<?xml version="1.0" encoding="utf-8"?>')
         self.test_bulk_update_submissions_allowed_as_owner()
 
     @pytest.mark.skip(
@@ -2546,14 +2550,9 @@ def setUp(self):
         )
 
         # Ensure all submissions have no validation status
-        response = self.client.get(
-            self.submission_list_url, format='json'
-        )
+        response = self.client.get(self.submission_list_url, format='json')
         self.assertEqual(response.status_code, status.HTTP_200_OK)
-        emptied = [
-            not s['_validation_status']
-            for s in response.data['results']
-        ]
+        emptied = [not s['_validation_status'] for s in response.data['results']]
        self.assertTrue(all(emptied))

        # Make the owner change validation status of all
submissions @@ -2570,9 +2569,7 @@ def setUp(self): def test_all_validation_statuses_applied(self): # ensure all submissions are not approved - response = self.client.get( - self.submission_list_url, format='json' - ) + response = self.client.get(self.submission_list_url, format='json') self.assertEqual(response.status_code, status.HTTP_200_OK) applied = [ s['_validation_status']['uid'] == 'validation_status_not_approved' @@ -2867,7 +2864,7 @@ def test_cannot_edit_submission_validation_statuses_as_anonymous(self): format='json') self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - def test_edit_all_submission_validation_statuses_with_partial_perms_as_anotheruser(self): + def test_edit_all_sub_validation_statuses_with_partial_perms_as_anotheruser(self): """ someuser is the owner of the project. The project is partially shared with anotheruser. @@ -2923,7 +2920,7 @@ def test_edit_all_submission_validation_statuses_with_partial_perms_as_anotherus data['payload']['validation_status.uid'] ) - def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheruser(self): + def test_edit_some_sub_validation_statuses_with_partial_perms_as_anotheruser(self): """ someuser is the owner of the project. The project is partially shared with anotheruser. @@ -2942,9 +2939,7 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru data = { 'payload': { 'validation_status.uid': 'validation_status_approved', - 'submission_ids': [ - rs['_id'] for rs in submissions - ] + 'submission_ids': [rs['_id'] for rs in submissions], } } @@ -2956,12 +2951,10 @@ def test_edit_some_submission_validation_statuses_with_partial_perms_as_anotheru # Try 2nd submission submitted by anotheruser submissions = self.submissions_submitted_by_anotheruser - data['payload']['submission_ids'] = [ - rs['_id'] for rs in submissions - ] - response = self.client.patch(self.validation_statuses_url, - data=data, - format='json') + data['payload']['submission_ids'] = [rs['_id'] for rs in submissions] + response = self.client.patch( + self.validation_statuses_url, data=data, format='json' + ) self.assertEqual(response.status_code, status.HTTP_200_OK) count = self._deployment.calculated_submission_count( diff --git a/kpi/tests/kpi_test_case.py b/kpi/tests/kpi_test_case.py index 67153868ed..87fd920da9 100644 --- a/kpi/tests/kpi_test_case.py +++ b/kpi/tests/kpi_test_case.py @@ -95,9 +95,7 @@ def create_collection(self, name, owner=None, owner_password=None, self.login(owner.username, owner_password) kwargs.update({'name': name, 'asset_type': ASSET_TYPE_COLLECTION}) - response = self.client.post( - reverse(self._get_endpoint('asset-list')), kwargs - ) + response = self.client.post(reverse(self._get_endpoint('asset-list')), kwargs) self.assertEqual(response.status_code, status.HTTP_201_CREATED) if owner and owner_password: diff --git a/kpi/tests/test_asset_content.py b/kpi/tests/test_asset_content.py index 06c8486f1f..74cb7aa6ab 100644 --- a/kpi/tests/test_asset_content.py +++ b/kpi/tests/test_asset_content.py @@ -246,13 +246,20 @@ def _name_to_autoname(rows): 'Four_score_and_seven_th_on_this_continent_001', ] - assert _name_to_autoname([{'label': x} for x in [ - 'What is your favorite all-time place to go swimming?', - 'What is your favorite all-time place to go running?', - 'What is your favorite all-time place to go to relax?', - ]]) == ['What_is_your_favorit_place_to_go_swimming', - 'What_is_your_favorit_place_to_go_running', - 'What_is_your_favorit_place_to_go_to_relax'] + assert 
_name_to_autoname( + [ + {'label': x} + for x in [ + 'What is your favorite all-time place to go swimming?', + 'What is your favorite all-time place to go running?', + 'What is your favorite all-time place to go to relax?', + ] + ] + ) == [ + 'What_is_your_favorit_place_to_go_swimming', + 'What_is_your_favorit_place_to_go_running', + 'What_is_your_favorit_place_to_go_to_relax', + ] def test_remove_empty_expressions(): diff --git a/kpi/tests/test_asset_versions.py b/kpi/tests/test_asset_versions.py index a6e1816815..711151a51e 100644 --- a/kpi/tests/test_asset_versions.py +++ b/kpi/tests/test_asset_versions.py @@ -15,7 +15,6 @@ from kobo.apps.kobo_auth.shortcuts import User from kpi.exceptions import BadAssetTypeException from kpi.utils.hash import calculate_hash - from ..models import Asset, AssetVersion @@ -62,10 +61,8 @@ def test_asset_deployment(self): bob = User.objects.create(username='bob') self.asset = Asset.objects.create( asset_type='survey', - content={ - 'survey': [{'type': 'note', 'label': ['Read me'], 'name': 'n1'}] - }, - owner=bob + content={'survey': [{'type': 'note', 'label': ['Read me'], 'name': 'n1'}]}, + owner=bob, ) self.assertEqual(self.asset.asset_versions.count(), 1) self.assertEqual(self.asset.latest_version.deployed, False) diff --git a/kpi/tests/test_mock_data.py b/kpi/tests/test_mock_data.py index 3c21b5805b..fc405ca3ae 100644 --- a/kpi/tests/test_mock_data.py +++ b/kpi/tests/test_mock_data.py @@ -84,7 +84,7 @@ 'label': [ 'Date and time', 'Fecha y hora', - '\u0627\u0644\u062a\u0627\u0631\u064a\u062e \u0648 \u0627\u0644\u0648\u0642\u062a', + '\u0627\u0644\u062a\u0627\u0631\u064a\u062e \u0648 \u0627\u0644\u0648\u0642\u062a', # noqa: E501 ], }, { @@ -95,7 +95,7 @@ 'label': [ 'GPS', 'GPS', - '\u0646\u0638\u0627\u0645 \u062a\u062d\u062f\u064a\u062f \u0627\u0644\u0645\u0648\u0627\u0642\u0639', + '\u0646\u0638\u0627\u0645 \u062a\u062d\u062f\u064a\u062f \u0627\u0644\u0645\u0648\u0627\u0642\u0639', # noqa: E501 ], }, { @@ -106,7 +106,7 @@ 'label': [ 'Photo', 'Foto', - '\u0635\u0648\u0631\u0629 \u0641\u0648\u062a\u0648\u063a\u0631\u0627\u0641\u064a\u0629', + '\u0635\u0648\u0631\u0629 \u0641\u0648\u062a\u0648\u063a\u0631\u0627\u0641\u064a\u0629', # noqa: E501 ], }, { @@ -131,7 +131,7 @@ 'label': [ 'Note (Should not be displayed!)', 'Nota (no se represente!)', - '\u0645\u0644\u0627\u062d\u0638\u0629 (\u064a\u062c\u0628 \u0623\u0646 \u0644\u0627 \u064a\u062a\u0645 \u0639\u0631\u0636!)', + '\u0645\u0644\u0627\u062d\u0638\u0629 (\u064a\u062c\u0628 \u0623\u0646 \u0644\u0627 \u064a\u062a\u0645 \u0639\u0631\u0636!)', # noqa: E501 ], }, { @@ -183,7 +183,7 @@ 'label': [ 'Second option', 'Segunda opci\xf3n', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', # noqa: E501 ], }, { @@ -203,7 +203,7 @@ 'label': [ 'Second option', 'Segunda opci\xf3n', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', # noqa: E501 ], }, ], @@ -271,10 +271,7 @@ def setUp(self): for i in range(0, num_submissions): submissions.append( OrderedDict( - [ - (key, SUBMISSION_DATA[key][i]) - for key in SUBMISSION_DATA.keys() - ] + [(key, SUBMISSION_DATA[key][i]) for key in SUBMISSION_DATA.keys()] ) ) @@ -374,7 +371,7 @@ def test_kobo_apps_reports_report_data_split_by_translated(self): expected = set( [ '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', - 
'\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', # noqa: E501 ] ) self.assertEqual(responses, expected) @@ -462,9 +459,7 @@ def test_formpack_results(self): def _get_autoreport_values(qname, key, lang=None, index=False): stats = OrderedDict( - _get_stats_object( - self.fp, self.vs, submissions=submissions, lang=lang - ) + _get_stats_object(self.fp, self.vs, submissions=submissions, lang=lang) ) if index is False: return stats[qname][key] @@ -483,7 +478,7 @@ def _get_autoreport_values(qname, key, lang=None, index=False): _get_autoreport_values('Select_one', 'frequency', 'Arabic', 0), [ '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u0623\u0648\u0644', - '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', + '\u0627\u0644\u062e\u064a\u0627\u0631 \u0627\u0644\u062b\u0627\u0646\u064a', # noqa: E501 ], ) diff --git a/kpi/tests/test_mock_data_conflicting_version_exports.py b/kpi/tests/test_mock_data_conflicting_version_exports.py index 19ae9951f1..be5e191135 100644 --- a/kpi/tests/test_mock_data_conflicting_version_exports.py +++ b/kpi/tests/test_mock_data_conflicting_version_exports.py @@ -29,9 +29,7 @@ def setUp(self): self.asset = Asset.objects.get(uid='axD3Wc8ZnfgLXBcURRt5fM') # To avoid cluttering the fixture, assign permissions here self.asset.assign_perm(self.user, PERM_VIEW_SUBMISSIONS) - self.submissions = self.asset.deployment.get_submissions( - self.asset.owner - ) + self.submissions = self.asset.deployment.get_submissions(self.asset.owner) self.submission_id_field = '_id' self.formpack, self.submission_stream = report_data.build_formpack( self.asset, diff --git a/kpi/tests/test_mock_data_exports.py b/kpi/tests/test_mock_data_exports.py index b1d9da1f58..09f12c03d7 100644 --- a/kpi/tests/test_mock_data_exports.py +++ b/kpi/tests/test_mock_data_exports.py @@ -1,4 +1,4 @@ -# coding: utf-8 +# flake8: noqa import os import zipfile from collections import defaultdict @@ -581,7 +581,14 @@ def test_csv_export_hierarchy_in_labels(self): self.run_csv_export_test(expected_lines, export_options) def test_csv_export_filter_fields(self): - export_options = {'fields': ['start', 'end', 'Do_you_descend_from_unicellular_organism', '_index']} + export_options = { + 'fields': [ + 'start', + 'end', + 'Do_you_descend_from_unicellular_organism', + '_index', + ] + } expected_lines = [ '"start";"end";"Do you descend from an ancestral unicellular organism?";"_uuid";"_index"', '"2017-10-23T05:40:39.000-04:00";"2017-10-23T05:41:13.000-04:00";"No";"48583952-1892-4931-8d9c-869e7b49bafb";"1"', @@ -594,65 +601,513 @@ def test_xls_export_english_labels(self): submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English'} - expected_data = {self.asset.name: [ - ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id','_uuid','_submission_time','_validation_status','_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], - ['', '', '#symmetry', '', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', 
'2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] - ]} + expected_data = { + self.asset.name: [ + [ + 'start', + 'end', + 'What kind of symmetry do you have?', + 'What kind of symmetry do you have?/Spherical', + 'What kind of symmetry do you have?/Radial', + 'What kind of symmetry do you have?/Bilateral', + 'How many segments does your body have?', + 'Do you have body fluids that occupy intracellular space?', + 'Do you descend from an ancestral unicellular organism?', + '_id', + '_uuid', + '_submission_time', + '_validation_status', + '_notes', + '_status', + '_submitted_by', + '__version__', + '_tags', + '_index', + ], + [ + '', + '', + '#symmetry', + '', + '', + '', + '#segments', + '#fluids', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + ], + [ + '2017-10-23T05:40:39.000-04:00', + '2017-10-23T05:41:13.000-04:00', + 'Spherical Radial Bilateral', + '1', + '1', + '1', + '6', + 'Yes, and some extracellular space', + 'No', + submissions[0]['_id'], + '48583952-1892-4931-8d9c-869e7b49bafb', + '2017-10-23T09:41:19', + '', + '', + 'submitted_via_web', + '', + version_uid, + '', + 1.0, + ], + [ + '2017-10-23T05:41:14.000-04:00', + '2017-10-23T05:41:32.000-04:00', + 'Radial', + '0', + '1', + '0', + '3', + 'Yes', + 'No', + submissions[1]['_id'], + '317ba7b7-bea4-4a8c-8620-a483c3079c4b', + '2017-10-23T09:41:38', + '', + '', + 'submitted_via_web', + '', + version_uid, + '', + 2.0, + ], + [ + '2017-10-23T05:41:32.000-04:00', + '2017-10-23T05:42:05.000-04:00', + 'Bilateral', + '0', + '0', + '1', + '2', + 'No / Unsure', + 'Yes', + submissions[2]['_id'], + '3f15cdfe-3eab-4678-8352-7806febf158d', + '2017-10-23T09:42:11', + '', + '', + 'submitted_via_web', + 'anotheruser', + version_uid, + '', + 3.0, + ], + ] + } self.run_xls_export_test(expected_data, export_options) def test_xls_export_english_labels_partial_submissions(self): submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English'} - expected_data = {self.asset.name: [ - ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id','_uuid','_submission_time','_validation_status','_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], - ['', '', '#symmetry', '', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], 
'3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 1.0] - ]} - self.run_xls_export_test( - expected_data, export_options, user=self.anotheruser - ) + expected_data = { + self.asset.name: [ + [ + 'start', + 'end', + 'What kind of symmetry do you have?', + 'What kind of symmetry do you have?/Spherical', + 'What kind of symmetry do you have?/Radial', + 'What kind of symmetry do you have?/Bilateral', + 'How many segments does your body have?', + 'Do you have body fluids that occupy intracellular space?', + 'Do you descend from an ancestral unicellular organism?', + '_id', + '_uuid', + '_submission_time', + '_validation_status', + '_notes', + '_status', + '_submitted_by', + '__version__', + '_tags', + '_index', + ], + [ + '', + '', + '#symmetry', + '', + '', + '', + '#segments', + '#fluids', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + ], + [ + '2017-10-23T05:41:32.000-04:00', + '2017-10-23T05:42:05.000-04:00', + 'Bilateral', + '0', + '0', + '1', + '2', + 'No / Unsure', + 'Yes', + submissions[2]['_id'], + '3f15cdfe-3eab-4678-8352-7806febf158d', + '2017-10-23T09:42:11', + '', + '', + 'submitted_via_web', + 'anotheruser', + version_uid, + '', + 1.0, + ], + ] + } + self.run_xls_export_test(expected_data, export_options, user=self.anotheruser) def test_xls_export_multiple_select_both(self): submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English', 'multiple_select': 'both'} - expected_data = {self.asset.name: [ - ['start', 'end', 'What kind of symmetry do you have?', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id','_uuid','_submission_time','_validation_status','_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], - ['', '', '#symmetry', '', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '0', '1', '0', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] - ]} + expected_data = { + self.asset.name: [ + [ + 'start', + 'end', + 'What kind of symmetry do you have?', + 'What kind of symmetry do you have?/Spherical', + 'What kind of symmetry do you have?/Radial', + 'What kind of symmetry do you have?/Bilateral', + 'How many segments does your body have?', + 'Do you have body fluids that occupy intracellular space?', + 'Do you descend from an ancestral unicellular organism?', + '_id', + '_uuid', + '_submission_time', + '_validation_status', + '_notes', + '_status', + 
'_submitted_by', + '__version__', + '_tags', + '_index', + ], + [ + '', + '', + '#symmetry', + '', + '', + '', + '#segments', + '#fluids', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + ], + [ + '2017-10-23T05:40:39.000-04:00', + '2017-10-23T05:41:13.000-04:00', + 'Spherical Radial Bilateral', + '1', + '1', + '1', + '6', + 'Yes, and some extracellular space', + 'No', + submissions[0]['_id'], + '48583952-1892-4931-8d9c-869e7b49bafb', + '2017-10-23T09:41:19', + '', + '', + 'submitted_via_web', + '', + version_uid, + '', + 1.0, + ], + [ + '2017-10-23T05:41:14.000-04:00', + '2017-10-23T05:41:32.000-04:00', + 'Radial', + '0', + '1', + '0', + '3', + 'Yes', + 'No', + submissions[1]['_id'], + '317ba7b7-bea4-4a8c-8620-a483c3079c4b', + '2017-10-23T09:41:38', + '', + '', + 'submitted_via_web', + '', + version_uid, + '', + 2.0, + ], + [ + '2017-10-23T05:41:32.000-04:00', + '2017-10-23T05:42:05.000-04:00', + 'Bilateral', + '0', + '0', + '1', + '2', + 'No / Unsure', + 'Yes', + submissions[2]['_id'], + '3f15cdfe-3eab-4678-8352-7806febf158d', + '2017-10-23T09:42:11', + '', + '', + 'submitted_via_web', + 'anotheruser', + version_uid, + '', + 3.0, + ], + ] + } self.run_xls_export_test(expected_data, export_options) def test_xls_export_multiple_select_summary(self): submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English', 'multiple_select': 'summary'} - expected_data = {self.asset.name: [ - ['start', 'end', 'What kind of symmetry do you have?', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id', '_uuid', '_submission_time', '_validation_status', '_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], - ['', '', '#symmetry', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', 'Spherical Radial Bilateral', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', 'Radial', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', 'Bilateral', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] - ]} + expected_data = { + self.asset.name: [ + [ + 'start', + 'end', + 'What kind of symmetry do you have?', + 'How many segments does your body have?', + 'Do you have body fluids that occupy intracellular space?', + 'Do you descend from an ancestral unicellular organism?', + '_id', + '_uuid', + '_submission_time', + '_validation_status', + '_notes', + '_status', + '_submitted_by', + '__version__', + '_tags', + '_index', + ], + [ + '', + '', + '#symmetry', + '#segments', + '#fluids', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + ], + [ + '2017-10-23T05:40:39.000-04:00', + '2017-10-23T05:41:13.000-04:00', + 'Spherical Radial Bilateral', + '6', + 'Yes, and some extracellular space', + 'No', + submissions[0]['_id'], + '48583952-1892-4931-8d9c-869e7b49bafb', + '2017-10-23T09:41:19', + 
'', + '', + 'submitted_via_web', + '', + version_uid, + '', + 1.0, + ], + [ + '2017-10-23T05:41:14.000-04:00', + '2017-10-23T05:41:32.000-04:00', + 'Radial', + '3', + 'Yes', + 'No', + submissions[1]['_id'], + '317ba7b7-bea4-4a8c-8620-a483c3079c4b', + '2017-10-23T09:41:38', + '', + '', + 'submitted_via_web', + '', + version_uid, + '', + 2.0, + ], + [ + '2017-10-23T05:41:32.000-04:00', + '2017-10-23T05:42:05.000-04:00', + 'Bilateral', + '2', + 'No / Unsure', + 'Yes', + submissions[2]['_id'], + '3f15cdfe-3eab-4678-8352-7806febf158d', + '2017-10-23T09:42:11', + '', + '', + 'submitted_via_web', + 'anotheruser', + version_uid, + '', + 3.0, + ], + ] + } self.run_xls_export_test(expected_data, export_options) def test_xls_export_multiple_select_details(self): submissions = self.forms[self.form_names[0]]['submissions'] version_uid = self.asset.latest_deployed_version_uid export_options = {'lang': 'English', 'multiple_select': 'details'} - expected_data = {self.asset.name: [ - ['start', 'end', 'What kind of symmetry do you have?/Spherical', 'What kind of symmetry do you have?/Radial', 'What kind of symmetry do you have?/Bilateral', 'How many segments does your body have?', 'Do you have body fluids that occupy intracellular space?', 'Do you descend from an ancestral unicellular organism?', '_id', '_uuid', '_submission_time', '_validation_status', '_notes', '_status', '_submitted_by', '__version__', '_tags', '_index'], - ['', '', '#symmetry', '', '', '#segments', '#fluids', '', '', '', '', '', '', '', '', '', '', ''], - ['2017-10-23T05:40:39.000-04:00', '2017-10-23T05:41:13.000-04:00', '1', '1', '1', '6', 'Yes, and some extracellular space', 'No', submissions[0]['_id'], '48583952-1892-4931-8d9c-869e7b49bafb', '2017-10-23T09:41:19', '', '', 'submitted_via_web', '', version_uid, '', 1.0], - ['2017-10-23T05:41:14.000-04:00', '2017-10-23T05:41:32.000-04:00', '0', '1', '0', '3', 'Yes', 'No', submissions[1]['_id'], '317ba7b7-bea4-4a8c-8620-a483c3079c4b', '2017-10-23T09:41:38', '', '', 'submitted_via_web', '', version_uid, '', 2.0], - ['2017-10-23T05:41:32.000-04:00', '2017-10-23T05:42:05.000-04:00', '0', '0', '1', '2', 'No / Unsure', 'Yes', submissions[2]['_id'], '3f15cdfe-3eab-4678-8352-7806febf158d', '2017-10-23T09:42:11', '', '', 'submitted_via_web', 'anotheruser', version_uid, '', 3.0] - ]} + expected_data = { + self.asset.name: [ + [ + 'start', + 'end', + 'What kind of symmetry do you have?/Spherical', + 'What kind of symmetry do you have?/Radial', + 'What kind of symmetry do you have?/Bilateral', + 'How many segments does your body have?', + 'Do you have body fluids that occupy intracellular space?', + 'Do you descend from an ancestral unicellular organism?', + '_id', + '_uuid', + '_submission_time', + '_validation_status', + '_notes', + '_status', + '_submitted_by', + '__version__', + '_tags', + '_index', + ], + [ + '', + '', + '#symmetry', + '', + '', + '#segments', + '#fluids', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + '', + ], + [ + '2017-10-23T05:40:39.000-04:00', + '2017-10-23T05:41:13.000-04:00', + '1', + '1', + '1', + '6', + 'Yes, and some extracellular space', + 'No', + submissions[0]['_id'], + '48583952-1892-4931-8d9c-869e7b49bafb', + '2017-10-23T09:41:19', + '', + '', + 'submitted_via_web', + '', + version_uid, + '', + 1.0, + ], + [ + '2017-10-23T05:41:14.000-04:00', + '2017-10-23T05:41:32.000-04:00', + '0', + '1', + '0', + '3', + 'Yes', + 'No', + submissions[1]['_id'], + '317ba7b7-bea4-4a8c-8620-a483c3079c4b', + '2017-10-23T09:41:38', + '', + '', + 'submitted_via_web', 
+ '', + version_uid, + '', + 2.0, + ], + [ + '2017-10-23T05:41:32.000-04:00', + '2017-10-23T05:42:05.000-04:00', + '0', + '0', + '1', + '2', + 'No / Unsure', + 'Yes', + submissions[2]['_id'], + '3f15cdfe-3eab-4678-8352-7806febf158d', + '2017-10-23T09:42:11', + '', + '', + 'submitted_via_web', + 'anotheruser', + version_uid, + '', + 3.0, + ], + ] + } self.run_xls_export_test(expected_data, export_options) def test_xls_export_filter_fields(self): @@ -741,9 +1196,7 @@ def test_xls_export_filter_fields_with_media_url(self): export_options = {'fields': ['an_image'], 'include_media_url': True} asset = self.assets[asset_name] submissions = self.forms[asset_name]['submissions'] - submission = asset.deployment.get_submission( - submissions[0]['_id'], asset.owner - ) + submission = asset.deployment.get_submission(submissions[0]['_id'], asset.owner) media_url = submission['_attachments'][0]['download_url'] expected_data = { asset_name: [ @@ -1143,7 +1596,7 @@ def test_export_latest_version_only(self): '"Do you descend... new label";"_id";"_uuid";"_submission_time";"_validation_status";"_notes";"_status";"_submitted_by";"__version__";"_tags";"_index"', f'"no";"{submissions[0]["_id"]}";"48583952-1892-4931-8d9c-869e7b49bafb";"2017-10-23T09:41:19";"";"";"submitted_via_web";"";"{version_uid}";"";"1"', f'"no";"{submissions[1]["_id"]}";"317ba7b7-bea4-4a8c-8620-a483c3079c4b";"2017-10-23T09:41:38";"";"";"submitted_via_web";"";"{version_uid}";"";"2"', - f'"yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"' + f'"yes";"{submissions[2]["_id"]}";"3f15cdfe-3eab-4678-8352-7806febf158d";"2017-10-23T09:42:11";"";"";"submitted_via_web";"anotheruser";"{version_uid}";"";"3"', ] self.run_csv_export_test( expected_lines, {'fields_from_all_versions': 'false'}) @@ -1183,10 +1636,12 @@ def test_export_with_disabled_questions(self): asset = Asset.objects.create( name='Form with undocumented `disabled` column', owner=self.user, - content={'survey': [ - {'label': 'q', 'name': 'q', 'type': 'integer'}, - {'name': 'ignore', 'type': 'select_one nope', 'disabled': True}, - ]}, + content={ + 'survey': [ + {'label': 'q', 'name': 'q', 'type': 'integer'}, + {'name': 'ignore', 'type': 'select_one nope', 'disabled': True}, + ] + }, ) asset.deploy(backend='mock', active=True) submissions = [ diff --git a/kpi/tests/test_usage_calculator.py b/kpi/tests/test_usage_calculator.py index abf4733862..8cf9dad719 100644 --- a/kpi/tests/test_usage_calculator.py +++ b/kpi/tests/test_usage_calculator.py @@ -23,6 +23,7 @@ class BaseServiceUsageTestCase(BaseAssetTestCase): This class contains setup logic and utility functions to test usage calculations """ + fixtures = ['test_data'] URL_NAMESPACE = ROUTER_URL_NAMESPACE @@ -120,12 +121,12 @@ def add_submissions(self, count=2): '_uuid': str(uuid.uuid4()), '_attachments': [ { - 'download_url': 'http://testserver/anotheruser/audio_conversion_test_clip.3gp', + 'download_url': 'http://testserver/anotheruser/audio_conversion_test_clip.3gp', # noqa: E501 'filename': 'anotheruser/audio_conversion_test_clip.3gp', 'mimetype': 'video/3gpp', }, { - 'download_url': 'http://testserver/anotheruser/audio_conversion_test_image.jpg', + 'download_url': 'http://testserver/anotheruser/audio_conversion_test_image.jpg', # noqa: E501 'filename': 'anotheruser/audio_conversion_test_image.jpg', 'mimetype': 'image/jpeg', }, diff --git a/kpi/tests/test_utils.py b/kpi/tests/test_utils.py index bc295b1d93..46812daf72 100644 --- 
a/kpi/tests/test_utils.py +++ b/kpi/tests/test_utils.py @@ -34,30 +34,21 @@ class ConvertHierarchicalKeysToNestedDictTestCase(TestCase): def test_regular_group(self): dict_ = { 'group_lx4sf58/question_1': 'answer_1', - 'group_lx4sf58/question_2': 'answer_2' + 'group_lx4sf58/question_2': 'answer_2', } expected = { - 'group_lx4sf58': { - 'question_1': 'answer_1', - 'question_2': 'answer_2' - } + 'group_lx4sf58': {'question_1': 'answer_1', 'question_2': 'answer_2'} } assert convert_hierarchical_keys_to_nested_dict(dict_) == expected def test_nested_groups(self): - dict_ = { - 'parent_group/middle_group/inner_group/question_1': 'answer_1' - } + dict_ = {'parent_group/middle_group/inner_group/question_1': 'answer_1'} expected = { 'parent_group': { - 'middle_group': { - 'inner_group': { - 'question_1': 'answer_1' - } - } + 'middle_group': {'inner_group': {'question_1': 'answer_1'}} } } @@ -73,10 +64,12 @@ def test_nested_repeated_groups(self): 'group_lq3wx73/middle_group/middle_q': 'middle 1.1.1.1', 'group_lq3wx73/middle_group/inner_group': [ { - 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.1.1' + 'group_lq3wx73/middle_group/inner_group/inner_q': + 'inner 1.1.1.1' }, { - 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.1.2' + 'group_lq3wx73/middle_group/inner_group/inner_q': + 'inner 1.1.1.2' }, ], }, @@ -84,10 +77,12 @@ def test_nested_repeated_groups(self): 'group_lq3wx73/middle_group/middle_q': 'middle 1.1.2.1', 'group_lq3wx73/middle_group/inner_group': [ { - 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.2.1' + 'group_lq3wx73/middle_group/inner_group/inner_q': + 'inner 1.1.2.1' }, { - 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner 1.1.2.1' + 'group_lq3wx73/middle_group/inner_group/inner_q': + 'inner 1.1.2.1' }, ], }, @@ -99,7 +94,8 @@ def test_nested_repeated_groups(self): 'group_lq3wx73/middle_group/middle_q': 'middle 1.2.1.1', 'group_lq3wx73/middle_group/inner_group': [ { - 'group_lq3wx73/middle_group/inner_group/inner_q': 'inner_q 1.2.1.1' + 'group_lq3wx73/middle_group/inner_group/inner_q': + 'inner_q 1.2.1.1' } ], } @@ -133,9 +129,7 @@ def test_nested_repeated_groups(self): 'middle_group': [ { 'middle_q': 'middle 1.2.1.1', - 'inner_group': [ - {'inner_q': 'inner_q 1.2.1.1'} - ], + 'inner_group': [{'inner_q': 'inner_q 1.2.1.1'}], } ] }, @@ -161,7 +155,7 @@ def test_nested_repeated_groups_in_group(self): 'people': { 'person': [ {'name': 'Julius Caesar', 'age': 55}, - {'name': 'Augustus', 'age': 75} + {'name': 'Augustus', 'age': 75}, ] } } @@ -185,15 +179,15 @@ def test_sluggify(self): def test_sluggify_label(self): inp_exps = [ - [['asdf jkl'], 'asdf_jkl'], - [['asdf', ['asdf']], 'asdf_001'], - [['2. asdf'], '_2_asdf'], - [['2. asdf', ['_2_asdf']], '_2_asdf_001'], - [['asdf#123'], 'asdf_123'], - [[' hello '], 'hello'], + [['asdf jkl'], 'asdf_jkl'], + [['asdf', ['asdf']], 'asdf_001'], + [['2. asdf'], '_2_asdf'], + [['2. 
asdf', ['_2_asdf']], '_2_asdf_001'], + [['asdf#123'], 'asdf_123'], + [[' hello '], 'hello'], # FIX THIS when we come up with a better way to summarize # arabic and cyrillic text - [['أين السوق؟', ['_', '__001']], '__002'] + [['أين السوق؟', ['_', '__001']], '__002'], ] for inps, expected in inp_exps: inp = inps[0] diff --git a/kpi/tests/utils/dicts.py b/kpi/tests/utils/dicts.py index d1e5790405..68bc5eac22 100644 --- a/kpi/tests/utils/dicts.py +++ b/kpi/tests/utils/dicts.py @@ -17,7 +17,8 @@ def convert_hierarchical_keys_to_nested_dict(dict_: dict) -> dict: keys = key.split('/') sub_dict = result - # Traverse each part of the key except the last one to build the nested structure + # Traverse each part of the key except the last one to build the nested + # structure. # # Example: # In keys = ['a', 'b', 'c'], the sub-keys 'a' and 'b' represent intermediate diff --git a/kpi/tests/utils/xml.py b/kpi/tests/utils/xml.py index 1df962c5e7..0127e6da20 100644 --- a/kpi/tests/utils/xml.py +++ b/kpi/tests/utils/xml.py @@ -5,9 +5,7 @@ from kpi.utils.xml import check_lxml_fromstring -def get_form_and_submission_tag_names( - form: str, submission: str -) -> tuple[str, str]: +def get_form_and_submission_tag_names(form: str, submission: str) -> tuple[str, str]: submission_root_name = check_lxml_fromstring(submission.encode()).tag tree = etree.ElementTree(check_lxml_fromstring(form)) root = tree.getroot() diff --git a/kpi/urls/__init__.py b/kpi/urls/__init__.py index 27d09f68c5..1bf7ab0f39 100644 --- a/kpi/urls/__init__.py +++ b/kpi/urls/__init__.py @@ -14,7 +14,6 @@ from kpi.views.current_user import CurrentUserViewSet from kpi.views.environment import EnvironmentView from kpi.views.token import TokenView - from ..views.v2.logout import logout_from_all_devices from .router_api_v1 import router_api_v1 from .router_api_v2 import URL_NAMESPACE, router_api_v2 @@ -40,7 +39,7 @@ re_path( r'^authorized_application/authenticate_user/$', authorized_application_authenticate_user, - name='authenticate_user' + name='authenticate_user', ), path('browser_tests/', browser_tests), path('modern_browsers/', modern_browsers), @@ -54,8 +53,11 @@ ConfigurationFile.content_view, name='configurationfile'), re_path(r'^private-media/', include(private_storage.urls)), # Statistics for superusers - re_path(r'^superuser_stats/', include(('kobo.apps.superuser_stats.urls', 'superuser_stats'))), - path('logout-all/', logout_from_all_devices, name='logout_all') + re_path( + r'^superuser_stats/', + include(('kobo.apps.superuser_stats.urls', 'superuser_stats')), + ), + path('logout-all/', logout_from_all_devices, name='logout_all'), ] diff --git a/kpi/utils/database.py b/kpi/utils/database.py index 6b1e624b53..3313a7d1ec 100644 --- a/kpi/utils/database.py +++ b/kpi/utils/database.py @@ -2,10 +2,7 @@ from functools import wraps from django.conf import settings -from django.db import ( - connections, - models, -) +from django.db import connections, models thread_local = threading.local() @@ -46,13 +43,16 @@ def update_autofield_sequence( # Error: # > setval: value -1 is out of bounds for sequence # Using abs() and testing if max(pk) equals -1, leaves the sequence alone. 
+ + # Intermediate variable `single_quotes_args` is just here to lure linter about Q000 + single_quotes_args = "'{table}','{column}'" sql_template = ( - "SELECT setval(" - " pg_get_serial_sequence('{table}','{column}'), " - " abs(coalesce(max({column}), 1)), " - " max({column}) IS NOT null and max({column}) != -1" - ") " - "FROM {table};" + 'SELECT setval(' + f' pg_get_serial_sequence({single_quotes_args}), ' + ' abs(coalesce(max({column}), 1)), ' + ' max({column}) IS NOT null and max({column}) != -1' + ') ' + 'FROM {table};' ) autofield = None for f in model._meta.get_fields(): @@ -61,9 +61,7 @@ def update_autofield_sequence( break if not autofield: return - query = sql_template.format( - table=model._meta.db_table, column=autofield.column - ) + query = sql_template.format(table=model._meta.db_table, column=autofield.column) connection = connections[using] with connection.cursor() as cursor: cursor.execute(query) diff --git a/kpi/utils/monkey_patching.py b/kpi/utils/monkey_patching.py index 506cec40ba..8acd02a9c1 100644 --- a/kpi/utils/monkey_patching.py +++ b/kpi/utils/monkey_patching.py @@ -1,9 +1,7 @@ import django.contrib.auth.management import django.db.models.deletion from django.conf import settings -from django.contrib.auth.management import ( - DEFAULT_DB_ALIAS, -) +from django.contrib.auth.management import DEFAULT_DB_ALIAS from django.contrib.auth.management import ( create_permissions as django_create_permissions, ) @@ -39,7 +37,7 @@ def get_candidate_relations_to_delete(opts): candidates to delete, based on `on_delete` attribute, from the other database - which obviously raises an error because the table does not exist. - See https://github.com/django/django/blob/52116774549e27ac5d1ba9423e2fe61c5503a4a4/django/db/models/deletion.py#L86-L93 + See https://github.com/django/django/blob/52116774549e27ac5d1ba9423e2fe61c5503a4a4/django/db/models/deletion.py#L86-L93 # noqa: E501 """ db_connection = router.db_for_write(opts.model) @@ -47,9 +45,7 @@ def get_candidate_relations_to_delete(opts): return ( f for f in opts.get_fields(include_hidden=True) - if f.auto_created - and not f.concrete - and (f.one_to_one or f.one_to_many) + if f.auto_created and not f.concrete and (f.one_to_one or f.one_to_many) # new condition below from monkey-patching and ( f.remote_field.model._meta.app_label in SHARED_APP_LABELS @@ -59,8 +55,7 @@ def get_candidate_relations_to_delete(opts): and db_connection == settings.OPENROSA_DB_ALIAS ) or ( - f.remote_field.model._meta.app_label - not in OPENROSA_APP_LABELS + f.remote_field.model._meta.app_label not in OPENROSA_APP_LABELS and db_connection == DEFAULT_DB_ALIAS ) ) @@ -69,4 +64,6 @@ def get_candidate_relations_to_delete(opts): django.contrib.auth.management.create_permissions = create_permissions -django.db.models.deletion.get_candidate_relations_to_delete = get_candidate_relations_to_delete +django.db.models.deletion.get_candidate_relations_to_delete = ( + get_candidate_relations_to_delete +) diff --git a/kpi/utils/project_view_exports.py b/kpi/utils/project_view_exports.py index b14ef2a652..d8ab5e1643 100644 --- a/kpi/utils/project_view_exports.py +++ b/kpi/utils/project_view_exports.py @@ -117,11 +117,7 @@ def get_q(countries: list[str], export_type: str) -> QuerySet: def get_submission_count(xform_id: int) -> int: - result = ( - XForm.objects.values('num_of_submissions') - .filter(pk=xform_id) - .first() - ) + result = XForm.objects.values('num_of_submissions').filter(pk=xform_id).first() if not result: return 0 diff --git 
a/kpi/utils/usage_calculator.py b/kpi/utils/usage_calculator.py index cee15a9798..eaf4aacca0 100644 --- a/kpi/utils/usage_calculator.py +++ b/kpi/utils/usage_calculator.py @@ -6,10 +6,7 @@ from django.utils import timezone from kobo.apps.kobo_auth.shortcuts import User -from kobo.apps.openrosa.apps.logger.models import ( - DailyXFormSubmissionCounter, - XForm, -) +from kobo.apps.openrosa.apps.logger.models import DailyXFormSubmissionCounter, XForm from kobo.apps.organizations.models import Organization from kobo.apps.organizations.utils import ( get_monthly_billing_dates, @@ -27,34 +24,31 @@ def __init__(self, user: User, organization: Optional[Organization]): self._user_ids = [user.pk] self._user_id_query = self._filter_by_user([user.pk]) if organization and settings.STRIPE_ENABLED: - # if the user is in an organization and has an enterprise plan, get all org users - # we evaluate this queryset instead of using it as a subquery because it's referencing - # fields from the auth_user tables on kpi *and* kobocat, making getting results in a - # single query not feasible until those tables are combined + # If the user is in an organization and has an enterprise plan, get all org + # users we evaluate this queryset instead of using it as a subquery. It's + # referencing fields from the auth_user tables on kpi *and* kobocat, + # making getting results in a single query not feasible until those tables + # are combined. user_ids = list( User.objects.filter( organizations_organization__id=organization.id, - organizations_organization__djstripe_customers__subscriptions__status__in=ACTIVE_STRIPE_STATUSES, - organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__has_key='plan_type', - organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__plan_type='enterprise', - ).values_list('pk', flat=True)[ - : settings.ORGANIZATION_USER_LIMIT - ] + organizations_organization__djstripe_customers__subscriptions__status__in=ACTIVE_STRIPE_STATUSES, # noqa: E501 + organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__has_key='plan_type', # noqa: E501 + organizations_organization__djstripe_customers__subscriptions__items__price__product__metadata__plan_type='enterprise', # noqa: E501 + ).values_list('pk', flat=True)[: settings.ORGANIZATION_USER_LIMIT] ) if user_ids: self._user_ids = user_ids self._user_id_query = self._filter_by_user(user_ids) now = timezone.now() - self.current_month_start, self.current_month_end = ( - get_monthly_billing_dates(organization) - ) - self.current_year_start, self.current_year_end = ( - get_yearly_billing_dates(organization) + self.current_month_start, self.current_month_end = get_monthly_billing_dates( + organization ) - self.current_month_filter = Q( - date__range=[self.current_month_start, now] + self.current_year_start, self.current_year_end = get_yearly_billing_dates( + organization ) + self.current_month_filter = Q(date__range=[self.current_month_start, now]) self.current_year_filter = Q(date__range=[self.current_year_start, now]) def _filter_by_user(self, user_ids: list) -> Q: @@ -82,9 +76,7 @@ def get_nlp_usage_counters(self): 0, ), mt_characters_current_month=Coalesce( - Sum( - 'total_mt_characters', filter=self.current_month_filter - ), + Sum('total_mt_characters', filter=self.current_month_filter), 0, ), asr_seconds_all_time=Coalesce(Sum('total_asr_seconds'), 0), @@ -104,9 +96,11 @@ def get_storage_usage(self): Users are represented by their ids with 
         `self._user_ids`
         """
-        xforms = XForm.objects.only('attachment_storage_bytes', 'id').exclude(
-            pending_delete=True
-        ).filter(self._user_id_query)
+        xforms = (
+            XForm.objects.only('attachment_storage_bytes', 'id')
+            .exclude(pending_delete=True)
+            .filter(self._user_id_query)
+        )
 
         total_storage_bytes = xforms.aggregate(
             bytes_sum=Coalesce(Sum('attachment_storage_bytes'), 0),
@@ -120,22 +114,22 @@ def get_submission_counters(self):
         Users are represented by their ids with
         `self._user_ids`
         """
-        submission_count = DailyXFormSubmissionCounter.objects.only(
-            'counter', 'user_id'
-        ).filter(self._user_id_query).aggregate(
-            all_time=Coalesce(Sum('counter'), 0),
-            current_year=Coalesce(
-                Sum('counter', filter=self.current_year_filter), 0
-            ),
-            current_month=Coalesce(
-                Sum('counter', filter=self.current_month_filter), 0
-            ),
+        submission_count = (
+            DailyXFormSubmissionCounter.objects.only('counter', 'user_id')
+            .filter(self._user_id_query)
+            .aggregate(
+                all_time=Coalesce(Sum('counter'), 0),
+                current_year=Coalesce(
+                    Sum('counter', filter=self.current_year_filter), 0
+                ),
+                current_month=Coalesce(
+                    Sum('counter', filter=self.current_month_filter), 0
+                ),
+            )
         )
 
         total_submission_count = {}
         for submission_key, count in submission_count.items():
-            total_submission_count[submission_key] = (
-                count if count is not None else 0
-            )
+            total_submission_count[submission_key] = count if count is not None else 0
 
         return total_submission_count
diff --git a/kpi/views/environment.py b/kpi/views/environment.py
index 31d2200eac..d4d5ef7f8d 100644
--- a/kpi/views/environment.py
+++ b/kpi/views/environment.py
@@ -165,9 +165,7 @@ def process_other_configs(request):
         )
     )
 
-    data['asr_mt_features_enabled'] = check_asr_mt_access_for_user(
-        request.user
-    )
+    data['asr_mt_features_enabled'] = check_asr_mt_access_for_user(request.user)
     data['submission_placeholder'] = SUBMISSION_PLACEHOLDER
 
     if settings.STRIPE_ENABLED:
diff --git a/kpi/views/v2/asset.py b/kpi/views/v2/asset.py
index e9f0086148..0089eafd82 100644
--- a/kpi/views/v2/asset.py
+++ b/kpi/views/v2/asset.py
@@ -21,20 +21,11 @@
     CLONE_COMPATIBLE_TYPES,
     CLONE_FROM_VERSION_ID_ARG_NAME,
 )
-from kpi.exceptions import (
-    BadAssetTypeException,
-)
-from kpi.filters import (
-    AssetOrderingFilter,
-    KpiObjectPermissionsFilter,
-    SearchFilter,
-)
+from kpi.exceptions import BadAssetTypeException
+from kpi.filters import AssetOrderingFilter, KpiObjectPermissionsFilter, SearchFilter
 from kpi.highlighters import highlight_xform
 from kpi.mixins.object_permission import ObjectPermissionViewSetMixin
-from kpi.models import (
-    Asset,
-    UserAssetSubscription,
-)
+from kpi.models import Asset, UserAssetSubscription
 from kpi.paginators import AssetPagination
 from kpi.permissions import (
     AssetPermission,
@@ -42,12 +33,7 @@
     ReportPermission,
     get_perm_name,
 )
-from kpi.renderers import (
-    AssetJsonRenderer,
-    SSJsonRenderer,
-    XFormRenderer,
-    XlsRenderer,
-)
+from kpi.renderers import AssetJsonRenderer, SSJsonRenderer, XFormRenderer, XlsRenderer
 from kpi.serializers.v2.asset import (
     AssetBulkActionsSerializer,
     AssetListSerializer,
@@ -58,10 +44,7 @@
 from kpi.utils.bugfix import repair_file_column_content_and_save
 from kpi.utils.hash import calculate_hash
 from kpi.utils.kobo_to_xlsform import to_xlsform_structure
-from kpi.utils.object_permission import (
-    get_database_user,
-    get_objects_for_user,
-)
+from kpi.utils.object_permission import get_database_user, get_objects_for_user
 from kpi.utils.ss_structure_to_mdtable import ss_structure_to_mdtable
 
 
@@ -906,7 +889,9 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update):
             cloned_data.pop('asset_type', None)
         else:
             # Change asset_type if needed.
-            cloned_data['asset_type'] = self.request.data.get(ASSET_TYPE_ARG_NAME, original_asset.asset_type)
+            cloned_data['asset_type'] = self.request.data.get(
+                ASSET_TYPE_ARG_NAME, original_asset.asset_type
+            )
 
             cloned_asset_type = cloned_data.get('asset_type')
             # Settings are: Country, Description, Sector and Share-metadata
@@ -934,7 +919,9 @@ def _prepare_cloned_data(self, original_asset, source_version, partial_update):
             cloned_data['content'] = json.dumps(cloned_data.get('content'))
             return cloned_data
         else:
-            raise BadAssetTypeException('Destination type is not compatible with source type')
+            raise BadAssetTypeException(
+                'Destination type is not compatible with source type'
+            )
 
     def _validate_destination_type(self, original_asset_):
         """
diff --git a/kpi/views/v2/asset_snapshot.py b/kpi/views/v2/asset_snapshot.py
index 2807eeaf77..5763ca8de4 100644
--- a/kpi/views/v2/asset_snapshot.py
+++ b/kpi/views/v2/asset_snapshot.py
@@ -9,9 +9,7 @@
 from rest_framework.response import Response
 from rest_framework.reverse import reverse
 
-from kobo.apps.openrosa.libs.utils.logger_tools import (
-    http_open_rosa_error_handler,
-)
+from kobo.apps.openrosa.libs.utils.logger_tools import http_open_rosa_error_handler
 from kpi.authentication import DigestAuthentication, EnketoSessionAuthentication
 from kpi.constants import PERM_VIEW_ASSET
 from kpi.exceptions import SubmissionIntegrityError
@@ -28,9 +26,7 @@
 from kpi.serializers.v2.open_rosa import FormListSerializer, ManifestSerializer
 from kpi.tasks import enketo_flush_cached_preview
 from kpi.utils.object_permission import get_database_user
-from kpi.utils.project_views import (
-    user_has_project_view_asset_perm,
-)
+from kpi.utils.project_views import user_has_project_view_asset_perm
 from kpi.utils.xml import XMLFormWithDisclaimer
 from kpi.views.no_update_model import NoUpdateModelViewSet
 from kpi.views.v2.open_rosa import OpenRosaViewSetMixin
diff --git a/kpi/views/v2/data.py b/kpi/views/v2/data.py
index cb894e3b73..b1808cf36e 100644
--- a/kpi/views/v2/data.py
+++ b/kpi/views/v2/data.py
@@ -8,12 +8,7 @@
 from django.http import Http404, HttpResponseRedirect
 from django.utils.translation import gettext_lazy as t
 from pymongo.errors import OperationFailure
-from rest_framework import (
-    renderers,
-    serializers,
-    status,
-    viewsets,
-)
+from rest_framework import renderers, serializers, status, viewsets
 from rest_framework.decorators import action
 from rest_framework.pagination import _positive_int as positive_int
 from rest_framework.request import Request
@@ -22,9 +17,7 @@
 from rest_framework_extensions.mixins import NestedViewSetMixin
 
 from kobo.apps.audit_log.models import AuditAction, AuditLog, AuditType
-from kobo.apps.openrosa.libs.utils.logger_tools import (
-    http_open_rosa_error_handler,
-)
+from kobo.apps.openrosa.libs.utils.logger_tools import http_open_rosa_error_handler
 from kpi.authentication import EnketoSessionAuthentication
 from kpi.constants import (
     PERM_CHANGE_SUBMISSIONS,
@@ -48,10 +41,7 @@
     SubmissionValidationStatusPermission,
     ViewSubmissionPermission,
 )
-from kpi.renderers import (
-    SubmissionGeoJsonRenderer,
-    SubmissionXMLRenderer,
-)
+from kpi.renderers import SubmissionGeoJsonRenderer, SubmissionXMLRenderer
 from kpi.serializers.v2.data import DataBulkActionsValidator
 from kpi.utils.log import logging
 from kpi.utils.viewset_mixins import AssetNestedObjectViewsetMixin
@@ -362,9 +352,7 @@ def destroy(self, request, pk, *args, **kwargs):
             fields=['_id', '_uuid']
         )
 
-        if deployment.delete_submission(
-            submission_id, user=request.user
-        ):
+        if deployment.delete_submission(submission_id, user=request.user):
             AuditLog.objects.create(
                 app_label='logger',
                 model_name='instance',
@@ -486,10 +474,7 @@ def list(self, request, *args, **kwargs):
 
         try:
             submissions = deployment.get_submissions(
-                request.user,
-                format_type=format_type,
-                request=request,
-                **filters
+                request.user, format_type=format_type, request=request, **filters
             )
         except OperationFailure as err:
             message = str(err)
@@ -600,7 +585,7 @@ def _bulk_delete(self, request: Request) -> dict:
         serializer_params = {
             'data': request.data,
             'context': self.get_serializer_context(),
-            'perm': PERM_DELETE_SUBMISSIONS
+            'perm': PERM_DELETE_SUBMISSIONS,
         }
         bulk_actions_validator = DataBulkActionsValidator(**serializer_params)
         bulk_actions_validator.is_valid(raise_exception=True)
@@ -615,25 +600,27 @@ def _bulk_delete(self, request: Request) -> dict:
             user=request.user,
             submission_ids=data['submission_ids'],
             query=data['query'],
-            fields=['_id', '_uuid']
+            fields=['_id', '_uuid'],
         )
 
         # Prepare logs before deleting all submissions.
         audit_logs = []
         for submission in submissions:
-            audit_logs.append(AuditLog(
-                app_label='logger',
-                model_name='instance',
-                object_id=submission['_id'],
-                user=request.user,
-                user_uid=request.user.extra_details.uid,
-                metadata={
-                    'asset_uid': self.asset.uid,
-                    'uuid': submission['_uuid'],
-                },
-                action=AuditAction.DELETE,
-                log_type=AuditType.SUBMISSION_MANAGEMENT,
-            ))
+            audit_logs.append(
+                AuditLog(
+                    app_label='logger',
+                    model_name='instance',
+                    object_id=submission['_id'],
+                    user=request.user,
+                    user_uid=request.user.extra_details.uid,
+                    metadata={
+                        'asset_uid': self.asset.uid,
+                        'uuid': submission['_uuid'],
+                    },
+                    action=AuditAction.DELETE,
+                    log_type=AuditType.SUBMISSION_MANAGEMENT,
+                )
+            )
 
         try:
             deleted = deployment.delete_submissions(

From ea5113531673ac0594351f9066e5015c712144b9 Mon Sep 17 00:00:00 2001
From: Olivier Leger
Date: Mon, 7 Oct 2024 14:13:38 -0400
Subject: [PATCH 119/119] Make noqa comments for F401 consistent

---
 kobo/apps/superuser_stats/migrations/0001_initial.py | 2 +-
 kpi/tests/test_mock_data_exports.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/kobo/apps/superuser_stats/migrations/0001_initial.py b/kobo/apps/superuser_stats/migrations/0001_initial.py
index 506c69bf3f..a9f2c0d9db 100644
--- a/kobo/apps/superuser_stats/migrations/0001_initial.py
+++ b/kobo/apps/superuser_stats/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# flake8: noqa
+# flake8: noqa: F401
 # Generated by Django 4.2.11 on 2024-07-03 19:37
 
 from django.db import migrations
diff --git a/kpi/tests/test_mock_data_exports.py b/kpi/tests/test_mock_data_exports.py
index 09f12c03d7..2e62a7083c 100644
--- a/kpi/tests/test_mock_data_exports.py
+++ b/kpi/tests/test_mock_data_exports.py
@@ -1,4 +1,4 @@
-# flake8: noqa
+# flake8: noqa: F401
 import os
 import zipfile
 from collections import defaultdict