From 8fdb62abaa5b72d28cebf86231268538020c05f3 Mon Sep 17 00:00:00 2001
From: hadleyking
Date: Thu, 14 Mar 2024 11:51:48 -0400
Subject: [PATCH] Remove API app

Changes to be committed:
    deleted:    api/__init__.py
    deleted:    api/admin.py
    deleted:    api/apps.py
    deleted:    api/fixtures/bootstrap.json
    deleted:    api/fixtures/metafixtures
    deleted:    api/fixtures/metafixtures.json
    deleted:    api/keys.sh
    deleted:    api/migrations/0001_initial.py
    deleted:    api/migrations/0002_auto_20220124_2356.py
    deleted:    api/migrations/0003_rename_meta_table_prefix_table.py
    deleted:    api/migrations/0004_rename_group_info_groupinfo.py
    deleted:    api/migrations/0005_rename_prefixes_prefix.py
    deleted:    api/migrations/0006_delete_new_users.py
    deleted:    api/migrations/__init__.py
    deleted:    api/model/__init__.py
    deleted:    api/model/groups.py
    deleted:    api/model/prefix.py
    deleted:    api/models.py
    deleted:    api/permissions.py
    deleted:    api/rdb.sh
    deleted:    api/request_definitions/GET.schema
    deleted:    api/request_definitions/POST.schema
    deleted:    api/request_definitions/templates/DELETE_delete_object_by_id.schema
    deleted:    api/request_definitions/templates/GET_activate_account.schema
    deleted:    api/request_definitions/templates/GET_get_object_by_id.schema
    deleted:    api/request_definitions/templates/GET_retrieve_available_schema.schema
    deleted:    api/request_definitions/templates/POST_convert_existing_object_between_schemas.schema
    deleted:    api/request_definitions/templates/POST_convert_payload_to_schema.schema
    deleted:    api/request_definitions/templates/POST_new_account.schema
    deleted:    api/request_definitions/templates/POST_object_listing_by_token.schema
    deleted:    api/request_definitions/templates/POST_objects_draft.schema
    deleted:    api/request_definitions/templates/POST_objects_publish.schema
    deleted:    api/request_definitions/templates/POST_read_object.schema
    deleted:    api/request_definitions/templates/POST_validate_payload_against_schema.schema
    deleted:    api/scripts/__init__.py
    deleted:    api/scripts/method_specific/GET_draft_object_by_id.py
    deleted:    api/scripts/method_specific/GET_published_object_by_id.py
    deleted:    api/scripts/method_specific/GET_published_object_by_id_with_version.py
    deleted:    api/scripts/method_specific/GET_retrieve_available_schema.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_create.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_delete.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_modify.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_permissions.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_permissions_set.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_publish.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_read.py
    deleted:    api/scripts/method_specific/POST_api_objects_drafts_token.py
    deleted:    api/scripts/method_specific/POST_api_objects_publish.py
    deleted:    api/scripts/method_specific/POST_api_objects_published.py
    deleted:    api/scripts/method_specific/POST_api_objects_search.py
    deleted:    api/scripts/method_specific/POST_api_objects_token.py
    deleted:    api/scripts/method_specific/POST_validate_payload_against_schema.py
    deleted:    api/scripts/method_specific/__init__.py
    deleted:    api/scripts/utilities/DbUtils.py
    deleted:    api/scripts/utilities/FileUtils.py
    deleted:    api/scripts/utilities/JsonUtils.py
    deleted:    api/scripts/utilities/RequestUtils.py
    deleted:    api/scripts/utilities/ResponseUtils.py
    deleted:    api/scripts/utilities/SettingsUtils.py
    deleted:    api/scripts/utilities/UserUtils.py
    deleted:    api/scripts/utilities/__init__.py
    deleted:    api/serializers.py
    deleted:    api/signals.py
    deleted:    api/templates/api/account_activation_message.html
    deleted:    api/urls.py
    deleted:    api/validation_definitions/IEEE/2791object.json
    deleted:    api/validation_definitions/IEEE/description_domain.json
    deleted:    api/validation_definitions/IEEE/error_domain.json
    deleted:    api/validation_definitions/IEEE/execution_domain.json
    deleted:    api/validation_definitions/IEEE/io_domain.json
    deleted:    api/validation_definitions/IEEE/parametric_domain.json
    deleted:    api/validation_definitions/IEEE/provenance_domain.json
    deleted:    api/validation_definitions/IEEE/usability_domain.json
    deleted:    api/validation_definitions/IEEE_sub/IEEE2791-2020.schema
    deleted:    api/validation_definitions/IEEE_sub/domains/description_domain.json
    deleted:    api/validation_definitions/IEEE_sub/domains/error_domain.json
    deleted:    api/validation_definitions/IEEE_sub/domains/execution_domain.json
    deleted:    api/validation_definitions/IEEE_sub/domains/io_domain.json
    deleted:    api/validation_definitions/IEEE_sub/domains/parametric_domain.json
    deleted:    api/validation_definitions/IEEE_sub/domains/provenance_domain.json
    deleted:    api/validation_definitions/IEEE_sub/domains/usability_domain.json
    deleted:    api/validation_definitions/uri_external
    deleted:    api/views.py
    new file:   config/settings.py
    modified:   config/urls.py

Changes not staged for commit:
    modified:   authentication/apis.py
    modified:   authentication/migrations/0001_initial.py
    deleted:    authentication/migrations/0002_newuser.py
    modified:   authentication/services.py
    modified:   docs/refactor.md
    modified:   search/selectors.py
    modified:   tests/fixtures/test_data.json
    deleted:    tests/test_database.py
    deleted:    tests/test_fixtures.py
    deleted:    tests/test_models
    modified:   tests/test_views/test_api_account_activate.py
    deleted:    tests/test_views/test_api_accounts_describe.py
    modified:   tests/test_views/test_api_auth_add.py
    modified:   tests/test_views/test_api_auth_reset_token.py
    deleted:    tests/test_views/test_api_groups_group_info.py
    deleted:    tests/test_views/test_api_groups_modify.py
    deleted:    tests/test_views/test_api_objects.py
    deleted:    tests/test_views/test_api_objects_drafts_create.py
    deleted:    tests/test_views/test_api_objects_drafts_modify.py
    deleted:    tests/test_views/test_api_objects_drafts_publish.py
    deleted:    tests/test_views/test_api_objects_search.py
    deleted:    tests/test_views/test_api_objects_validate.py
    deleted:    tests/test_views/test_api_prefixes_create.py
    deleted:    tests/test_views/test_api_prefixes_token.py
    deleted:    tests/test_views/test_get_object_id_draft.py
    deleted:    tests/test_views/test_get_objectid.py
    deleted:    tests/test_views/test_published_object_by_id.py
    modified:   token.json
---
 api/__init__.py | 0
 api/admin.py | 17 -
 api/apps.py | 26 -
 api/fixtures/bootstrap.json | 2058 -----------------
 api/fixtures/metafixtures | 58 -
 api/fixtures/metafixtures.json | 10 -
 api/keys.sh | 16 -
 api/migrations/0001_initial.py | 159 --
 api/migrations/0002_auto_20220124_2356.py | 60 -
 .../0003_rename_meta_table_prefix_table.py | 17 -
 .../0004_rename_group_info_groupinfo.py | 20 -
 api/migrations/0005_rename_prefixes_prefix.py | 20 -
 api/migrations/0006_delete_new_users.py | 16 -
 api/migrations/__init__.py | 0
 api/model/__init__.py | 0
 api/model/groups.py | 466 ----
 api/model/prefix.py | 745 ------
 api/models.py | 109 -
 api/permissions.py | 273 ---
 api/rdb.sh | 41 -
 api/request_definitions/GET.schema | 20 -
 api/request_definitions/POST.schema | 40 -
 .../DELETE_delete_object_by_id.schema | 55 -
 .../templates/GET_activate_account.schema | 25 -
 .../templates/GET_get_object_by_id.schema | 55 -
 .../GET_retrieve_available_schema.schema | 13 -
 ...ert_existing_object_between_schemas.schema | 55 -
 .../POST_convert_payload_to_schema.schema | 44 -
 .../templates/POST_new_account.schema | 20 -
 .../POST_object_listing_by_token.schema | 19 -
 .../templates/POST_objects_draft.schema | 65 -
 .../templates/POST_objects_publish.schema | 65 -
 .../templates/POST_read_object.schema | 33 -
 ...OST_validate_payload_against_schema.schema | 33 -
 api/scripts/__init__.py | 0
 .../method_specific/GET_draft_object_by_id.py | 73 -
 .../GET_published_object_by_id.py | 111 -
 ...GET_published_object_by_id_with_version.py | 114 -
 .../GET_retrieve_available_schema.py | 49 -
 .../POST_api_objects_drafts_create.py | 166 --
 .../POST_api_objects_drafts_delete.py | 117 -
 .../POST_api_objects_drafts_modify.py | 171 --
 .../POST_api_objects_drafts_permissions.py | 158 --
 ...POST_api_objects_drafts_permissions_set.py | 242 --
 .../POST_api_objects_drafts_publish.py | 217 --
 .../POST_api_objects_drafts_read.py | 121 -
 .../POST_api_objects_drafts_token.py | 238 --
 .../POST_api_objects_publish.py | 183 --
 .../POST_api_objects_published.py | 115 -
 .../POST_api_objects_search.py | 120 -
 .../method_specific/POST_api_objects_token.py | 25 -
 .../POST_validate_payload_against_schema.py | 59 -
 api/scripts/method_specific/__init__.py | 0
 api/scripts/utilities/DbUtils.py | 986 --------
 api/scripts/utilities/FileUtils.py | 167 --
 api/scripts/utilities/JsonUtils.py | 308 ---
 api/scripts/utilities/RequestUtils.py | 30 -
 api/scripts/utilities/ResponseUtils.py | 53 -
 api/scripts/utilities/SettingsUtils.py | 146 --
 api/scripts/utilities/UserUtils.py | 268 ---
 api/scripts/utilities/__init__.py | 0
 api/serializers.py | 27 -
 api/signals.py | 116 -
 .../api/account_activation_message.html | 41 -
 api/urls.py | 154 --
 .../IEEE/2791object.json | 178 --
 .../IEEE/description_domain.json | 165 --
 .../IEEE/error_domain.json | 24 -
 .../IEEE/execution_domain.json | 111 -
 .../IEEE/io_domain.json | 58 -
 .../IEEE/parametric_domain.json | 42 -
 .../IEEE/provenance_domain.json | 126 -
 .../IEEE/usability_domain.json | 16 -
 .../IEEE_sub/IEEE2791-2020.schema | 178 --
 .../IEEE_sub/domains/description_domain.json | 165 --
 .../IEEE_sub/domains/error_domain.json | 24 -
 .../IEEE_sub/domains/execution_domain.json | 111 -
 .../IEEE_sub/domains/io_domain.json | 58 -
 .../IEEE_sub/domains/parametric_domain.json | 42 -
 .../IEEE_sub/domains/provenance_domain.json | 126 -
 .../IEEE_sub/domains/usability_domain.json | 16 -
 api/validation_definitions/uri_external | 0
 api/views.py | 1640 -------------
 config/settings.py | 261 +++
 config/urls.py | 1 -
 85 files changed, 261 insertions(+), 12309 deletions(-)
 delete mode 100755 api/__init__.py
 delete mode 100755 api/admin.py
 delete mode 100755 api/apps.py
 delete mode 100644 api/fixtures/bootstrap.json
 delete mode 100644 api/fixtures/metafixtures
 delete mode 100644 api/fixtures/metafixtures.json
 delete mode 100755 api/keys.sh
 delete mode 100644 api/migrations/0001_initial.py
 delete mode 100644 api/migrations/0002_auto_20220124_2356.py
 delete mode 100644 api/migrations/0003_rename_meta_table_prefix_table.py
 delete mode 100644 api/migrations/0004_rename_group_info_groupinfo.py
 delete mode 100644 api/migrations/0005_rename_prefixes_prefix.py
 delete mode 100644 api/migrations/0006_delete_new_users.py
 delete mode 100644 api/migrations/__init__.py
 delete mode 100644 api/model/__init__.py
 delete mode 100644 api/model/groups.py
 delete mode 100644 api/model/prefix.py
 delete mode 100755 api/models.py
 delete mode 100644 api/permissions.py
 delete mode 100755 api/rdb.sh
 delete mode 100755 api/request_definitions/GET.schema
 delete mode 100755 api/request_definitions/POST.schema
 delete mode 100755 api/request_definitions/templates/DELETE_delete_object_by_id.schema
 delete mode 100755 api/request_definitions/templates/GET_activate_account.schema
 delete mode 100755 api/request_definitions/templates/GET_get_object_by_id.schema
 delete mode 100755 api/request_definitions/templates/GET_retrieve_available_schema.schema
 delete mode 100755 api/request_definitions/templates/POST_convert_existing_object_between_schemas.schema
 delete mode 100755 api/request_definitions/templates/POST_convert_payload_to_schema.schema
 delete mode 100755 api/request_definitions/templates/POST_new_account.schema
 delete mode 100755 api/request_definitions/templates/POST_object_listing_by_token.schema
 delete mode 100755 api/request_definitions/templates/POST_objects_draft.schema
 delete mode 100755 api/request_definitions/templates/POST_objects_publish.schema
 delete mode 100755 api/request_definitions/templates/POST_read_object.schema
 delete mode 100755 api/request_definitions/templates/POST_validate_payload_against_schema.schema
 delete mode 100755 api/scripts/__init__.py
 delete mode 100755 api/scripts/method_specific/GET_draft_object_by_id.py
 delete mode 100755 api/scripts/method_specific/GET_published_object_by_id.py
 delete mode 100755 api/scripts/method_specific/GET_published_object_by_id_with_version.py
 delete mode 100755 api/scripts/method_specific/GET_retrieve_available_schema.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_create.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_delete.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_modify.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_permissions.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_permissions_set.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_publish.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_read.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_drafts_token.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_publish.py
 delete mode 100644 api/scripts/method_specific/POST_api_objects_published.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_search.py
 delete mode 100755 api/scripts/method_specific/POST_api_objects_token.py
 delete mode 100755 api/scripts/method_specific/POST_validate_payload_against_schema.py
 delete mode 100755 api/scripts/method_specific/__init__.py
 delete mode 100755 api/scripts/utilities/DbUtils.py
 delete mode 100755 api/scripts/utilities/FileUtils.py
 delete mode 100755 api/scripts/utilities/JsonUtils.py
 delete mode 100755 api/scripts/utilities/RequestUtils.py
 delete mode 100755 api/scripts/utilities/ResponseUtils.py
 delete mode 100755 api/scripts/utilities/SettingsUtils.py
 delete mode 100755 api/scripts/utilities/UserUtils.py
 delete mode 100755 api/scripts/utilities/__init__.py
 delete mode 100755 api/serializers.py
 delete mode 100644 api/signals.py
 delete mode 100644 api/templates/api/account_activation_message.html
 delete mode 100755 api/urls.py
 delete mode 100755 api/validation_definitions/IEEE/2791object.json
 delete mode 100755 api/validation_definitions/IEEE/description_domain.json
 delete mode 100755 api/validation_definitions/IEEE/error_domain.json
 delete mode 100755 api/validation_definitions/IEEE/execution_domain.json
 delete mode 100755 api/validation_definitions/IEEE/io_domain.json
 delete mode 100755 api/validation_definitions/IEEE/parametric_domain.json
 delete mode 100755 api/validation_definitions/IEEE/provenance_domain.json
 delete mode 100755 api/validation_definitions/IEEE/usability_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/IEEE2791-2020.schema
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/description_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/error_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/execution_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/io_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/parametric_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/provenance_domain.json
 delete mode 100755 api/validation_definitions/IEEE_sub/domains/usability_domain.json
 delete mode 100755 api/validation_definitions/uri_external
 delete mode 100755 api/views.py
 create mode 100644 config/settings.py

diff --git a/api/__init__.py b/api/__init__.py
deleted file mode 100755
index e69de29b..00000000
diff --git a/api/admin.py b/api/admin.py
deleted file mode 100755
index 44e4d46d..00000000
--- a/api/admin.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python3
-"""Django Admin
-
-Registers models for the Django Admin app
-"""
-
-from django.contrib import admin
-from api.models import BCO
-from api.model.prefix import Prefix, prefix_table
-from api.model.groups import GroupInfo
-
-class BcoModelAdmin(admin.ModelAdmin):
-    search_fields = ["contents", "object_id"]
-admin.site.register(BCO, BcoModelAdmin)
-admin.site.register(prefix_table)
-admin.site.register(GroupInfo)
-admin.site.register(Prefix)
diff --git a/api/apps.py b/api/apps.py
deleted file mode 100755
index f06dd842..00000000
--- a/api/apps.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python3
-"""Run code after start-up
-TODO: move things from settings.py into here.
-Source: https://stackoverflow.com/a/42744626/5029459
-Source: https://docs.djangoproject.com/en/3.2/ref/applications/#django.apps.AppConfig.ready
-"""
-
-import sys
-from django.apps import AppConfig
-from django.db.models.signals import post_migrate
-from api.signals import populate_models
-
-
-class ApiConfig(AppConfig):
-    """API Configuration"""
-
-    default_auto_field = "django.db.models.AutoField"
-    name = "api"
-
-    def ready(self):
-        """Create the anonymous user if they don't exist."""
-
-        if 'test' in sys.argv or 'loaddata' in sys.argv or 'flush' in sys.argv:
-            return
-        else:
-            post_migrate.connect(populate_models, sender=self)
\ No newline at end of file
diff --git a/api/fixtures/bootstrap.json b/api/fixtures/bootstrap.json
deleted file mode 100644
index 8a3d0590..00000000
--- a/api/fixtures/bootstrap.json
+++ /dev/null
@@ -1,2058 +0,0 @@
-[
-    {
-        "object_id": "https://w3id.org/biocompute/1.3.0/examples/UVP_BCO.json",
-        "etag": "39fb1c62f43ff72ac95f91a433d5e425fb08bc07ec0f719ecfd27fb3cd3a3635",
-        "spec_version": "https://w3id.org/biocompute/1.3.0/",
-        "provenance_domain": {
-            "name": "Lineage assignment for an isolate of M.
tuberculosis based on its single nucleotide polymorphism (SNP) profile based on UVC v1.0.", - "version": "v1.0", - "review": [ - { - "status": "approved", - "reviewer_comment": "Approved by GW staff.", - "date": "2017-11-12T12:30:48-0400", - "reviewer": { - "name": "Anjan Purkayastha", - "affiliation": "George Washington University", - "email": "anjan.purkayastha@gmail.com", - "contribution": [ - "curatedBy" - ] - } - }, - { - "status": "approved", - "reviewer_comment": "Approved by Critical Path Institute staff.", - "date": "2017-11-12T12:30:48-0400", - "reviewer": { - "name": "Marco Schito", - "affiliation": "Critical Path Institute", - "email": "mschito@c-path.org", - "contribution": [ - "curatedBy" - ] - } - }, - { - "status": "approved", - "date": "2017-11-12T12:30:48-0400", - "reviewer_comment": "Approved by Critical Path Institute staff.", - "reviewer": { - "name": "Kenneth Ramey", - "affiliation": "Critical Path Institute", - "email": "kramey@c-path.org", - "contribution": [ - "curatedBy" - ] - } - } - ], - "obsolete_after": "2118-09-26T14:43:43-0400", - "embargo": { - "start_time": "2000-09-26T14:43:43-0400", - "end_time": "2018-10-08T18:02:33-0400" - }, - "created": "2017-11-12T12:30:48-0400", - "modified": "2018-10-08T18:35:33-0400", - "contributors": [ - { - "name": "Matthew Ezewudo", - "affiliation": "Critical Path Institute", - "email": "mezewudo@c-path.org", - "contribution": [ - "authoredBy" - ] - }, - { - "name": "Jamie Posie", - "affiliation": "CDC Atlanta, GA", - "contribution": [ - "authoredBy" - ] - }, - { - "name": "Anjan Purkayastha", - "affiliation": "George Washington University", - "email": "anjan.purkayastha@gmail.com", - "contribution": [ - "authoredBy", - "curatedBy" - ] - }, - { - "name": "Marco Schito", - "affiliation": "Critical Path Institute", - "email": "mschito@c-path.org", - "contribution": [ - "authoredBy" - ] - }, - { - "name": "Charles Hadley King", - "affiliation": "George Washington University", - "email": "hadley_king@gwu.edu", - "contribution": [ - "authoredBy", - "curatedBy" - ], - "orcid": "https://orcid.org/0000-0003-1409-4549" - }, - { - "name": "ReseqTB Consortium", - "affiliation": "Critical Path Institute", - "email": "info@c-path.org", - "contribution": [ - "createdAt" - ] - } - ], - "license": "https://spdx.org/licenses/CC-BY-4.0.html" - }, - "usability_domain": [ - "Lineage assignment for an isolate of M. tuberculosis[taxonomy:1773] based on its single nucleotide polymorphism [so:0000694] (SNP) profile." 
- ], - "extension_domain": [ - { - "extension_schema": "https://raw.githubusercontent.com/biocompute-objects/extension_domain/1.1.0/scm/scm_extension.json", - "scm_extension": { - "scm_repository": "https://github.com/CPTR-ReSeqTB/UVP", - "scm_type": "git", - "scm_commit": "9e8f588b3cd3f5eebde29f7d2879e1a1e1c1aed3", - "scm_path": "UVP/scripts/UVP.py" - } - } - ], - "description_domain": { - "keywords": [ - "Mycobacterium tuberculosis", - "Phylogenetics", - "Bacterial lineage analysis", - "Single Nucleotide Polymorphism", - "SNP" - ], - "xref": [ - { - "namespace": "pubmed", - "name": "PubMed", - "ids": [ - "00000" - ], - "access_time": "2018-13-02T10:15-05:00" - }, - { - "namespace": "so", - "name": "Sequence Ontology", - "ids": [ - "0000694" - ], - "access_time": "2018-13-02T10:15-05:00" - }, - { - "namespace": "taxonomy", - "name": "Taxonomy", - "ids": [ - "1773" - ], - "access_time": "2018-13-02T10:15-05:00" - } - ], - "platform": [ - "Linux" - ], - "pipeline_steps": [ - { - "step_number": 1, - "name": "FastQValidator", - "description": "To verify if input file is in fastq format", - "version": "1.0.5", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_1.fastq.gz" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_2.fastq.gz" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/validation/Validation_report.txt" - } - ] - }, - { - "step_number": 2, - "name": "FastQC", - "description": "assess Quality of raw sequence reads", - "version": "0.11.5", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_1.fastq.gz" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_2.fastq.gz" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/fastqc/ERR552106_1_fastqc.html" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/fastqc/ERR552106_1_fastqc.zip" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/fastqc/ERR552106_2_fastqc.html" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/fastqc/ERR552106_2_fastqc.zip" - } - ] - }, - { - "step_number": 3, - "name": "Kraken", - "description": "Assesses species specificity of sequence reads", - "version": "0.10.5", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_1.fastq.gz" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_2.fastq.gz" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/kraken/final_report.txt" - } - ] - }, - { - "step_number": 4, - "name": "BWA", - "description": "Aligns sequence reads to reference genome", - "version": "0.7.12", - "prerequisite": [ - { - "name": "M. 
tuberculosis H37Rv genome reference file", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.fa" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_1.fastq.gz" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_2.fastq.gz" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.bam" - } - ] - }, - { - "step_number": 5, - "name": "Qualimap", - "description": "Assess mapping quality of aligned reads", - "version": "2.1.1", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/agogo.css" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/ajax-loader.gif" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/basic.css" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/bgfooter.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/bgtop.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/comment-bright.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/comment-close.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/comment.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/doctools.js" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/down-pressed.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/down.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/file.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/jquery.js" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/minus.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/plus.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/pygments.css" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/qualimap_logo_small.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/report.css" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/searchtools.js" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/underscore.js" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/up-pressed.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/up.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/css/websupport.js" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_coverage_0to50_histogram.png" - }, - { - "uri": 
"http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_coverage_across_reference.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_coverage_histogram.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_coverage_quotes.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_gc_content_per_window.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_homopolymer_indels.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_insert_size_across_reference.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_insert_size_histogram.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_mapping_quality_across_reference.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_mapping_quality_histogram.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_reads_clipping_profile.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_reads_content_per_read_position.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/images_qualimapReport/genome_uniq_read_starts_histogram.png" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/coverage_across_reference.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/coverage_histogram.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/duplication_rate_histogram.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/genome_fraction_coverage.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/homopolymer_indels.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/insert_size_across_reference.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/insert_size_histogram.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/mapped_reads_clipping_profile.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/mapped_reads_gc-content_distribution.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/mapped_reads_nucleotide_content.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/mapping_quality_across_reference.txt" - }, - { - "uri": 
"http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/raw_data_qualimapReport/mapping_quality_histogram.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/genome_results.txt" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/qualimap/qualimapReport.html" - } - ] - }, - { - "step_number": 6, - "name": "MarkDuplicates", - "description": "Removes duplicate reads from alignment", - "version": "1.134", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.no_dups.bam" - } - ] - }, - { - "step_number": 7, - "name": "IndelRealigner", - "description": "Perfoms re-alignment around insertions and deletions", - "version": "3.4.0", - "prerequisite": [ - { - "name": "M. tuberculosis H37Rv genome reference file", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.fa" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.no_dups.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.realigned.bam" - } - ] - }, - { - "step_number": 8, - "name": "BaseRecalibrator", - "description": "Recalibrates base quality scores", - "version": "3.4.0", - "prerequisite": [ - { - "name": "M. tuberculosis H37Rv genome reference file", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.fa" - } - }, - { - "name": "Variation sites file", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/snps.vcf" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.realigned.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.recalibrated.bam" - } - ] - }, - { - "step_number": 9, - "name": "BuildBamIndex", - "description": "Indexes sorted BAM files for variant calling", - "version": "1.134", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.recalibrated.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.recalibrated.bai" - } - ] - }, - { - "step_number": 10, - "name": "UnifiedGenotyper", - "description": "Calls variant positions in alignment", - "version": "3.4.0", - "prerequisite": [ - { - "name": "M. 
tuberculosis H37Rv genome reference file", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.fa" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.recalibrated.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK.vcf" - }, - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK.mpileup" - } - ] - }, - { - "step_number": 11, - "name": "VCFtools", - "description": "Filters raw VCF to exclude poor quality variants", - "version": "0.1.12b", - "prerequisite": [ - { - "name": "Excluded list file", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/excluded_loci.txt" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK.vcf" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK_filtered.vcf" - } - ] - }, - { - "step_number": 12, - "name": "SnpEff", - "description": "Annotates variants in VCF file", - "version": "4.1", - "prerequisite": [ - { - "name": "M. tuberculosis H37Rv GenBank File", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.gbk" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK_filtered.vcf" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK_annotated.vcf" - } - ] - }, - { - "step_number": 13, - "name": "parse_annotation.py", - "description": "Parses annotated VCF to create annotation text file", - "version": "", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK_annotated.vcf" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_Final_annotation.txt" - } - ] - }, - { - "step_number": 14, - "name": "lineage_parser.py", - "description": "Assigns Mycobacterium tuberculosis Complex lineage to isolate", - "version": "", - "prerequisite": [ - { - "name": "Lineage Markers File", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/lineage_markers.txt" - } - } - ], - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_Final_annotation.txt" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106.lineage_report.txt" - } - ] - }, - { - "step_number": 15, - "name": "BEDtools", - "description": "Creates loci based coverage statistics of genome coverage", - "version": "2.17.0", - "input_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/bam_files/ERR552106.recalibrated.bam" - } - ], - "output_list": [ - { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_genome_region_coverage.txt" - } - ] - }, - { - "step_number": 16, - "name": "resis_parser.py", - "description": "Creates a coverage depth and width table of all loci in isolate genome", - "version": "", - "input_list": [ - { - "uri": "[path_to_genome_loci_text_file]" - }, - { - "uri": "[path_to_per_position_depth_text_file]" - } - ], - "output_list": [ - { - "uri": 
"http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_Coverage.txt" - } - ] - } - ] - }, - "execution_domain": { - "script": [ - { - "uri": { - "uri": "https://github.com/CPTR-ReSeqTB/UVP/commit/9e8f588b3cd3f5eebde29f7d2879e1a1e1c1aed3" - } - } - ], - "script_driver": "Python", - "software_prerequisites": [ - { - "name": "BEDtools", - "version": "2.17.0", - "uri": { - "uri": "https://github.com/arq5x/bedtools/releases/tag/v2.17.0", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "5e4507c54355a4a38c6d3e7497a2836a123c6655" - } - }, - { - "name": "Bcftools", - "version": "1.2", - "uri": { - "uri": "https://github.com/samtools/bcftools/releases/download/1.2/bcftools-1.2.tar.bz2", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "352908143497da0640b928248165e83212dc4298" - } - }, - { - "name": "BWA", - "version": "0.7.12", - "uri": { - "uri": "https://sourceforge.net/projects/bio-bwa/files/bwa-0.7.12.tar.bz2/download", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "6389ca75328bae6d946bfdd58ff4beb0feebaedd" - } - }, - { - "name": "FastQC", - "version": "0.11.5", - "uri": { - "uri": "https://www.bioinformatics.babraham.ac.uk/projects/fastq_screen/fastq_screen_v0.13.0.tar.gz", - "access_time": "2018-10-08T18:35:33-0400" - } - }, - { - "name": "GATK", - "version": "3.4.0", - "uri": { - "uri": "https://github.com/broadgsa/gatk-protected/releases/tag/3.4", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "f19618653a0d23baaf147efe7f14aeb4eeb0cbb8" - } - }, - { - "name": "Kraken", - "version": "0.10.5", - "uri": { - "uri": "https://ccb.jhu.edu/software/kraken/dl/kraken-0.10.5-beta.tgz", - "access_time": "2018-10-08T18:35:33-0400" - } - }, - { - "name": "Picard", - "version": "1.134", - "uri": { - "uri": "https://github.com/broadinstitute/picard/releases/tag/1.134", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "a7a08c474e4d99346eec7a9956a8fe71943b5d80" - } - }, - { - "name": "Pigz", - "version": "2.3.3", - "uri": { - "uri": "http://springdale.math.ias.edu/data/puias/unsupported/7/SRPMS/pigz-2.3.3-1.sdl7.src.rpm", - "access_time": "2018-10-08T18:35:33-0400" - } - }, - { - "name": "Qualimap", - "version": "2.11", - "uri": { - "uri": "https://bitbucket.org/kokonech/qualimap/downloads/qualimap_v2.1.1.zip", - "access_time": "2018-10-08T18:35:33-0400" - } - }, - { - "name": "Samtools", - "version": "1.2", - "uri": { - "uri": "https://github.com/samtools/samtools/archive/1.2.zip", - "access_time": "2018-10-08T18:35:33-0400" - } - }, - { - "name": "SnpEff", - "version": "4.1", - "uri": { - "uri": "https://sourceforge.net/projects/snpeff/files/snpEff_v4_1l_core.zip/download", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "c96e21564b05d6a7912e4dd35f9ef6fe2e094fbb" - } - }, - { - "name": "Vcftools", - "version": "0.1.12b", - "uri": { - "uri": "https://sourceforge.net/projects/vcftools/files/vcftools_0.1.12.tar.gz/download", - "access_time": "2018-10-08T18:35:33-0400", - "sha1_checksum": "29a1ab67786e39be57cbb1ef4e0f6682110b7516" - } - } - ], - "external_data_endpoints": [ - { - "name": "BCOReSeqTB", - "url": "https://github.com/CPTR-ReSeqTB/UVP/" - } - ], - "environment_variables": { - "CORE": "8" - } - }, - "io_domain": { - "input_subdomain": [ - { - "uri": { - "filename": "Mycobacterium tuberculosis H37Rv, complete genome", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.fa" - } - }, - { - "uri": { - "filename": "Mycobacterium tuberculosis H37Rv, complete 
genome", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/NC_000962.gbk" - } - }, - { - "uri": { - "filename": "excluded_loci", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/excluded_loci.txt" - } - }, - { - "uri": { - "filename": "lineage_markers", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/lineage_markers.txt" - } - }, - { - "uri": { - "filename": "variation sites", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_reference_files/snps.vcf" - } - }, - { - "uri": { - "filename": "ERR552106_2.fastq.gz", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_2.fastq.gz" - } - }, - { - "uri": { - "filename": "ERR552106_1.fastq.gz", - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_input_fastq_files/ERR552106_1.fastq.gz" - } - } - ], - "output_subdomain": [ - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106.lineage_report.txt" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106.log" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_Coverage.txt" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106ERR552106_Final_annotation.txt" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK.vcf" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_GATK_filtered.vcf" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_Lineage.txt" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_deleted_loci.txt" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://bco.reseqtb.org/UVP-BCO/UVPv2.4.1_sample_results/ERR552106/ERR552106_genome_region_coverage.txt" - } - } - ] - }, - "error_domain": { - "empirical_error": { - "description": [ - "This test object represents tests done with single lineage sequences to establish the sensitivity of UVP to detect lineage and antibiotic resistant variants", - "Test objective was to evaluate the ability of UVP to identify strain lineage and antibiotic resistant variants from samples of high, medium, low sequence qualities and depths of coverage of 10, 15, 20, 25 and 30-fold. Simulated reads developed from 12 lineage-specific M. tuberculosis (Mtb) genome sequences were used to test UVP." 
- ], - "parameters": { - "sample_type": "single Mtb lineages (n = 12) with antibiotic resistant variants introduced in silico", - "total_sample_size": "180", - "platform": "Illumina HiSeq 2000", - "paired_end": true, - "length": "100", - "simulated": true, - "program": "ART", - "simulator_parameters": [ - { - "ss": "hs20" - }, - { - "l": "100" - }, - { - "m": "500" - }, - { - "qU": "45" - }, - { - "s": "100" - } - ], - "sequence_quality_level_parameters": { - "description": "these correspond to the ART parameters: qs, qs2, ir, ir2, dr, dr2.", - "sequence_quality_high": { - "substitution_error_rate_R1": "0.0004", - "substitution_error_rate_R2": "0.0007", - "insertion_error_rate_R1": "0.00009", - "insertion_error_rate_R2": "0.00015", - "deletion_error_rate_R1": "0.00011", - "deletion_error_rate_R2": "0.00023", - "units": "errors per sequenced base" - }, - "sequence_quality_medium": { - "substitution_error_rate_R1": "0.004", - "substitution_error_rate_R2": "0.007", - "insertion_error_rate_R1": "0.0009", - "insertion_error_rate_R2": "0.0015", - "deletion_error_rate_R1": "0.0011", - "deletion_error_rate_R2": "0.0023", - "units": "errors per sequenced base" - }, - "sequence_quality_low": { - "substitution_error_rate_R1": "0.04", - "substitution_error_rate_R2": "0.07", - "insertion_error_rate_R1": "0.009", - "insertion_error_rate_R2": "0.015", - "deletion_error_rate_R1": "0.011", - "deletion_error_rate_R2": "0.023", - "units": "errors per sequenced base" - } - } - }, - "summary results": { - "sequence_quality_high": { - "sample size": "60", - "result": { - "lineage_assignment_rate": "93.33", - "mean_AR_identification_rate": "86.72", - "Units": "Percentage" - } - }, - "sequence_quality_medium": { - "sample size": "60", - "result": { - "lineage_assignment_rate": "90.00", - "mean_AR_identification_rate": "81.00", - "Units": "Percentage" - } - }, - "sequence_quality_low": { - "sample size": "60", - "result": { - "lineage_assignment_rate": "0.00", - "mean_AR_identification_rate": "0.00", - "Units": "Percentage" - } - }, - "coverage_10": { - "sample size": "36", - "result": { - "lineage_assignment_rate": "41.67", - "mean_AR_identification_rate": "22.42", - "Units": "Percentage" - } - }, - "coverage_15": { - "sample size": "36", - "result": { - "lineage_assignment_rate": "63.89", - "mean_AR_identification_rate": "57.14", - "Units": "Percentage" - } - }, - "coverage_20": { - "sample size": "36", - "result": { - "lineage_assignment_rate": "66.67", - "mean_AR_identification_rate": "66.46", - "Units": "Percentage" - } - }, - "coverage_25": { - "sample size": "36", - "result": { - "lineage_assignment_rate": "66.67", - "mean_AR_identification_rate": "66.66", - "Units": "Percentage" - } - }, - "coverage_30": { - "sample size": "36", - "result": { - "lineage_assignment_rate": "66.67", - "mean_AR_identification_rate": "66.66", - "Units": "Percentage" - } - } - }, - "detailed results": [ - { - "sequence_quality_high": { - "coverage_10": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "66.67", - "mean_AR_identification_rate": "40.75", - "Units": "Percentage" - } - }, - "coverage_15": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", - "mean_AR_identification_rate": "92.85", - "Units": "Percentage" - } - }, - "coverage_20": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", - "mean_AR_identification_rate": "100.00", - "Units": "Percentage" - } - }, - "coverage_25": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", 
- "mean_AR_identification_rate": "100.00", - "Units": "Percentage" - } - }, - "coverage_30": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", - "mean_AR_identification_rate": "100.00", - "Units": "Percentage" - } - } - } - }, - { - "sequence_quality_medium": { - "coverage_10": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "58.34", - "mean_AR_identification_rate": "26.50", - "Units": "Percentage" - } - }, - "coverage_15": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "91.66", - "mean_AR_identification_rate": "78.57", - "Units": "Percentage" - } - }, - "coverage_20": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", - "mean_AR_identification_rate": "99.40", - "Units": "Percentage" - } - }, - "coverage_25": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", - "mean_AR_identification_rate": "100.00", - "Units": "Percentage" - } - }, - "coverage_30": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "100.00", - "mean_AR_identification_rate": "100.00", - "Units": "Percentage" - } - } - } - }, - { - "sequence_quality_low": { - "coverage_10": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "0.00", - "mean_AR_identification_rate": "0.00", - "Units": "Percentage" - } - }, - "coverage_15": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "0.00", - "mean_AR_identification_rate": "0.00", - "Units": "Percentage" - } - }, - "coverage_20": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "0.00", - "mean_AR_identification_rate": "0.00", - "Units": "Percentage" - } - }, - "coverage_25": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "0.00", - "mean_AR_identification_rate": "0.00", - "Units": "Percentage" - } - }, - "coverage_30": { - "sample size": "12", - "result": { - "lineage_assignment_rate": "0.00", - "mean_AR_identification_rate": "0.00", - "Units": "Percentage" - } - } - } - } - ] - }, - "algorithmic_error": { - "placeholder": "for algorithmic error domain" - } - } - }, - { - "object_id": "https://raw.githubusercontent.com/biocompute-objects/BCO_Specification/1.4.2/examples/HIVE_metagenomics.json", - "etag": "caed07395b6afb58c8810d174a315260124f687740bc3bb14387de5e84c7e3d4", - "spec_version": "https://w3id.org/ieee/ieee-2791-schema/", - "provenance_domain": { - "name": "Healthy human fecal metagenomic diversity", - "version": "1.0.0", - "review": [ - { - "status": "approved", - "reviewer_comment": "Approved by GW staff.", - "reviewer": { - "name": "Charles Hadley King", - "affiliation": "George Washington University", - "email": "hadley_king@gwu.edu", - "contribution": [ - "curatedBy" - ], - "orcid": "https://orcid.org/0000-0003-1409-4549" - } - } - ], - "obsolete_after": "2118-09-26T14:43:43-0400", - "embargo": { - "start_time": "2000-09-26T14:43:43-0400", - "end_time": "2000-09-26T14:43:45-0400" - }, - "created": "2018-11-29T11:29:08-0500", - "modified": "2018-11-30T11:29:08-0500", - "contributors": [ - { - "name": "Charles Hadley King", - "affiliation": "George Washington University", - "email": "hadley_king@gwu.edu", - "contribution": [ - "createdBy", - "curatedBy", - "authoredBy" - ], - "orcid": "https://orcid.org/0000-0003-1409-4549" - }, - { - "name": "Raja Mazumder", - "affiliation": "George Washington University", - "email": "mazumder@gwu.edu", - "contribution": [ - "createdBy", - "curatedBy", - "authoredBy" - ], - "orcid": "https://orcid.org/0000-0001-88238-9945" - } - 
], - "license": "https://spdx.org/licenses/CC-BY-4.0.html" - }, - "usability_domain": [ - "Identify the most common organism present in a human [taxID:9606] fecal [UBERON:0001988] sample, ", - "Identify the general community composition of organisms in a human [taxID:9606] fecal [UBERON:0001988] sample, ", - "CensuScope is used to do a census of the composition of the read files. Based on a user-defined threshold, organisms identified are used for alignment in the Hexagon alignment." - ], - "extension_domain": [ - { - "extension_schema": "https://raw.githubusercontent.com/biocompute-objects/extension_domain/1.1.0/scm/scm_extension.json", - "scm_extension": { - "scm_repository": "https://github.com/biocompute-objects/HIVE_metagenomics", - "scm_type": "git", - "scm_commit": "e4620f642fb20557f6c679397696614305ed07b1", - "scm_path": "biocompute-objects/HIVE_metagenomics", - "scm_preview": "https://github.com/example/repo1/blob/c9ffea0b60fa3bcf8e138af7c99ca141a6b8fb21/workflow/hive-viral-mutation-detection.cwl" - } - } - ], - "description_domain": { - "keywords": [ - "metagenome", - "metagenomic analysis", - "fecal" - ], - "xref": [ - { - "namespace": "uberon", - "name": "Uber Anatomy Ontology", - "ids": [ - "0001988" - ], - "access_time": "2016-11-30T06:46-0500" - }, - { - "namespace": "taxonomy", - "name": "Taxonomy", - "ids": [ - "9606" - ], - "access_time": "2016-11-30T06:46-0500" - } - ], - "platform": [ - "hive" - ], - "pipeline_steps": [ - { - "step_number": 1, - "name": "CensuScope", - "description": "Detect taxonomic composition of a metagenomic data set.", - "version": "1.3", - "prerequisite": [ - { - "name": "Filtered_NT_feb18_2016", - "uri": { - "uri": "https://hive.biochemistry.gwu.edu/genome/513957", - "access_time": "2016-11-30T06:46-0500" - } - } - ], - "input_list": [ - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545722", - "access_time": "2016-11-30T06:46-0500" - }, - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545721", - "access_time": "2016-11-30T06:46-0500" - } - ], - "output_list": [ - { - "uri": "https://hive.biochemistry.gwu.edu/546223/dnaAccessionBasedResult.csv", - "access_time": "2016-11-30T06:46-0500" - } - ] - }, - { - "step_number": 2, - "name": "HIVE-hexagon", - "description": "Alignment of reads to a set of references", - "version": "1.3", - "input_list": [ - { - "uri": "http://example.com/data/546223/dnaAccessionBased.csv", - "access_time": "2016-11-30T06:46-0500" - }, - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545722", - "access_time": "2016-11-30T06:46-0500" - }, - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545721", - "access_time": "2016-11-30T06:46-0500" - } - ], - "output_list": [ - { - "uri": "https://hive.biochemistry.gwu.edu/546232/alCount-Unalignedo524569-alCount--1.csv", - "access_time": "2016-11-30T06:46-0500" - } - ] - } - ] - }, - "execution_domain": { - "script": [ - { - "uri": { - "uri": "https://github.com/biocompute-objects/HIVE_metagenomics/blob/master/driverHIVEmetagenomic.py" - } - } - ], - "script_driver": "shell", - "software_prerequisites": [ - { - "name": "CensuScope", - "version": "albinoni.2", - "uri": { - "uri": "http://example.com/dna.cgi?cmd=dna-screening&cmdMode=-", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "name": "HIVE-hexagon", - "version": "babajanian.1", - "uri": { - "uri": "http://example.com/dna.cgi?cmd=dna-hexagon&cmdMode=-", - "access_time": "2017-01-24T09:40:17-0500" - } - } - ], - "external_data_endpoints": [ - { - "name": "HIVE", - "url": 
"https://hive.biochemistry.gwu.edu/dna.cgi?cmd=login" - }, - { - "name": "access to e-utils", - "url": "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/" - } - ], - "environment_variables": { - "key": "HOSTTYPE", - "value": "x86_64-linux" - } - }, - "parametric_domain": [ - { - "param": "seed", - "value": "14", - "step": "2" - }, - { - "param": "minimum_match_len", - "value": "66", - "step": "2" - }, - { - "param": "divergence_threshold_percent", - "value": "0.30", - "step": "2" - }, - { - "param": "minimum_coverage", - "value": "15", - "step": "2" - }, - { - "param": "freq_cutoff", - "value": "0.10", - "step": "2" - } - ], - "io_domain": { - "input_subdomain": [ - { - "uri": { - "filename": "Hepatitis C virus genotype 1", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/22129792", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus type 1b complete genome", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/5420376", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus (isolate JFH-1) genomic RNA", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/13122261", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus clone J8CF, complete genome", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/386646758", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus S52 polyprotein gene", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/295311559", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "HCV1a_drug_resistant_sample0001-01", - "uri": "http://example.com/nuc-read/514682", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "HCV1a_drug_resistant_sample0001-02", - "uri": "http://example.com/nuc-read/514683", - "access_time": "2017-01-24T09:40:17-0500" - } - } - ], - "output_subdomain": [ - { - "mediatype": "text/csv", - "uri": { - "uri": "http://example.com/data/514769/dnaAccessionBased.csv", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://example.com/data/514801/SNPProfile*.csv", - "access_time": "2017-01-24T09:40:17-0500" - } - } - ] - }, - "error_domain": { - "empirical_error": { - "false_negative_alignment_hits": "<0.0010", - "false_discovery": "<0.05" - }, - "algorithmic_error": { - "false_positive_mutation_calls_discovery": "<0.00005", - "false_discovery": "0.005" - } - } - }, - { - "object_id": "https://raw.githubusercontent.com/biocompute-objects/BCO_Specification/1.4.2/examples/HIVE_metagenomics.json", - "etag": "caed07395b6afb58c8810d174a315260124f687740bc3bb14387de5e84c7e3d4", - "spec_version": "https://w3id.org/ieee/ieee-2791-schema/", - "provenance_domain": { - "name": "Healthy human fecal metagenomic diversity", - "version": "1.0.0", - "review": [ - { - "status": "approved", - "reviewer_comment": "Approved by GW staff.", - "reviewer": { - "name": "Charles Hadley King", - "affiliation": "George Washington University", - "email": "hadley_king@gwu.edu", - "contribution": [ - "curatedBy" - ], - "orcid": "https://orcid.org/0000-0003-1409-4549" - } - } - ], - "obsolete_after": "2118-09-26T14:43:43-0400", - "embargo": { - "start_time": "2000-09-26T14:43:43-0400", - "end_time": "2000-09-26T14:43:45-0400" - }, - "created": "2018-11-29T11:29:08-0500", - "modified": "2018-11-30T11:29:08-0500", - "contributors": [ - { - "name": "Charles Hadley King", - "affiliation": "George Washington University", - 
"email": "hadley_king@gwu.edu", - "contribution": [ - "createdBy", - "curatedBy", - "authoredBy" - ], - "orcid": "https://orcid.org/0000-0003-1409-4549" - }, - { - "name": "Raja Mazumder", - "affiliation": "George Washington University", - "email": "mazumder@gwu.edu", - "contribution": [ - "createdBy", - "curatedBy", - "authoredBy" - ], - "orcid": "https://orcid.org/0000-0001-88238-9945" - } - ], - "license": "https://spdx.org/licenses/CC-BY-4.0.html" - }, - "usability_domain": [ - "Identify the most common organism present in a human [taxID:9606] fecal [UBERON:0001988] sample, ", - "Identify the general community composition of organisms in a human [taxID:9606] fecal [UBERON:0001988] sample, ", - "CensuScope is used to do a census of the composition of the read files. Based on a user-defined threshold, organisms identified are used for alignment in the Hexagon alignment." - ], - "extension_domain": [ - { - "extension_schema": "https://raw.githubusercontent.com/biocompute-objects/extension_domain/1.1.0/scm/scm_extension.json", - "scm_extension": { - "scm_repository": "https://github.com/biocompute-objects/HIVE_metagenomics", - "scm_type": "git", - "scm_commit": "e4620f642fb20557f6c679397696614305ed07b1", - "scm_path": "biocompute-objects/HIVE_metagenomics", - "scm_preview": "https://github.com/example/repo1/blob/c9ffea0b60fa3bcf8e138af7c99ca141a6b8fb21/workflow/hive-viral-mutation-detection.cwl" - } - } - ], - "description_domain": { - "keywords": [ - "metagenome", - "metagenomic analysis", - "fecal" - ], - "xref": [ - { - "namespace": "uberon", - "name": "Uber Anatomy Ontology", - "ids": [ - "0001988" - ], - "access_time": "2016-11-30T06:46-0500" - }, - { - "namespace": "taxonomy", - "name": "Taxonomy", - "ids": [ - "9606" - ], - "access_time": "2016-11-30T06:46-0500" - } - ], - "platform": [ - "hive" - ], - "pipeline_steps": [ - { - "step_number": 1, - "name": "CensuScope", - "description": "Detect taxonomic composition of a metagenomic data set.", - "version": "1.3", - "prerequisite": [ - { - "name": "Filtered_NT_feb18_2016", - "uri": { - "uri": "https://hive.biochemistry.gwu.edu/genome/513957", - "access_time": "2016-11-30T06:46-0500" - } - } - ], - "input_list": [ - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545722", - "access_time": "2016-11-30T06:46-0500" - }, - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545721", - "access_time": "2016-11-30T06:46-0500" - } - ], - "output_list": [ - { - "uri": "https://hive.biochemistry.gwu.edu/546223/dnaAccessionBasedResult.csv", - "access_time": "2016-11-30T06:46-0500" - } - ] - }, - { - "step_number": 2, - "name": "HIVE-hexagon", - "description": "Alignment of reads to a set of references", - "version": "1.3", - "input_list": [ - { - "uri": "http://example.com/data/546223/dnaAccessionBased.csv", - "access_time": "2016-11-30T06:46-0500" - }, - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545722", - "access_time": "2016-11-30T06:46-0500" - }, - { - "uri": "https://hive.biochemistry.gwu.edu/nuc-read/545721", - "access_time": "2016-11-30T06:46-0500" - } - ], - "output_list": [ - { - "uri": "https://hive.biochemistry.gwu.edu/546232/alCount-Unalignedo524569-alCount--1.csv", - "access_time": "2016-11-30T06:46-0500" - } - ] - } - ] - }, - "execution_domain": { - "script": [ - { - "uri": { - "uri": "https://github.com/biocompute-objects/HIVE_metagenomics/blob/master/driverHIVEmetagenomic.py" - } - } - ], - "script_driver": "shell", - "software_prerequisites": [ - { - "name": "CensuScope", - "version": "albinoni.2", - 
"uri": { - "uri": "http://example.com/dna.cgi?cmd=dna-screening&cmdMode=-", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "name": "HIVE-hexagon", - "version": "babajanian.1", - "uri": { - "uri": "http://example.com/dna.cgi?cmd=dna-hexagon&cmdMode=-", - "access_time": "2017-01-24T09:40:17-0500" - } - } - ], - "external_data_endpoints": [ - { - "name": "HIVE", - "url": "https://hive.biochemistry.gwu.edu/dna.cgi?cmd=login" - }, - { - "name": "access to e-utils", - "url": "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/" - } - ], - "environment_variables": { - "key": "HOSTTYPE", - "value": "x86_64-linux" - } - }, - "parametric_domain": [ - { - "param": "seed", - "value": "14", - "step": "2" - }, - { - "param": "minimum_match_len", - "value": "66", - "step": "2" - }, - { - "param": "divergence_threshold_percent", - "value": "0.30", - "step": "2" - }, - { - "param": "minimum_coverage", - "value": "15", - "step": "2" - }, - { - "param": "freq_cutoff", - "value": "0.10", - "step": "2" - } - ], - "io_domain": { - "input_subdomain": [ - { - "uri": { - "filename": "Hepatitis C virus genotype 1", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/22129792", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus type 1b complete genome", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/5420376", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus (isolate JFH-1) genomic RNA", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/13122261", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus clone J8CF, complete genome", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/386646758", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "Hepatitis C virus S52 polyprotein gene", - "uri": "http://www.ncbi.nlm.nih.gov/nuccore/295311559", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "HCV1a_drug_resistant_sample0001-01", - "uri": "http://example.com/nuc-read/514682", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "uri": { - "filename": "HCV1a_drug_resistant_sample0001-02", - "uri": "http://example.com/nuc-read/514683", - "access_time": "2017-01-24T09:40:17-0500" - } - } - ], - "output_subdomain": [ - { - "mediatype": "text/csv", - "uri": { - "uri": "http://example.com/data/514769/dnaAccessionBased.csv", - "access_time": "2017-01-24T09:40:17-0500" - } - }, - { - "mediatype": "text/csv", - "uri": { - "uri": "http://example.com/data/514801/SNPProfile*.csv", - "access_time": "2017-01-24T09:40:17-0500" - } - } - ] - }, - "error_domain": { - "empirical_error": { - "false_negative_alignment_hits": "<0.0010", - "false_discovery": "<0.05" - }, - "algorithmic_error": { - "false_positive_mutation_calls_discovery": "<0.00005", - "false_discovery": "0.005" - } - } - }, - { - "object_id": "https://raw.githubusercontent.com/biocompute-objects/BCO_Specification/1.4.2glycosylation-sites-UniCarbKB", - "etag": "5741d66ddf7881db33f7075ce8b64b941bd7cc001965f31682e5da9966c7f3ba", - "spec_version": "https://w3id.org/ieee/ieee-2791-schema/", - "provenance_domain": { - "name": "glycosylation-sites-UniCarbKB", - "version": "1.0", - "review": [ - { - "status": "approved", - "reviewer_comment": "The dataset has passed the manual and automated QC steps and the readme has also been reviewed", - "reviewer": { - "name": "Rahi Navelkar", - "affiliation": "The George Washington University", - "email": "rsn13@gwu.edu", - "contribution": 
[ - "curatedBy" - ] - } - } - ], - "created": "2018-02-21T14:46:55-5:00", - "modified": "2018-10-10T11:34:02-5:00", - "contributors": [ - { - "name": "Matthew Campbell", - "affiliation": "Institute for Glycomics, Griffith University, Gold Coast, Queensland, Australia", - "email": "m.campbell2@griffith.edu.au", - "contribution": [ - "contributedBy" - ] - }, - { - "name": "Rahi Navelkar", - "affiliation": "The George Washington University", - "email": "rsn13@gwu.edu", - "contribution": [ - "curatedBy" - ] - }, - { - "name": "Robel Kahsay", - "affiliation": "The George Washington University", - "email": "hadley_king@gwu.edu", - "contribution": [ - "createdBy" - ] - } - ], - "license": "https://creativecommons.org/licenses/by/4.0/" - }, - "usability_domain": [ - "List of human [taxid:9606] proteins with information on glycosylation sites from UniCarbKB database [https://academic.oup.com/nar/article/42/D1/D215/1052197, https://doi.org/10.1093/nar/gkt1128]" - ], - "extension_domain": [ - { - "extension_schema": "https://raw.githubusercontent.com/biocompute-objects/extension_domain/1.1.0/license/license_extension.json", - "license_extension": { - "data_license": "https://creativecommons.org/licenses/by/4.0/", - "scripts_license": "https://www.gnu.org/licenses/gpl-3.0.en.html" - } - }, - { - "extension_schema": "https://raw.githubusercontent.com/biocompute-objects/extension_domain/1.1.0/scm/scm_extension.json", - "scm_extension": { - "scm_repository": "https://github.com/GW-HIVE/glygen-backend-integration/", - "scm_type": "git", - "scm_commit": "d34b85553e775dd5452005d786fe6e47d6048ee0", - "scm_path": "/data/projects/glygen/generated/datasets/reviewed/human_proteoform_glycosylation_sites_unicarbkb_glytoucan.readme.txt" - } - } - ], - "description_domain": { - "keywords": [ - "protein", - "canonical", - "glycosylation", - "glycan" - ], - "xref": [ - { - "namespace": "taxonomy", - "name": "Taxonomy", - "ids": [ - "9606" - ], - "access_time": "2018-21-02T14:46:55-5:00" - } - ], - "platform": [ - "centos7" - ], - "pipeline_steps": [ - { - "step_number": 1, - "name": "ac2canonical.py", - "description": "Python script for mapping the UniProtKB accessions in the input file to the UniProtKB canonical accessions ", - "version": "", - "input_list": [ - { - "uri": "/human_protein_position_pmid_id_aminoacid_glytoucan_2018_09_04_07_51_27.txt" - } - ], - "output_list": [ - { - "uri": "human_protein_position_pmid_id_aminoacid_glytoucan_2018_09_04_07_51_27.txt" - } - ] - }, - { - "step_number": 2, - "name": "make-proteoform_glycosylation_sites_unicarbkb_glytoucan-csv-step2b.py", - "description": "Python scripts for retrieving glycosylation type or linkage type through UniCarbKB structure webpage ", - "input_list": [ - { - "uri": "human_protein_position_pmid_id_aminoacid_glytoucan_2018_09_04_07_51_27.txt" - } - ], - "output_list": [ - { - "uri": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.csv" - } - ] - }, - { - "step_number": 2, - "name": "make-proteoform_glycosylation_sites_unicarbkb_glytoucan-csv-step2b.py", - "description": "Python scripts for retrieving glycosylation type or linkage type through UniCarbKB structure webpage ", - "input_list": [ - { - "uri": "human_protein_position_pmid_id_aminoacid_glytoucan_2018_09_04_07_51_27.txt" - } - ], - "output_list": [ - { - "uri": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.csv" - } - ] - }, - { - "step_number": 3, - "name": "make-proteoform_glycosylation_sites_unicarbkb_glytoucan-csv-step3.py", - "description": "Python script for quality 
check of the processed file. Records which fall under one or more following criteria's are flagged and eliminated and can be accessed using the log file. The elimination steps include - a. If the protein accession is not included in UniProtKB protein list - UniProtKB Nov-2017 Release b. If the amino acid position does not match to the amino acid on the associated position on fasta sequence - UniProtKB Nov-2017 Release c. If the id (UnicarbKB structure id) is not present in input file d. If the glycosylation type (linkage type) is not retrieved through step 3 e. If a serine or threonine is reported for an N-linked glycan structure f. If an asparagine is reported for an O-linked glycan structure", - "input_list": [ - { - "uri": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.csv" - }, - { - "uri": "human_protein_all.fasta" - } - ], - "output_list": [ - { - "uri": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.csv" - }, - { - "uri": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.log" - } - ] - } - ] - }, - "execution_domain": { - "script": [ - { - "uri": { - "uri": "https://github.com/glygener/glygen-backend-integration/blob/master/integration/ac2canonical.py" - } - }, - { - "uri": { - "uri": "https://github.com/glygener/glygen-backend-integration/blob/master/integration/make-proteoform_glycosylation_sites_unicarbkb_glytoucan-csv-step2a.py" - } - }, - { - "uri": { - "uri": "https://github.com/glygener/glygen-backend-integration/blob/master/integration/make-proteoform_glycosylation_sites_unicarbkb_glytoucan-csv-step2b.py" - } - }, - { - "uri": { - "uri": "https://github.com/glygener/glygen-backend-integration/blob/master/integration/make-proteoform_glycosylation_sites_unicarbkb_glytoucan-csv-step3.py" - } - } - ], - "script_driver": "manual", - "software_prerequisites": [ - { - "name": "Python", - "version": "2.7.13", - "uri": { - "uri": "https://www.python.org/downloads/release/python-2713/", - "access_time": "2017-01-24T09:40:17-0500", - "sha1_checksum": "17add4bf0ad0ec2f08e0cae6d205c700" - } - } - ], - "external_data_endpoints": [ - { - "name": "UniCarbKB", - "url": "http://www.unicarbkb.org/" - }, - { - "name": "access glygen-backend-integration", - "url": "https://github.com/glygener/glygen-backend-integration" - } - ], - "environment_variables": { - } - }, - "io_domain": { - "input_subdomain": [ - { - "uri": { - "filename": "human_protein_position_pmid_id_aminoacid_glytoucan_2018_09_04_07_51_27.txt", - "uri": "http://data.glygen.org/datasets/source/human_protein_position_pmid_id_aminoacid_glytoucan_2018_09_04_07_51_27.txt", - "access_time": "2018-10-10T11:34:02-5:00" - } - }, - { - "uri": { - "filename": "human_protein_all.fasta", - "uri": "http://data.glygen.org/GLYDS00053", - "access_time": "2018-10-10T11:34:02-5:00" - } - } - ], - "output_subdomain": [ - { - "mediatype": "csv/text", - "uri": { - "filename": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.log", - "uri": "http://data.glygen.org/datasets/logs/human_proteoform_glycosylation_sites_unicarbkb_glytoucan.log", - "access_time": "2018-10-10T11:37:02-5:00" - } - }, - { - "mediatype": "csv/text", - "uri": { - "filename": "human_proteoform_glycosylation_sites_unicarbkb_glytoucan.csv", - "uri": "http://data.glygen.org/GLYDS00040", - "access_time": "2018-10-10T11:37:02-5:00" - } - } - ] - }, - "error_domain": { - "empirical_error": { - "comment": "Unique value statistics for the dataset", - "statistics": [ - { - "key": "uniprotkb_canonical_ac", - "value": 92, - "description": "Accession 
assigned to the protein isoform chosen to be the canonical sequence in UniProtKB database" - }, - { - "key": "glycosylation_site", - "value": 223, - "description": "Site on the protein sequence where glycosylation is observed" - }, - { - "key": "evidence", - "value": 163, - "description": "NCBI PubMed Id (PMID) as evidence for the entry" - }, - { - "key": "unicarbkb_id", - "value": 984, - "description": "UnicarbKB data structure identifier" - }, - { - "key": "glytoucan_ac", - "value": 824, - "description": "Unique accession assigned to the registered glycan structure in GlyTouCan database" - }, - { - "key": "amino_acid", - "value": 3, - "description": "Three letter code abbreviation of the amino acid" - }, - { - "key": "glycosylation_type", - "value": 3, - "description": "Type of glycosylation linkage type" - } - ] - }, - "algorithmic_error": { - } - } - } -] \ No newline at end of file diff --git a/api/fixtures/metafixtures b/api/fixtures/metafixtures deleted file mode 100644 index d66ca52f..00000000 --- a/api/fixtures/metafixtures +++ /dev/null @@ -1,58 +0,0 @@ -[ - { - "model": "api.bco_draft_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.bco_publish_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.galaxy_draft_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.galaxy_publish_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.glygen_draft_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.glygen_publish_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.oncomx_draft_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - }, - { - "model": "api.oncomx_publish_meta", - "pk": 1, - "fields": { - "n_objects": "1" - } - } -] diff --git a/api/fixtures/metafixtures.json b/api/fixtures/metafixtures.json deleted file mode 100644 index d222310b..00000000 --- a/api/fixtures/metafixtures.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "model": "api.prefix_table", - "pk": 1, - "fields": { - "prefix": "BCO", - "n_objects": "1" - } - } -] diff --git a/api/keys.sh b/api/keys.sh deleted file mode 100755 index ed33c02b..00000000 --- a/api/keys.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/bash - -cd .. - -echo " " -echo " " -echo "Anon key for the installation is..." -sqlite3 db.sqlite3 'SELECT B.key FROM auth_user AS A JOIN authtoken_token AS B ON A.id = B.user_id WHERE A.username = "anon";' -echo " " -echo " " -echo "Wheel key for the installation is..." 
-sqlite3 db.sqlite3 'SELECT B.key FROM auth_user AS A JOIN authtoken_token AS B ON A.id = B.user_id WHERE A.username = "wheel";' -echo " " -echo " " - -cd api \ No newline at end of file diff --git a/api/migrations/0001_initial.py b/api/migrations/0001_initial.py deleted file mode 100644 index 538b352a..00000000 --- a/api/migrations/0001_initial.py +++ /dev/null @@ -1,159 +0,0 @@ -# Generated by Django 3.2 on 2021-10-01 12:33 - -from django.conf import settings -from django.db import migrations, models -import django.db.models.deletion -import django.utils.timezone - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("auth", "0012_alter_user_first_name_max_length"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="meta_table", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("n_objects", models.IntegerField()), - ("prefix", models.CharField(max_length=5)), - ], - ), - migrations.CreateModel( - name="new_users", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("email", models.EmailField(max_length=254)), - ("temp_identifier", models.TextField(max_length=100)), - ("token", models.TextField(blank=True, null=True)), - ("hostname", models.TextField(blank=True, null=True)), - ("created", models.DateTimeField(default=django.utils.timezone.now)), - ], - ), - migrations.CreateModel( - name="prefixes", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("prefix", models.CharField(max_length=5)), - ( - "owner_group", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="auth.group", - to_field="name", - ), - ), - ( - "owner_user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - to_field="username", - ), - ), - ], - ), - migrations.CreateModel( - name="group_info", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "delete_members_on_group_deletion", - models.BooleanField(default=False), - ), - ("description", models.TextField()), - ("expiration", models.DateTimeField(blank=True, null=True)), - ("max_n_members", models.IntegerField(blank=True, null=True)), - ( - "group", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="auth.group", - to_field="name", - ), - ), - ( - "owner_user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - to_field="username", - ), - ), - ], - ), - migrations.CreateModel( - name="bco", - fields=[ - ( - "id", - models.AutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("contents", models.JSONField()), - ("object_class", models.TextField(blank=True, null=True)), - ("object_id", models.TextField()), - ("prefix", models.CharField(max_length=5)), - ("schema", models.TextField()), - ("state", models.TextField()), - ("last_update", models.DateTimeField()), - ( - "owner_group", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="auth.group", - to_field="name", - ), - ), - ( - "owner_user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - to_field="username", - ), - ), - 
], - ), - ] diff --git a/api/migrations/0002_auto_20220124_2356.py b/api/migrations/0002_auto_20220124_2356.py deleted file mode 100644 index 778906d1..00000000 --- a/api/migrations/0002_auto_20220124_2356.py +++ /dev/null @@ -1,60 +0,0 @@ -# Generated by Django 3.2.10 on 2022-01-24 23:56 - -from django.conf import settings -from django.db import migrations, models -import django.db.models.deletion -import django.utils.timezone - - -class Migration(migrations.Migration): - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("api", "0001_initial"), - ] - - operations = [ - migrations.AddField( - model_name="prefixes", - name="certifying_key", - field=models.TextField(blank=True, null=True), - ), - migrations.AddField( - model_name="prefixes", - name="certifying_server", - field=models.TextField(blank=True, null=True), - ), - migrations.AddField( - model_name="prefixes", - name="created", - field=models.DateTimeField( - blank=True, default=django.utils.timezone.now, null=True - ), - ), - migrations.AddField( - model_name="prefixes", - name="created_by", - field=models.ForeignKey( - default="wheel", - on_delete=django.db.models.deletion.CASCADE, - related_name="created_by", - to=settings.AUTH_USER_MODEL, - to_field="username", - ), - ), - migrations.AddField( - model_name="prefixes", - name="description", - field=models.TextField(blank=True, null=True), - ), - migrations.AddField( - model_name="prefixes", - name="expires", - field=models.DateTimeField(blank=True, null=True), - ), - migrations.AlterField( - model_name="group_info", - name="description", - field=models.TextField(blank=True), - ), - ] diff --git a/api/migrations/0003_rename_meta_table_prefix_table.py b/api/migrations/0003_rename_meta_table_prefix_table.py deleted file mode 100644 index 697f102d..00000000 --- a/api/migrations/0003_rename_meta_table_prefix_table.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 3.2.10 on 2022-01-25 00:14 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ("api", "0002_auto_20220124_2356"), - ] - - operations = [ - migrations.RenameModel( - old_name="meta_table", - new_name="prefix_table", - ), - ] diff --git a/api/migrations/0004_rename_group_info_groupinfo.py b/api/migrations/0004_rename_group_info_groupinfo.py deleted file mode 100644 index 94c31c1f..00000000 --- a/api/migrations/0004_rename_group_info_groupinfo.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 3.2.10 on 2022-03-22 17:57 - -from django.conf import settings -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("auth", "0012_alter_user_first_name_max_length"), - ("api", "0003_rename_meta_table_prefix_table"), - ] - - operations = [ - migrations.RenameModel( - old_name="group_info", - new_name="GroupInfo", - ), - ] diff --git a/api/migrations/0005_rename_prefixes_prefix.py b/api/migrations/0005_rename_prefixes_prefix.py deleted file mode 100644 index d253bdfc..00000000 --- a/api/migrations/0005_rename_prefixes_prefix.py +++ /dev/null @@ -1,20 +0,0 @@ -# Generated by Django 3.2.10 on 2022-03-22 18:29 - -from django.conf import settings -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ("auth", "0012_alter_user_first_name_max_length"), - ("api", "0004_rename_group_info_groupinfo"), - ] - - operations = [ - 
migrations.RenameModel( - old_name="prefixes", - new_name="Prefix", - ), - ] diff --git a/api/migrations/0006_delete_new_users.py b/api/migrations/0006_delete_new_users.py deleted file mode 100644 index 790aa2ba..00000000 --- a/api/migrations/0006_delete_new_users.py +++ /dev/null @@ -1,16 +0,0 @@ -# Generated by Django 3.2.13 on 2024-03-07 21:53 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('api', '0005_rename_prefixes_prefix'), - ] - - operations = [ - migrations.DeleteModel( - name='new_users', - ), - ] diff --git a/api/migrations/__init__.py b/api/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/api/model/__init__.py b/api/model/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/api/model/groups.py b/api/model/groups.py deleted file mode 100644 index 2bda7bdf..00000000 --- a/api/model/groups.py +++ /dev/null @@ -1,466 +0,0 @@ -#!/usr/bin/env python3 -"""Functions for operations with groups -""" - -import sys -from django.db import models -from django.db.models.signals import post_save -from django.contrib.auth.models import Group, User -from django.dispatch import receiver -from rest_framework import status -from rest_framework.response import Response - -from api.scripts.utilities.DbUtils import DbUtils -from api.scripts.utilities.UserUtils import UserUtils -from api.models import BCO - -usr_utils = UserUtils() -db_utils = DbUtils() - - -class GroupInfo(models.Model): - """Some additional information for Group. - This information is stored separately from - Group so as to not complicate or compromise - anything relating to authentication. - Delete group members on group deletion? - """ - - delete_members_on_group_deletion = models.BooleanField(default=False) - description = models.TextField(blank=True) - expiration = models.DateTimeField(blank=True, null=True) - group = models.ForeignKey(Group, on_delete=models.CASCADE, to_field="name") - max_n_members = models.IntegerField(blank=True, null=True) - owner_user = models.ForeignKey(User, on_delete=models.CASCADE, to_field="username") - - def __str__(self): - """String for representing the GroupInfo model (in Admin site etc.).""" - return f"{self.group}" - - -def post_api_groups_info(request): - """Retrieve Group information by user""" - - user = usr_utils.user_from_request(request=request) - - try: - bulk_request = request.data["POST_api_groups_info"] - - group_info = [] - - for index, value in enumerate(bulk_request["names"]): - group = Group.objects.get(name=value) - - try: - admin = GroupInfo.objects.get(group=value).owner_user == user - description = GroupInfo.objects.get(group=value).description - except GroupInfo.DoesNotExist: - admin = False - description = "N/A" - - group_permissions = list( - group.permissions.all().values_list("codename", flat=True) - ) - group_members = list(group.user_set.all().values_list("username", flat=True)) - group_info.append( - { - "name": group.name, - "permissions": group_permissions, - "members": group_members, - "admin": admin, - "description": description, - } - ) - except Exception as error: - return Response( - status=status.HTTP_400_BAD_REQUEST, - data={"message": "Bad request. Request is not formatted correctly."} - ) - - return Response(status=status.HTTP_200_OK, data=group_info) - - -def post_api_groups_create(request): - """ - Instantiate any necessary imports. - Not guaranteed which of username and group - will be provided. 
- Create the optional keys if they haven't - been provided. - The group has not been created, so create it. - Update the group info. - TODO: Expiration needs to be casted to a datetime object; will likely - need to be separate fields in UI - The expiration field can't be a blank string because django will complain - about the field being a DateTimeField and thus requiring a particular - format for "blank" or "null" as defined in the model. - - Note the bool typecast for delete_members_on_group_deletion, - this is necessary since the request to create the group - doesn't have a concept of type bool. - Add users which exist and give an error for those that don't. - - As this view is for a bulk operation, status 200 - means that the request was successfully processed, - but NOT necessarily each item in the request. - """ - - bulk_request = request.data["POST_api_groups_create"] - group_admin = usr_utils.user_from_request(request=request) - groups = list(Group.objects.all().values_list("name", flat=True)) - return_data = [] - any_failed = False - - for creation_object in bulk_request: - - standardized = creation_object["name"].lower() - if standardized not in groups: - if "usernames" not in creation_object: - creation_object["usernames"] = [] - if "delete_members_on_group_deletion" not in creation_object: - creation_object["delete_members_on_group_deletion"] = False - - if "description" not in creation_object: - creation_object["description"] = "" - - if "max_n_members" not in creation_object: - creation_object["max_n_members"] = -1 - - Group.objects.create(name=creation_object["name"]) - group_admin.groups.add(Group.objects.get(name=creation_object["name"])) - - if ( - "expiration" not in creation_object - or creation_object["expiration"] == "" - ): - GroupInfo.objects.create( - delete_members_on_group_deletion=bool( - creation_object["delete_members_on_group_deletion"] - ), - description=creation_object["description"], - group=Group.objects.get(name=creation_object["name"]), - max_n_members=creation_object["max_n_members"], - owner_user=group_admin, - ) - else: - GroupInfo.objects.create( - delete_members_on_group_deletion=bool( - creation_object["delete_members_on_group_deletion"] - ), - description=creation_object["description"], - expiration=creation_object["expiration"], - group=Group.objects.get(name=creation_object["name"]), - max_n_members=creation_object["max_n_members"], - owner_user=group_admin, - ) - - users_added = [] - users_excluded = [] - - for usrnm in creation_object["usernames"]: - if usr_utils.check_user_exists(user_name=usrnm): - User.objects.get(username=usrnm).groups.add( - Group.objects.get(name=creation_object["name"]) - ) - users_added.append(usrnm) - else: - users_excluded.append(usrnm) - - if len(users_excluded) > 0: - return_data.append( - db_utils.messages( - parameters={ - "group": standardized, - "users_excluded": users_excluded, - } - )["201_group_users_excluded"] - ) - - else: - return_data.append( - db_utils.messages(parameters={"group": standardized})[ - "201_group_create" - ] - ) - - else: - # Update the request status. 
- return_data.append( - db_utils.messages(parameters={"group": standardized})[ - "409_group_conflict" - ] - ) - any_failed = True - - if any_failed: - return Response(status=status.HTTP_207_MULTI_STATUS, data=return_data) - - return Response(status=status.HTTP_200_OK, data=return_data) - - -def post_api_groups_delete(request): - """Instantiate any necessary imports.""" - - bulk_request = request.data["POST_api_groups_delete"]["names"] - - # Establish who has made the request. - requestor_info = usr_utils.user_from_request(request=request) - - # Get all group names. - - # This is a better solution than querying for - # each individual group name. - groups = list(Group.objects.all().values_list("name", flat=True)) - - # Construct an array to return information about processing - # the request. - return_data = [] - any_failed = False - - # Since bulk_request is an array, go over each - # item in the array. - for deletion_object in bulk_request: - # Standardize the group name. - standardized = deletion_object.lower() - deleted_count = 0 - if standardized in groups: - # Get the group and its information. - grouped = Group.objects.get(name=standardized) - group_information = GroupInfo.objects.get(group=grouped.name) - - # Check that the requestor is the group admin. - if requestor_info.username == group_information.owner_user.username: - # Delete the group, checking to see if all users - # in the group also get deleted. - if group_information.delete_members_on_group_deletion: - # Delete all members of the group. - User.objects.filter(groups__name=grouped.name).delete() - # Delete the group itself. - deleted_count, deleted_info = grouped.delete() - if deleted_count < 2: - # Too few deleted, error with this delete - return_data.append( - db_utils.messages(parameters={"group": grouped.name})[ - "404_missing_bulk_parameters" - ] - ) - any_failed = True - continue - - elif deleted_count > 2: - print(deleted_count, "deleted_count") - # We don't expect there to be duplicates, so while this was successful it should throw a warning - return_data.append( - db_utils.messages(parameters={"group": grouped.name})[ - "418_too_many_deleted" - ] - ) - any_failed = True - continue - # Everything looks OK - return_data.append( - db_utils.messages(parameters={"group": grouped.name})[ - "200_OK_group_delete" - ] - ) - else: - # Requestor is not the admin. - return_data.append( - db_utils.messages(parameters={})["403_insufficient_permissions"] - ) - any_failed = True - else: - # Update the request status. - return_data.append(db_utils.messages(parameters={})["400_bad_request"]) - any_failed = True - - if any_failed: - return Response(status=status.HTTP_207_MULTI_STATUS, data=return_data) - - return Response(status=status.HTTP_200_OK, data=return_data) - - -def post_api_groups_modify(request): - """Instantiate any necessary imports. - TODO: This needs a serious revamp... Permissions and specific groups need - to be adjusted. IE no one should be able to change a group without GroupInfo. 
- """ - try: - bulk_request = request.data["POST_api_groups_modify"] - except: - return Response(status=status.HTTP_400_BAD_REQUEST) - requestor_info = usr_utils.user_from_request(request=request) - groups = list(Group.objects.all().values_list("name", flat=True)) - return_data = [] - for modification_object in bulk_request: - standardized = modification_object["name"].lower() - - if standardized in groups: - grouped = Group.objects.get(name=standardized) - if ( - requestor_info.is_superuser == True - or grouped in requestor_info.groups.all() - ): - # TODO: We shouldn't use a try/except as an if statement; I think there is actually - # a get_or_create() function: - # group_information = GroupInfo.objects.get_or_create(group=grouped, owner_user=requestor_info) - # But would need to be tested - try: - group_information = GroupInfo.objects.get(group=grouped) - except: - group_information = GroupInfo.objects.create( - group=grouped, owner_user=requestor_info - ) - if "actions" in modification_object: - action_set = modification_object["actions"] - - # Invalid inputs don't throw 400, 401, or 403 for the - # request. That is, provided parameters that don't - # exist (for example, an owner_user that does not exist) - # simply get skipped over. - # First do the "easy" tasks - name and description. - # Change name of group if set in actions - if "rename" in action_set: - # Simply re-name to whatever we've been provided, - # assuming the group doesn't already exist. - if action_set["rename"] not in groups: - grouped.name = action_set["rename"] - grouped.save() - group_information.group = grouped - group_information.save() - bco_list = BCO.objects.filter(owner_group=standardized) - for bco in bco_list: - bco.owner_group = grouped - bco.save() - - # Change description of group if set in actions. - if "redescribe" in action_set: - group_information.description = action_set["redescribe"] - group_information.save() - - # Now the ownership tasks. - # TODO: Is owner_group defined for this type of object? - # Does not appear to be set, also does not appear to be inherited. - # WARNING: This could cause an error if this is sent in! - if "owner_group" in action_set: - # Make sure the provided owner group exists. - if usr_utils.check_group_exists(name=action_set["owner_group"]): - group_information.owner_group = Group.objects.get( - name=action_set["owner_group"] - ) - group_information.save() - else: - # TODO: This seems to be some type of error state - pass - - if "owner_user" in action_set: - # Make sure the provided owner user exists. - if usr_utils.check_user_exists( - user_name=action_set["owner_user"] - ): - group_information.owner_user = User.objects.get( - username=action_set["owner_user"] - ) - group_information.save() - else: - # TODO: This seems to be some type of error state - pass - - # Finally, perform the set logic to add and remove - # users and groups. - - # Get all users in the group. - all_users = set([i.username for i in list(grouped.user_set.all())]) - - # Removals are processed first, then additions. - # Remove the users provided, if any. - if "remove_users" in action_set: - users = User.objects.filter( - username__in=action_set["remove_users"] - ) - for user in users: - user.groups.remove(grouped) - - # Get the users in the groups provided, if any. - if "disinherit_from" in action_set: - # Get all the groups first, then get the user list. 
- rm_group_users = list( - User.objects.filter( - groups__in=Group.objects.filter( - name__in=action_set["disinherit_from"] - ) - ).values_list("username", flat=True) - ) - - all_users = all_users - set(rm_group_users) - - # Addition explained at https://stackoverflow.com/a/1306663 - - # Add the users provided, if any. - if "add_users" in action_set: - users = User.objects.filter( - username__in=action_set["add_users"] - ) - for user in users: - user.groups.add(grouped) - - # Get the users in the groups provided, if any. - if "inherit_from" in action_set: - # Get all the groups first, then get the user list. - a_group_users = list( - User.objects.filter( - groups__in=Group.objects.filter( - name__in=action_set["inherit_from"] - ) - ).values_list("username", flat=True) - ) - all_users.update(a_group_users) - else: - pass - return_data.append( - db_utils.messages(parameters={"group": grouped.name})[ - "200_OK_group_modify" - ] - ) - else: - # Requestor is not the admin. - return_data.append( - db_utils.messages(parameters={})["403_insufficient_permissions"] - ) - else: - # Update the request status. - return_data.append(db_utils.messages(parameters={})["400_bad_request"]) - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - return Response(status=status.HTTP_200_OK, data=return_data) - - -@receiver(post_save, sender=User) -def associate_user_group(sender, instance, created, **kwargs): - """Create Group and GroupInfo - - Link user creation to groups. - Create a group for this user. - Source: https://stackoverflow.com/a/55206382/5029459 - Automatically add the user to the BCO drafters and publishers groups, - if the user isn't anon or the already existent bco_drafter or bco_publisher. - """ - - if 'test' in sys.argv or 'loaddata' in sys.argv: - return - - else: - if created: - print(instance) - Group.objects.create(name=instance) - group = Group.objects.get(name=instance) - group.user_set.add(instance) - if instance.username not in ["anon", "bco_drafter", "bco_publisher", "AnonymousUser"]: - User.objects.get(username=instance).groups.add( - Group.objects.get(name="bco_drafter") - ) - User.objects.get(username=instance).groups.add( - Group.objects.get(name="bco_publisher") - ) diff --git a/api/model/prefix.py b/api/model/prefix.py deleted file mode 100644 index 496dc922..00000000 --- a/api/model/prefix.py +++ /dev/null @@ -1,745 +0,0 @@ -#!/usr/bin/env python3 -"""Functions for operations with groups -""" - - -import re -import sys -from django.db import models -from django.contrib.auth.models import Group, Permission, User -from django.db.models.signals import post_save, post_delete, pre_save -from django.dispatch import receiver -import django.db.utils as PermErrors -from django.contrib.contenttypes.models import ContentType -from django.utils import timezone -from rest_framework import status -from rest_framework.response import Response - -from api.model.groups import GroupInfo -from api.scripts.utilities import DbUtils -from api.scripts.utilities import UserUtils - - -# Generic meta data model -# TODO: rename to prefix_meta -class prefix_table(models.Model): - """The number of objects for a given prefix.""" - - # Field is required. - n_objects = models.IntegerField() - - # Which prefix the object falls under. - - # Field is required. 
- prefix = models.CharField(max_length=5) - - def __str__(self): - """String for representing the BCO model (in Admin site etc.).""" - return self.prefix - - -class Prefix(models.Model): - """Link Prefix to groups and users. - - Be careful about related_name. - Source: https://stackoverflow.com/questions/53651114/using-same-foreign-key-twice-in-a-model-in-django-as-different-fields - Which server is this prefix certified with? - What is the certifying key? - """ - - certifying_server = models.TextField(blank=True, null=True) - certifying_key = models.TextField(blank=True, null=True) - created = models.DateTimeField(default=timezone.now, blank=True, null=True) - created_by = models.ForeignKey( - User, - on_delete=models.CASCADE, - related_name="created_by", - to_field="username", - default="wheel", - ) - description = models.TextField(blank=True, null=True) - expires = models.DateTimeField(blank=True, null=True) - owner_group = models.ForeignKey(Group, on_delete=models.CASCADE, to_field="name") - owner_user = models.ForeignKey(User, on_delete=models.CASCADE, to_field="username") - prefix = models.CharField(max_length=5) - - def __str__(self): - """String for representing the BCO model (in Admin site etc.).""" - return f"{self.prefix}" - - -def post_api_prefixes_create(request): - """Create a prefix - - Create a prefix to be used to classify BCOs and to determine permissions - for objects created under that prefix. The requestor must be in the group - prefix_admins to create a prefix. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. - """ - - db_utils = DbUtils.DbUtils() - user_utils = UserUtils.UserUtils() - bulk_request = request.data["POST_api_prefixes_create"] - unavailable = list(Prefix.objects.all().values_list("prefix", flat=True)) - return_data = [] - any_failed = False - for creation_object in bulk_request: - try: - owner_user = User.objects.get(username=creation_object["owner_user"]) - except User.DoesNotExist: - return_data.append( - db_utils.messages(parameters={"username": creation_object["owner_user"]})[ - "404_user_not_found" - ] - ) - any_failed = True - continue - if creation_object["owner_group"] == "bco_drafter": - is_public = True - else: - is_public = False - for prfx in creation_object["prefixes"]: - standardized = prfx["prefix"].upper() - if not re.match(r"^[A-Z]{3,5}$", standardized): - return_data.append( - db_utils.messages(parameters={"prefix": standardized})[ - "400_bad_request_malformed_prefix" - ] - ) - any_failed = True - continue - - if standardized in unavailable: - return_data.append( - db_utils.messages(parameters={"prefix": standardized})[ - "409_prefix_conflict" - ] - ) - any_failed = True - continue - - if "expiration_date" in prfx: - if ( - db_utils.check_expiration(dt_string=prfx["expiration_date"]) - is not None - ): - return_data.append( - db_utils.messages( - parameters={"expiration_date": prfx["expiration_date"]} - )["400_invalid_expiration_date"] - ) - any_failed = True - continue - - draft = prfx["prefix"].lower() + "_drafter" - publish = prfx["prefix"].lower() + "_publisher" - - if len(Group.objects.filter(name=draft)) != 0: - drafters = Group.objects.get(name=draft) - owner_user.groups.add(drafters) - else: - Group.objects.create(name=draft) - drafters = Group.objects.get(name=draft) - owner_user.groups.add(drafters) - GroupInfo.objects.create( - 
delete_members_on_group_deletion=False, - description=prfx["description"], - group=drafters, - max_n_members=-1, - owner_user=owner_user, - ) - - if len(Group.objects.filter(name=publish)) != 0: - publishers = Group.objects.get(name=publish) - owner_user.groups.add(publishers) - else: - Group.objects.create(name=publish) - publishers = Group.objects.get(name=publish) - owner_user.groups.add(publishers) - GroupInfo.objects.create( - delete_members_on_group_deletion=False, - description=prfx["description"], - group=publishers, - max_n_members=-1, - owner_user=owner_user, - ) - if is_public is True: - owner_group = "bco_drafter" - else: - owner_group = publish - - write_result = DbUtils.DbUtils().write_object( - p_app_label="api", - p_model_name="Prefix", - p_fields=[ - "created_by", - "description", - "owner_group", - "owner_user", - "prefix", - ], - p_data={ - "created_by": user_utils.user_from_request( - request=request - ).username, - "description": prfx["description"], - "owner_group": owner_group, - "owner_user": creation_object["owner_user"], - "prefix": standardized, - }, - ) - if write_result != 1: - return_data.append( - db_utils.messages(parameters={"prefix": standardized})[ - "409_prefix_conflict" - ] - ) - any_failed = True - continue - - return_data.append( - db_utils.messages(parameters={"prefix": standardized})[ - "201_prefix_create" - ] - ) - - if any_failed and len(return_data) == 1: - return Response(status=return_data[0]["status_code"], data=return_data) - - if any_failed and len(return_data) > 1: - return Response(status=status.HTTP_207_MULTI_STATUS, data=return_data) - - return Response(status=status.HTTP_200_OK, data=return_data) - - -def post_api_prefixes_delete(request): - """Deletes a prefix - - The requestor must be in the group prefix_admins to delete a prefix. - Any object created under this prefix will have its permissions "locked out." - This means that any other view which relies on object-level permissions, such - as /api/objects/drafts/read/, will not allow any requestor access to particular - objects. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. - """ - - db_utils = DbUtils.DbUtils() - - bulk_request = request.data["POST_api_prefixes_delete"] - - # Get all existing prefixes. - unavailable = list(Prefix.objects.all().values_list("prefix", flat=True)) - - return_data = [] - - for creation_object in bulk_request: - - # Create a list to hold information about errors. - errors = {} - - # Standardize the prefix name. - standardized = creation_object.upper() - - # Create a flag for if one of these checks fails. - error_check = False - - if standardized not in unavailable: - error_check = True - # Update the request status. - errors["404_missing_prefix"] = db_utils.messages( - parameters={"prefix": standardized} - )["404_missing_prefix"] - - if error_check is False: - # The prefix exists, so delete it. - # No need to use DB Utils here, - # just delete straight up. - # Source: https://stackoverflow.com/a/3681691 - # Django *DOESN'T* want primary keys now... - prefixed = Prefix.objects.get(prefix=standardized) - prefixed.delete() - # Deleted the prefix. - errors["200_OK_prefix_delete"] = db_utils.messages( - parameters={"prefix": standardized} - )["200_OK_prefix_delete"] - - # Append the possible "errors". 
- return_data.append(errors) - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - return Response(status=status.HTTP_200_OK, data=return_data) - - -def post_api_prefixes_modify(request): - """Modify a Prefix - - Modify a prefix which already exists. - The requestor *must* be in the group prefix_admins to modify a prefix. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. - """ - # Instantiate any necessary imports. - db_utils = DbUtils.DbUtils() - user_utils = UserUtils.UserUtils() - - bulk_request = request.data["POST_api_prefixes_modify"] - unavailable = list(Prefix.objects.all().values_list("prefix", flat=True)) - - # Construct an array to return information about processing - # the request. - return_data = [] - - # Since bulk_request is an array, go over each - # item in the array. - for creation_object in bulk_request: - - # Go over each prefix proposed. - for prfx in creation_object["prefixes"]: - - # Create a list to hold information about errors. - errors = {} - - # Standardize the prefix name. - standardized = prfx["prefix"].upper() - - # Create a flag for if one of these checks fails. - error_check = False - - if standardized not in unavailable: - - error_check = True - - # Update the request status. - # Bad request. - errors["404_missing_prefix"] = db_utils.messages( - parameters={"prefix": standardized} - )["404_missing_prefix"] - - # Does the user exist? - if ( - user_utils.check_user_exists(user_name=creation_object["owner_user"]) - is False - ): - - error_check = True - - # Bad request. - errors["404_user_not_found"] = db_utils.messages( - parameters={"username": creation_object["owner_user"]} - )["404_user_not_found"] - - # Does the group exist? - if ( - user_utils.check_group_exists(name=creation_object["owner_group"]) - is False - ): - - error_check = True - - # Bad request. - errors["404_group_not_found"] = db_utils.messages( - parameters={"group": creation_object["owner_group"]} - )["404_group_not_found"] - - # Was the expiration date validly formatted and, if so, - # is it after right now? - if "expiration_date" in prfx: - if ( - db_utils.check_expiration(dt_string=prfx["expiration_date"]) - is not None - ): - - error_check = True - - # Bad request. - errors["400_invalid_expiration_date"] = db_utils.messages( - parameters={"expiration_date": prfx["expiration_date"]} - )["400_invalid_expiration_date"] - - # Did any check fail? - if error_check is False: - - # The prefix has not been created, so create it. - DbUtils.DbUtils().write_object( - p_app_label="api", - p_model_name="Prefix", - p_fields=[ - "created_by", - "description", - "owner_group", - "owner_user", - "prefix", - ], - p_data={ - "created_by": user_utils.user_from_request( - request=request - ).username, - "description": prfx["description"], - "owner_group": creation_object["owner_group"], - "owner_user": creation_object["owner_user"], - "prefix": standardized, - }, - ) - - # Created the prefix. - errors["201_prefix_modify"] = db_utils.messages( - parameters={"prefix": standardized} - )["201_prefix_modify"] - - # Append the possible "errors". 
- return_data.append(errors) - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - return Response(status=status.HTTP_200_OK, data=return_data) - - -def post_api_prefixes_permissions_set(request): - """Set the permissions for prefixes.""" - - # Instantiate any necessary imports. - db = DbUtils.DbUtils() - uu = UserUtils.UserUtils() - - # First, get which user we're dealing with. - user = uu.user_from_request(request=request) - - # Define the bulk request. - bulk_request = request.data["POST_api_prefixes_permissions_set"] - - # Get all existing prefixes. - unavailable = list(Prefix.objects.all().values_list("prefix", flat=True)) - - # Construct an array to return information about processing - # the request. - return_data = [] - - # Since bulk_request is an array, go over each - # item in the array. - for creation_object in bulk_request: - - # Go over each prefix proposed. - for prfx in creation_object["prefixes"]: - - # Create a list to hold information about errors. - errors = {} - - # Standardize the prefix name. - standardized = prfx.upper() - - # Create a flag for if one of these checks fails. - error_check = False - - # Has the prefix already been created? - if standardized not in unavailable: - - error_check = True - - # Update the request status. - errors["404_missing_prefix"] = db.messages( - parameters={"prefix": standardized} - )["404_missing_prefix"] - - # The prefix exists, but is the requestor the owner? - if ( - uu.check_user_owns_prefix(user_name=user.username, prfx=standardized) - is False - and user.username != "wheel" - ): - - error_check = True - - # Bad request, the user isn't the owner or wheel. - errors["403_requestor_is_not_prefix_owner"] = db.messages( - parameters={"prefix": standardized} - )["403_requestor_is_not_prefix_owner"] - - # The "expensive" work of assigning permissions is held off - # if any of the above checks fails. - - # Did any check fail? - if error_check is False: - - # Split out the permissions assignees into users and groups. - assignees = {"group": [], "username": []} - - if "username" in creation_object: - assignees["username"] = creation_object["username"] - - if "group" in creation_object: - assignees["group"] = creation_object["group"] - - # Go through each one. - for user_name in assignees["username"]: - - # Create a list to hold information about sub-errors. - sub_errors = {} - - # Create a flag for if one of these sub-checks fails. - sub_error_check = False - - # Get the user whose permissions are being assigned. - if uu.check_user_exists(user_name=user_name) is False: - - sub_error_check = True - - # Bad request, the user doesn't exist. - sub_errors["404_user_not_found"] = db.messages( - parameters={"username": user_name} - )["404_user_not_found"] - - # Was the user found? - if sub_error_check is False: - - assignee = User.objects.get(username=user_name) - - # Permissions are defined directly as they are - # in the POST request. - - # Assumes permissions are well-formed... - - # Source: https://docs.djangoproject.com/en/3.2/topics/auth/default/#permissions-and-authorization - assignee.user_permissions.set( - [ - Permission.objects.get(codename=i + "_" + prfx) - for i in creation_object["permissions"] - ] - ) - - # Permissions assigned. - sub_errors["200_OK_prefix_permissions_update"] = db.messages( - parameters={"prefix": standardized} - )["200_OK_prefix_permissions_update"] - - # Add the sub-"errors". 
- errors["username"] = sub_errors - - for g in assignees["group"]: - - # Create a list to hold information about sub-errors. - sub_errors = {} - - # Create a flag for if one of these sub-checks fails. - sub_error_check = False - - # Get the group whose permissions are being assigned. - if uu.check_group_exists(name=g) is False: - - sub_error_check = True - - # Bad request, the group doesn't exist. - sub_errors["404_group_not_found"] = db.messages( - parameters={"group": g} - )["404_group_not_found"] - - # Was the group found? - if sub_error_check is False: - - assignee = Group.objects.get(name=g) - - # Permissions are defined directly as they are - # in the POST request. - - # Assumes permissions are well-formed... - - # Source: https://docs.djangoproject.com/en/3.2/topics/auth/default/#permissions-and-authorization - assignee.permissions.set( - [ - Permission.objects.get(codename=i + "_" + prfx) - for i in creation_object["permissions"] - ] - ) - - # Permissions assigned. - sub_errors["200_OK_prefix_permissions_update"] = db.messages( - parameters={"prefix": standardized} - )["200_OK_prefix_permissions_update"] - - # Add the sub-"errors". - errors["group"] = sub_errors - - # Append the possible "errors". - return_data.append(errors) - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - return Response(status=status.HTTP_200_OK, data=return_data) - - -def post_api_prefixes_token(request): - """Get Prefixes for a Token - - Get all available prefixes and their associated permissions for a given token. - The word 'Token' must be included in the header. - The token has already been validated, - so the user is guaranteed to exist. - A little expensive, but use the utility - we already have. Default will return flattened list of permissions. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. - """ - - prefixes = UserUtils.UserUtils().prefix_perms_for_user( - user_object=UserUtils.UserUtils().user_from_request(request=request).username, - flatten=False, - ) - return Response(status=status.HTTP_200_OK, data=prefixes) - - -def post_api_prefixes_token_flat(request): - """Get Prefixes for a Token - - Get all available prefixes and their associated permissions for a given token. - The word 'Token' must be included in the header. The token has already been - validated, so the user is guaranteed to exist. - A little expensive, but use the utility we already have. Default will return - flattened list of permissions. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. - """ - - prefixes = UserUtils.UserUtils().prefix_perms_for_user( - user_object=request.user, - flatten=True, - ) - - return Response(status=status.HTTP_200_OK, data=prefixes) - - -# --- Prefix --- # -@receiver(pre_save, sender=Prefix) -def create_permissions_for_prefix(sender, instance=None, **kwargs): - """Link prefix creation to permissions creation. - Check to see whether or not the permissions - have already been created for this prefix. - Create the macro-level, draft, and publish permissions. - Give FULL permissions to the prefix user owner - and their group. 
- - No try/except necessary here as the user's existence - has already been verified upstream. - - Source: https://stackoverflow.com/a/20361273 - """ - - # GroupInfo.objects.create( - # delete_members_on_group_deletion=False, - # description='Group administrators', - # group=Group.objects.get(name='group_admins'), - # max_n_members=-1, - # owner_user=User.objects.get(username='wheel') - # ) - - if 'test' in sys.argv or 'loaddata' in sys.argv: - return - - else: - owner_user = User.objects.get(username=instance.owner_user) - owner_group = Group.objects.get(name=instance.owner_group_id) - drafters = Group.objects.get(name=instance.prefix.lower() + "_drafter") - publishers = Group.objects.get(name=instance.prefix.lower() + "_publisher") - - try: - for perm in ["add", "change", "delete", "view", "draft", "publish"]: - Permission.objects.create( - name="Can " + perm + " BCOs with prefix " + instance.prefix, - content_type=ContentType.objects.get(app_label="api", model="bco"), - codename=perm + "_" + instance.prefix, - ) - new_perm = Permission.objects.get(codename=perm + "_" + instance.prefix) - owner_user.user_permissions.add(new_perm) - owner_group.permissions.add(new_perm) - publishers.permissions.add(new_perm) - if perm == "publish": - pass - else: - drafters.permissions.add(new_perm) - - except PermErrors.IntegrityError: - # The permissions already exist. - pass - - -@receiver(post_save, sender=Prefix) -def create_counter_for_prefix(sender, instance=None, created=False, **kwargs): - """Create prefix counter - - Creates a prefix counter for each prefix if it does not exist on save. - - Parameters - ---------- - sender: django.db.models.base.ModelBase - instance: api.model.prefix.Prefix - created: bool - """ - if 'test' in sys.argv or 'loaddata' in sys.argv or 'flush' in sys.argv: - return - else: - if created: - prefix_table.objects.create(n_objects=1, prefix=instance.prefix) - - -@receiver(post_delete, sender=Prefix) -def delete_permissions_for_prefix(sender, instance=None, **kwargs): - """Link prefix deletion to permissions deletion. - No risk of raising an error when using - a filter. - """ - - Permission.objects.filter(codename="add_" + instance.prefix).delete() - Permission.objects.filter(codename="change_" + instance.prefix).delete() - Permission.objects.filter(codename="delete_" + instance.prefix).delete() - Permission.objects.filter(codename="view_" + instance.prefix).delete() - Permission.objects.filter(codename="draft_" + instance.prefix).delete() - Permission.objects.filter(codename="publish_" + instance.prefix).delete() diff --git a/api/models.py b/api/models.py deleted file mode 100755 index e4b769ae..00000000 --- a/api/models.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python3 - -"""Models - -Explanation of optional fields: -https://stackoverflow.com/questions/16349545/optional-fields-in-django-models -TextField is used here because it has no character limit. - -Create a base model, then inherit for each table. -See the 4th example under "Model Inheritance" at -https://docs.djangoproject.com/en/3.1/topics/db/models/#model-inheritance - ---- Permissions imports --- # -Source: https://simpleisbetterthancomplex.com/tutorial/2016/07/22/how-to-extend-django-user-model.html -For setting the anonymous key. -The user model is straight from Django. -Referencing models. -Issue with timezones. -Source: https://stackoverflow.com/a/32411560 -Object-level permissions. -For token creation. 
-Source: https://www.django-rest-framework.org/api-guide/authentication/#generating-tokens -""" - -import sys -from django.db import models -from django.conf import settings -from django.contrib.auth.models import Group, User -from django.db.models.signals import post_save -from django.dispatch import receiver -from django.utils import timezone -from rest_framework.authtoken.models import Token - - -# Generic BCO model -class BCO(models.Model): - """BioComput Object Model. - - Attributes: - ----------- - contents: JSONField - BCO JSON contents - object_class: str - T.B.D. - object_id: str - BCO Object Identifier - owner_group: str - String representing the django.contrib.auth.models.Group that 'owns' the object - owner_user = models.ForeignKey(User, on_delete=models.CASCADE, to_field='username') - String representing the django.contrib.auth.models.User that 'owns' the object - prefix: str - Prefix for the BCO - schema: str - schema to which the BCO should be validated. Default is 'IEEE' - state:str - State of object. REFERENCED, PUBLISHED, DRAFT, and DELETE are currently accepted values. - last_update: DateTime - Date Time object for the last database change to this object - """ - - contents = models.JSONField() - object_class = models.TextField(blank=True, null=True) - object_id = models.TextField() - owner_group = models.ForeignKey(Group, on_delete=models.CASCADE, to_field="name") - owner_user = models.ForeignKey(User, on_delete=models.CASCADE, to_field="username") - prefix = models.CharField(max_length=5) - schema = models.TextField() - state = models.TextField() - last_update = models.DateTimeField() - - def __str__(self): - """String for representing the BCO model (in Admin site etc.).""" - return str(self.object_id) - -# --- Receivers --- # - - -# User and API Information are kept separate so that we can use it -# elsewhere easily. - -# Source: https://florimondmanca.github.io/djangorestframework-api-key/guide/#api-key-models -# Source: https://simpleisbetterthancomplex.com/tutorial/2016/07/22/how-to-extend-django-user-model.html - - -# --- User --- # - - -@receiver(post_save, sender=User) -def create_auth_token(sender, instance=None, created=False, **kwargs): - """Link user creation to token generation. - Source: https://www.django-rest-framework.org/api-guide/authentication/#generating-tokens - """ - if 'loaddata' in sys.argv: - return - else: - if created: - # The anonymous user's token is hard-coded - # in server.conf. - if instance.username == "anon": - # Create anon's record with the hard-coded key. - Token.objects.create(user=instance, key=settings.ANON_KEY) - else: - # Create a normal user's record. - Token.objects.create(user=instance) - - -# Link object deletion to object permissions deletion. - -# TODO:... diff --git a/api/permissions.py b/api/permissions.py deleted file mode 100644 index ff585bb9..00000000 --- a/api/permissions.py +++ /dev/null @@ -1,273 +0,0 @@ -# For getting objects out of the database. -# Apps -# Group object permissions -# Source: https://github.com/django-guardian/django-guardian#usage -# REST permissions. 
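As a quick illustration of the BCO model deleted above, a draft row could be created roughly as follows; the hostname, object ID, and owner names are placeholders, and the group and user are assumed to exist already:

    from django.contrib.auth.models import Group, User
    from django.utils import timezone
    from api.models import BCO

    draft = BCO.objects.create(
        contents={"object_id": "http://127.0.0.1:8000/BCO_000001/DRAFT"},  # placeholder contents
        object_id="http://127.0.0.1:8000/BCO_000001/DRAFT",
        owner_group=Group.objects.get(name="bco_drafter"),   # assumed to exist
        owner_user=User.objects.get(username="wheel"),       # assumed to exist
        prefix="BCO",
        schema="IEEE",
        state="DRAFT",
        last_update=timezone.now(),
    )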
-# Source: https://stackoverflow.com/a/18646798 - -from django.apps import apps -from django.conf import settings -from api.scripts.utilities import DbUtils -from guardian.shortcuts import get_group_perms -from rest_framework import permissions -from rest_framework.authtoken.models import Token -from django.contrib.auth.models import User, Group - - -# ----- Admin Permissions ----- # - - -class RequestorInGroupAdminsGroup(permissions.BasePermission): - """Class docstring""" - - def has_permission(self, request, view): - """Check to see if the requester is in the group admins group. - Get the groups for this token (user). - This means getting the user ID for the token, - then the username.""" - - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the prefix admins. - group_admins = Group.objects.filter(user=username, name="group_admins") - - return len(group_admins) > 0 - - -class RequestorInPrefixAdminsGroup(permissions.BasePermission): - """ - Check to see if the requester is in the prefix admins group. - - Get the groups for this token (user). - - Slight tweak in case the proper headers were not provided... - In particular, Swagger will cause an Internal Error 500 - if this logic is not here AND a view uses non-object-level - permissions (i.e. RequestorInPrefixAdminsGroup, see - ApiPrefixesPermissionsSet in views.py) - """ - - def has_permission(self, request, view): - """ - This means getting the user ID for the token, - then the username. - Get the prefix admins. - """ - - if settings.PREFIX is True: - return True - if "HTTP_AUTHORIZATION" in request.META: - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - - username = User.objects.get(id=user_id) - prefix_admins = Group.objects.filter(user=username, name="prefix_admins") - return len(prefix_admins) > 0 - - else: - return False - - -# ----- Table Permissions ----- # - - -# ----- Object Permissions ----- # - - -# Permissions based on REST. -# Source: https://stackoverflow.com/a/18646798 -class RequestorIsObjectOwner(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - # Check to see if the requester is in a particular owner group. - - # Get the groups for this token (user). - - # This means getting the user ID for the token, - # then the username. - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the groups for this username (at a minimum the user - # group created when the account was created should show up). - - # Now get the user's groups. - groups = Group.objects.filter(user=username) - - # Check that the user is in the ownership group. - - # Note that view permissions are NOT checked because - # the owner automatically has full permissions on the - # object. - owner_group = ( - apps.get_model(app_label="api", model_name=request.data["table_name"]) - .objects.get(object_id=request.data["object_id"]) - .owner_group - ) - - # Note: could use https://docs.djangoproject.com/en/3.2/topics/auth/customizing/#custom-permissions - # to set these, but group membership confers all read - # permissions. - - # Is this user in the ownership group? 
- return groups.filter(name=owner_group).exists() - - -class RequestorInObjectOwnerGroup(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - # Check to see if the requester is in a particular owner group. - - # Get the groups for this token (user). - - # This means getting the user ID for the token, - # then the username. - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the groups for this username (at a minimum the user - # group created when the account was created should show up). - - # Now get the user's groups. - groups = Group.objects.filter(user=username) - - # Check that the user is in the ownership group. - - # Note that view permissions are NOT checked because - # the owner automatically has full permissions on the - # object. - owner_group = ( - apps.get_model(app_label="api", model_name=request.data["table_name"]) - .objects.get(object_id=request.data["object_id"]) - .owner_group - ) - - # Note: could use https://docs.djangoproject.com/en/3.2/topics/auth/customizing/#custom-permissions - # to set these, but group membership confers all read - # permissions. - - # Is this user in the ownership group? - return groups.filter(name=owner_group).exists() - - -# Generic object-level permissions checker. -class HasObjectGenericPermission(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - # Check to see if the requester (group) has the given permission on the given object. - - # Don't need to check for table here as that is done in the step before... - - # *Must* return a True or False. - # Source: https://www.django-rest-framework.org/api-guide/permissions/#custom-permissions - - # This means getting the user ID for the token, - # then the username. - # Source: https://stackoverflow.com/questions/31813572/access-token-from-view - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # See if the group can do something with this object. - # Source: https://django-guardian.readthedocs.io/en/stable/userguide/check.html#get-perms - - # Get the group object first, then check. - if request.data["perm_type"] + "_" + request.data[ - "table_name" - ] in get_group_perms(username, obj): - - return True - - else: - - # User doesn't have the right permissions for this object. - return False - - -# Specific permissions (necessary to use logical operators -# when checking permissions). - -# These are all just specific cases of HasObjectGenericPermission -class HasObjectAddPermission(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the group object first, then check. - if "add_" + request.data["table_name"] in get_group_perms(username, obj): - - return True - - else: - - # User doesn't have the right permissions for this object. - return False - - -class HasObjectChangePermission(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the group object first, then check. 
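These permission classes plug into Django REST Framework views in the usual way; a minimal, hypothetical example (the view name is illustrative) would be:

    from rest_framework.views import APIView
    from api.permissions import HasObjectChangePermission

    class ExampleDraftView(APIView):
        # has_object_permission() above runs when the view calls
        # self.check_object_permissions(request, obj) on a fetched object.
        permission_classes = [HasObjectChangePermission]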
- if "change_" + request.data["table_name"] in get_group_perms(username, obj): - - return True - - else: - - # User doesn't have the right permissions for this object. - return False - - -class HasObjectDeletePermission(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the group object first, then check. - if "delete_" + request.data["table_name"] in get_group_perms(username, obj): - - return True - - else: - - # User doesn't have the right permissions for this object. - return False - - -class HasObjectViewPermission(permissions.BasePermission): - def has_object_permission(self, request, view, obj): - - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - username = User.objects.get(id=user_id) - - # Get the group object first, then check. - if "view_" + request.data["table_name"] in get_group_perms(username, obj): - - return True - - else: - - # User doesn't have the right permissions for this object. - return False diff --git a/api/rdb.sh b/api/rdb.sh deleted file mode 100755 index a510982a..00000000 --- a/api/rdb.sh +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/bash - -clear - -find . -path "./migrations/*.py" -not -name "__init__.py" -delete -find . -path "./migrations/*.pyc" -delete - -cd .. - -rm db.sqlite3 - -python3.9 manage.py makemigrations -python3.9 manage.py migrate -python3.9 manage.py loaddata ./api/fixtures/metafixtures.json - -# Clear out all the junk. -#clear - -# Print the keys? -if [[ $2 == '-k' ]] -then - - echo " " - echo " " - echo "Anon key for the installation is..." - sqlite3 db.sqlite3 'SELECT B.key FROM auth_user AS A JOIN authtoken_token AS B ON A.id = B.user_id WHERE A.username = "anon";' - echo " " - echo " " - echo "Wheel key for the installation is..." 
- sqlite3 db.sqlite3 'SELECT B.key FROM auth_user AS A JOIN authtoken_token AS B ON A.id = B.user_id WHERE A.username = "wheel";' - echo " " - echo " " - -fi - -if [[ $1 == '-r' ]] -then - - python3.9 manage.py runserver 8000 - -fi diff --git a/api/request_definitions/GET.schema b/api/request_definitions/GET.schema deleted file mode 100755 index 5a407a34..00000000 --- a/api/request_definitions/GET.schema +++ /dev/null @@ -1,20 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/GET.schema", - "title": "GET", - "description": "A GET request.", - "type": "object", - "properties": - { - "GET_activate_account": - { - "description": "Activate a previously requested account.", - "$ref": "request_definitions/templates/GET_activate_account.schema" - }, - "GET_retrieve_available_schema": - { - "description": "The template for seeing what schema are available on the server to validate against.", - "$ref": "request_definitions/templates/GET_retrieve_available_schema.schema" - } - } -} diff --git a/api/request_definitions/POST.schema b/api/request_definitions/POST.schema deleted file mode 100755 index cf62d9ad..00000000 --- a/api/request_definitions/POST.schema +++ /dev/null @@ -1,40 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/POST.schema", - "title": "POST", - "description": "A POST request.", - "type": "object", - "properties": - { - "POST_objects_draft": - { - "description": "The template for drafting a new object.", - "$ref": "request_definitions/templates/POST_objects_draft.schema" - }, - "POST_objects_publish": - { - "description": "The template for publishing an object.", - "$ref": "request_definitions/templates/POST_objects_draft.schema" - }, - "POST_api_accounts_new": - { - "description": "Ask for a new account on the API.", - "$ref": "request_definitions/templates/POST_api_accounts_new.schema" - }, - "POST_object_listing_by_token": - { - "description": "Get all available objects for a given token.", - "$ref": "request_definitions/templates/POST_object_listing_by_token.schema" - }, - "POST_read_object": - { - "description": "The template for reading a(n) object(s).", - "$ref": "request_definitions/templates/POST_read_object.schema" - }, - "POST_validate_payload_against_schema": - { - "description": "The template for validating an object against a schema.", - "$ref": "request_definitions/templates/POST_validate_payload_against_schema.schema" - } - } -} diff --git a/api/request_definitions/templates/DELETE_delete_object_by_id.schema b/api/request_definitions/templates/DELETE_delete_object_by_id.schema deleted file mode 100755 index 52ecf43c..00000000 --- a/api/request_definitions/templates/DELETE_delete_object_by_id.schema +++ /dev/null @@ -1,55 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/product.schema.json", - "title": "POST_create_new_object", - "description": "A POST request for creating a new object.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - "template": - { - "description": "The name of the request template.", - "type": "string", - "pattern": "CREATE_NEW_OBJECT" - }, - "table": - { - "description": "The table to write the new object to.", - "type": "string" - }, - "object_id": - { - "description": "Either 'NEW' or an existing JSON ID in the repository.", - "type": "string" - }, - "schema": - { - "description": "The schema under which the POSTed JSON falls.", - "type": "string" - }, - "payload": - { - 
"description": "The JSON contents to be stored", - "type": "object" - }, - "state": - { - "description": "Either 'DRAFT' or 'PUBLISHED'.", - "type": "string", - "pattern": "^DRAFT$|^PUBLISHED$" - } - }, - "required": - [ - "template", - "table", - "object_id", - "schema", - "payload", - "state" - ] - } -} \ No newline at end of file diff --git a/api/request_definitions/templates/GET_activate_account.schema b/api/request_definitions/templates/GET_activate_account.schema deleted file mode 100755 index 0ca090d2..00000000 --- a/api/request_definitions/templates/GET_activate_account.schema +++ /dev/null @@ -1,25 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/GET_activate_account.schema", - "title": "GET_activate_account", - "description": "A GET request for activating a previously requested account.", - "type": "object", - "properties": - { - "email": - { - "description": "The email to activate.", - "type": "string" - }, - "temp_identifier": - { - "description": "A temporary token used to activate an account.", - "type": "string" - } - }, - "required": - [ - "email", - "temp_identifier" - ] -} diff --git a/api/request_definitions/templates/GET_get_object_by_id.schema b/api/request_definitions/templates/GET_get_object_by_id.schema deleted file mode 100755 index 52ecf43c..00000000 --- a/api/request_definitions/templates/GET_get_object_by_id.schema +++ /dev/null @@ -1,55 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/product.schema.json", - "title": "POST_create_new_object", - "description": "A POST request for creating a new object.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - "template": - { - "description": "The name of the request template.", - "type": "string", - "pattern": "CREATE_NEW_OBJECT" - }, - "table": - { - "description": "The table to write the new object to.", - "type": "string" - }, - "object_id": - { - "description": "Either 'NEW' or an existing JSON ID in the repository.", - "type": "string" - }, - "schema": - { - "description": "The schema under which the POSTed JSON falls.", - "type": "string" - }, - "payload": - { - "description": "The JSON contents to be stored", - "type": "object" - }, - "state": - { - "description": "Either 'DRAFT' or 'PUBLISHED'.", - "type": "string", - "pattern": "^DRAFT$|^PUBLISHED$" - } - }, - "required": - [ - "template", - "table", - "object_id", - "schema", - "payload", - "state" - ] - } -} \ No newline at end of file diff --git a/api/request_definitions/templates/GET_retrieve_available_schema.schema b/api/request_definitions/templates/GET_retrieve_available_schema.schema deleted file mode 100755 index 36af0e58..00000000 --- a/api/request_definitions/templates/GET_retrieve_available_schema.schema +++ /dev/null @@ -1,13 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/GET_retrieve_available_schema.schema", - "title": "GET_retrieve_available_schema", - "description": "A GET request to ask the API what schema are available to validate against.", - "type": "object", - "properties": - { - }, - "required": - [ - ] -} \ No newline at end of file diff --git a/api/request_definitions/templates/POST_convert_existing_object_between_schemas.schema b/api/request_definitions/templates/POST_convert_existing_object_between_schemas.schema deleted file mode 100755 index 52ecf43c..00000000 --- 
a/api/request_definitions/templates/POST_convert_existing_object_between_schemas.schema +++ /dev/null @@ -1,55 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/product.schema.json", - "title": "POST_create_new_object", - "description": "A POST request for creating a new object.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - "template": - { - "description": "The name of the request template.", - "type": "string", - "pattern": "CREATE_NEW_OBJECT" - }, - "table": - { - "description": "The table to write the new object to.", - "type": "string" - }, - "object_id": - { - "description": "Either 'NEW' or an existing JSON ID in the repository.", - "type": "string" - }, - "schema": - { - "description": "The schema under which the POSTed JSON falls.", - "type": "string" - }, - "payload": - { - "description": "The JSON contents to be stored", - "type": "object" - }, - "state": - { - "description": "Either 'DRAFT' or 'PUBLISHED'.", - "type": "string", - "pattern": "^DRAFT$|^PUBLISHED$" - } - }, - "required": - [ - "template", - "table", - "object_id", - "schema", - "payload", - "state" - ] - } -} \ No newline at end of file diff --git a/api/request_definitions/templates/POST_convert_payload_to_schema.schema b/api/request_definitions/templates/POST_convert_payload_to_schema.schema deleted file mode 100755 index 3b648aca..00000000 --- a/api/request_definitions/templates/POST_convert_payload_to_schema.schema +++ /dev/null @@ -1,44 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://example.com/product.schema.json", - "title": "POST_BCO", - "description": "A POST request to the BCO API.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - "object_id": - { - "description": "Either 'NEW' or an existing BCO ID in the repository.", - "type": "string", - "pattern": "^https://nih.gov/NIH_(\\d+)_v_(\\d+)$|^ftp://nih.gov/NIH_(\\d+)_v_(\\d+)$|^doi://nih.gov/NIH_(\\d+)_v_(\\d+)$|^http://nih.gov/NIH_(\\d+)_v_(\\d+)$|^NEW$" - }, - "schema": - { - "description": "The schema under which the POSTed BCO falls.", - "type": "string", - "pattern": "^IEEE 2791-2020$" - }, - "bco": - { - "description": "The BCO.", - "$ref": "https://opensource.ieee.org/2791-object/ieee-2791-schema/raw/master/2791object.json" - }, - "state": - { - "description": "Either 'DRAFT' or 'PUBLISHED'.", - "type": "string", - "pattern": "^DRAFT$|^PUBLISHED$" - } - }, - "required": - [ - "object_id", - "schema", - "bco", - "state" - ] - } -} \ No newline at end of file diff --git a/api/request_definitions/templates/POST_new_account.schema b/api/request_definitions/templates/POST_new_account.schema deleted file mode 100755 index b82f0811..00000000 --- a/api/request_definitions/templates/POST_new_account.schema +++ /dev/null @@ -1,20 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/POST_api_accounts_new.schema", - "title": "POST_api_accounts_new", - "description": "A POST request for creating a new account.", - "type": "object", - "properties": { - "email": { - "description": "The email to create a new account for.", - "type": "string" - }, - "hostname": { - "description": "Which host to activate the account on (send server information back to).", - "type": "string" - } - }, - "required": [ - "email" - ] -} diff --git a/api/request_definitions/templates/POST_object_listing_by_token.schema b/api/request_definitions/templates/POST_object_listing_by_token.schema deleted 
file mode 100755 index a3f9a951..00000000 --- a/api/request_definitions/templates/POST_object_listing_by_token.schema +++ /dev/null @@ -1,19 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/POST_object_listing_by_token.schema", - "title": "POST_object_listing_by_token", - "description": "A POST request for getting all objects associated with a token.", - "type": "object", - "properties": - { - "token": - { - "description": "The token to get objects for.", - "type": "string" - } - }, - "required": - [ - "token" - ] -} diff --git a/api/request_definitions/templates/POST_objects_draft.schema b/api/request_definitions/templates/POST_objects_draft.schema deleted file mode 100755 index 2639faf2..00000000 --- a/api/request_definitions/templates/POST_objects_draft.schema +++ /dev/null @@ -1,65 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/POST_create_new_object.schema", - "title": "POST_create_new_object", - "description": "A POST request for creating a new object.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - - "contents": - { - "description": "The JSON contents to be stored.", - "type": "object" - }, - "from_draft": - { - "description": "The draft ID to use to publish the object.", - "type": "string" - }, - "object_id": - { - "description": "The object ID to create a new version for.", - "type": "string" - }, - "owner_group": - { - "description": "Which group should own the object", - "type": "string" - }, - "retain_draft": - { - "description": "Retain the draft after publishing?", - "type": "string" - }, - "schema": - { - "description": "The schema under which the POSTed JSON falls.", - "type": "string" - }, - "state": - { - "description": "Either 'DRAFT' or 'PUBLISHED'.", - "type": "string", - "pattern": "^DRAFT$|^PUBLISHED$" - }, - "table": - { - "description": "The table to write the new object to.", - "type": "string" - } - }, - "required": - [ - "from_draft", - "owner_group", - "retain_draft", - "schema", - "state", - "table" - ] - } -} diff --git a/api/request_definitions/templates/POST_objects_publish.schema b/api/request_definitions/templates/POST_objects_publish.schema deleted file mode 100755 index 2639faf2..00000000 --- a/api/request_definitions/templates/POST_objects_publish.schema +++ /dev/null @@ -1,65 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/POST_create_new_object.schema", - "title": "POST_create_new_object", - "description": "A POST request for creating a new object.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - - "contents": - { - "description": "The JSON contents to be stored.", - "type": "object" - }, - "from_draft": - { - "description": "The draft ID to use to publish the object.", - "type": "string" - }, - "object_id": - { - "description": "The object ID to create a new version for.", - "type": "string" - }, - "owner_group": - { - "description": "Which group should own the object", - "type": "string" - }, - "retain_draft": - { - "description": "Retain the draft after publishing?", - "type": "string" - }, - "schema": - { - "description": "The schema under which the POSTed JSON falls.", - "type": "string" - }, - "state": - { - "description": "Either 'DRAFT' or 'PUBLISHED'.", - "type": "string", - "pattern": "^DRAFT$|^PUBLISHED$" - }, - "table": - { - "description": "The table to write the new object to.", - "type": "string" - } - 
}, - "required": - [ - "from_draft", - "owner_group", - "retain_draft", - "schema", - "state", - "table" - ] - } -} diff --git a/api/request_definitions/templates/POST_read_object.schema b/api/request_definitions/templates/POST_read_object.schema deleted file mode 100755 index 094df6ab..00000000 --- a/api/request_definitions/templates/POST_read_object.schema +++ /dev/null @@ -1,33 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/POST_read_object.schema", - "title": "POST_read_object", - "description": "A POST request for reading a(n) object(s).", - "type": "array", - "items": - { - "type": "object", - "properties": - { - "object_id": - { - "description": "The object ID to create a new version for.", - "type": "string" - }, - "table": - { - "description": "The table to write the new object to.", - "type": "string" - }, - "fields": - { - "description": "Which fields to read in the object", - "type": "string" - } - }, - "required": - [ - "table" - ] - } -} diff --git a/api/request_definitions/templates/POST_validate_payload_against_schema.schema b/api/request_definitions/templates/POST_validate_payload_against_schema.schema deleted file mode 100755 index a36f7e43..00000000 --- a/api/request_definitions/templates/POST_validate_payload_against_schema.schema +++ /dev/null @@ -1,33 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "request_definitions/templates/POST_validate_payload_against_schema.schema", - "title": "POST_validate_payload_against_schema", - "description": "A POST request to validate an object against a schema.", - "type": "array", - "items": - { - "type": "object", - "properties": - { - "payload": - { - "description": "The table to write the new object to.", - "type": "object" - }, - "schema_server": - { - "description": "The name of the schema on the server to use.", - "type": "string" - }, - "schema_own": - { - "description": "A schema provided with the request.", - "type": "object" - } - }, - "required": - [ - "payload" - ] - } -} \ No newline at end of file diff --git a/api/scripts/__init__.py b/api/scripts/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/api/scripts/method_specific/GET_draft_object_by_id.py b/api/scripts/method_specific/GET_draft_object_by_id.py deleted file mode 100755 index 11599062..00000000 --- a/api/scripts/method_specific/GET_draft_object_by_id.py +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env python3 -"""Get a draft by ID - -See if the object exists, and if so, -see if the requestor has permissions -for it. -""" - -from api.models import BCO -from api.scripts.utilities import UserUtils -from rest_framework import status, authtoken -from rest_framework.response import Response -from guardian.shortcuts import get_objects_for_user -from authentication.selectors import get_user_from_auth_token - -def get_draft_object_by_id(do_id, request): - """Get a draft object - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. If the user has permission to view the object - it is returned. If not the response is HTTP_403_FORBIDDEN. 
- """ - - filtered = BCO.objects.filter(object_id__regex=rf"(.*?)/{do_id}", state="DRAFT") - - if filtered.exists(): - if len(filtered) > 1: - # There are multiple matches; this shouldn't be possible - return Response( - data="The contents of the draft could not be sent back because" - "there are multiple draft matches. Please contact and admin.", - status=status.HTTP_400_BAD_REQUEST, - ) - # Get the requestor's info. - try: - user = UserUtils.UserUtils().user_from_request(request=request) - except authtoken.models.Token.DoesNotExist: - user = get_user_from_auth_token(request.META.get("HTTP_AUTHORIZATION").split(" ")[1]) - user_perms = UserUtils.UserUtils().prefix_perms_for_user( - flatten=True, user_object=user, specific_permission=["view"] - ) - user_objects = get_objects_for_user( - user=user, perms=[], klass=BCO, any_perm=True - ) - - # Does the requestor have permissions for the object? - full_object_id = filtered.values_list("object_id", flat=True)[0] - objected = BCO.objects.get(object_id=full_object_id) - prefix = objected.prefix - object_permission = objected in user_objects - group_permission = ("view_" + prefix) in user_perms - - if object_permission is True or group_permission is True: - return Response(data=objected.contents, status=status.HTTP_200_OK) - - return Response( - data="The contents of the draft could not be sent back because" - " the requestor does not have appropriate permissions.", - status=status.HTTP_401_UNAUTHORIZED, - ) - # the root ID does not exist at all. - return Response( - data="That draft could not be found on the server.", - status=status.HTTP_404_NOT_FOUND, - ) diff --git a/api/scripts/method_specific/GET_published_object_by_id.py b/api/scripts/method_specific/GET_published_object_by_id.py deleted file mode 100755 index 93cb2250..00000000 --- a/api/scripts/method_specific/GET_published_object_by_id.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python3 - -import re -import json -from api.models import BCO -from django.forms.models import model_to_dict -from rest_framework import status -from rest_framework.response import Response -from semver import VersionInfo as Version -from typing import Optional, Tuple - -# TODO: This should be put into a universal place to grab from - also -# duplicated in POST_api_objects_drafts_token.py - -BASEVERSION = re.compile( - r"""[vV]? - (?P0|[1-9]\d*) - (\. - (?P0|[1-9]\d*) - (\. - (?P0|[1-9]\d*) - )? - )? - """, - re.VERBOSE, -) - - -def coerce(version: str) -> Tuple[Version, Optional[str]]: - """ - Convert an incomplete version string into a semver-compatible Version - object - - * Tries to detect a "basic" version string (``major.minor.patch``). - * If not enough components can be found, missing components are - set to zero to obtain a valid semver version. - - :param str version: the version string to convert - :return: a tuple with a :class:`Version` instance (or ``None`` - if it's not a version) and the rest of the string which doesn't - belong to a basic version. - :rtype: tuple(:class:`Version` | None, str) - """ - - match = BASEVERSION.search(version) - if not match: - return (None, version) - - ver = { - key: 0 if value is None else value for key, value in match.groupdict().items() - } - ver = Version(**ver) - rest = match.string[match.end() :] # noqa:E203 - return ver, rest - - -def GET_published_object_by_id(oi_root): - """ - Get a published object given a root. - - See if the root ID even exists. 
- - We have to query twice because we don't - have a field in the BCO model to hold - the object version... - - Note the trailing slash in the regex search to prevent - sub-string matches (e.g. http://127.0.0.1:8000/BCO_5 and - http://127.0.0.1:8000/BCO_53 would both match the regex - http://127.0.0.1:8000/BCO_5 if we did not have the trailing - slash). - - """ - - # Note: This is not needed - removing out the underscore breaks the regex below, leaving in for the moment - # since I'm not sure why it was ever added (maybe there is a reason?) - # oi_root = oi_root.split("_")[0] + '{:06d}'.format(int(oi_root.split("_")[1])) - all_versions = list( - BCO.objects.filter( - object_id__regex=rf"(.*?)/{oi_root}/", state="PUBLISHED" - ).values_list("object_id", flat=True) - ) - # Get the latest version for this object if we have any. - if len(all_versions) > 0: - - # There was at least one version of the root ID, - # so now perform some logic based on whether or - # not a version was also passed. - - # First find the latest version of the object. - latest_versions = [i.split("/")[-1:][0] for i in all_versions] - l_version, _ = coerce(max(latest_versions, key=coerce)) - latest_version = latest_versions[-1] - # Kick back the latest version. - return Response( - data=model_to_dict(BCO.objects.get( - # object_id__regex=rf"{oi_root}/{l_version.major}.{l_version.minor}?.?{l_version.patch}", - object_id__regex=f'{oi_root}/{latest_version}', - state="PUBLISHED", - )), - status=status.HTTP_200_OK, - ) - - else: - # If all_versions has 0 length, then the - # the root ID does not exist at all. - print("No objects were found for the root ID provided.") - return Response( - data="No objects were found for the root ID provided.", - status=status.HTTP_404_NOT_FOUND, - ) diff --git a/api/scripts/method_specific/GET_published_object_by_id_with_version.py b/api/scripts/method_specific/GET_published_object_by_id_with_version.py deleted file mode 100755 index 801320b7..00000000 --- a/api/scripts/method_specific/GET_published_object_by_id_with_version.py +++ /dev/null @@ -1,114 +0,0 @@ -# The BCO model -from ...models import BCO - -# Responses -from rest_framework import status -from rest_framework.response import Response - - -def GET_published_object_by_id_with_version(oi_root, oi_version): - """ - Fetch a published BCO by the PREFIX, BCO NAME, and VERSION ID - """ - - #### - # We are assuming the oi_root looks something like this - # BCO_28 - # Where - # `BCO` is the prefix - # and `28` is the object name - #### - - # Split by '_' - underscores = oi_root.count("_") - if underscores < 1: - # ERROR - there should be an underscore separating the prefix and the BCO name - return Response( - data="This API requires that the prefix and the BCO name be separated by an underscore '_' in the object_id_root PATH variable.", - status=status.HTTP_400_BAD_REQUEST, - ) - - # TODO: This allows BCO Names to support underscores - not sure if that is valid though - # This can be 'fixed' by adding in a check for > 1 above - # Might be a better idea to split prefix, BCO name, and version into a three part get - bco_prefix, bco_name = oi_root.split("_", maxsplit=1) - - # retrieved = list( - # BCO.objects.filter( - # # contents__search=bco_name, - # prefix=bco_prefix, - # contents__provenance_domain__name=bco_name, - # contents__provenance_domain__version=oi_version, - # state='PUBLISHED' - # ).values_list( - # 'contents', - # flat=True - # ) - # ) - # The object ID either exists or it does not. 
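The trailing-slash note in GET_published_object_by_id above can be seen with a tiny regex check (object IDs here are placeholders):

    import re

    ids = ["http://127.0.0.1:8000/BCO_5/1.0", "http://127.0.0.1:8000/BCO_53/1.0"]
    [i for i in ids if re.search(r"(.*?)/BCO_5/", i)]  # trailing slash: matches only the BCO_5 object
    [i for i in ids if re.search(r"(.*?)/BCO_5", i)]   # no trailing slash: matches both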
- retrieved = list( - BCO.objects.filter( - object_id__regex=rf"(.*?)/{oi_root}/{oi_version}", state="PUBLISHED" - ).values_list("contents", flat=True) - ) - # Was the object found? - if len(retrieved) > 0: - # Kick it back. - return Response(data=retrieved, status=status.HTTP_200_OK) - else: - # If all_versions has 0 length, then the - # the root ID does not exist at all. - print("No objects were found for the root ID and version provided.") - return Response( - data="No objects were found for the root ID and version provided.", - status=status.HTTP_400_BAD_REQUEST, - ) - - # TODO: This code from here on down appears to be unreachable? The above if/else will always return the request - # Maybe this is placeholder code for something? - # Instantiate any necessary imports. - db = DbUtils.DbUtils() - - # First, get the table based on the requested published object. - table_name = (oi_root.split("_")[0] + "_publish").lower() - - # Does the table exist? - # TODO: replace with better table call... - available_tables = settings.MODELS["json_object"] - - if table_name in available_tables: - - # Construct the object ID. - constructed = object_id = ( - settings.PUBLIC_HOSTNAME + "/" + oi_root + "/" + oi_version - ) - - # Does the object exist in the table? - if ( - apps.get_model(app_label="api", model_name=table_name) - .objects.filter(object_id=constructed) - .exists() - ): - - # Get the object, then check the permissions. - objected = apps.get_model( - app_label="api", model_name=table_name - ).objects.get(object_id=constructed) - - return Response( - data=serializers.serialize( - "json", - [ - objected, - ], - ), - status=status.HTTP_200_OK, - ) - - else: - - return Response(status=status.HTTP_400_BAD_REQUEST) - - else: - - return Response(status=status.HTTP_400_BAD_REQUEST) diff --git a/api/scripts/method_specific/GET_retrieve_available_schema.py b/api/scripts/method_specific/GET_retrieve_available_schema.py deleted file mode 100755 index bcd643cc..00000000 --- a/api/scripts/method_specific/GET_retrieve_available_schema.py +++ /dev/null @@ -1,49 +0,0 @@ -# For the folder search. -from ..utilities import FileUtils - -from ..utilities import JsonUtils -import json - -# Put try catch in later to indicate failure to load schema... - - -def GET_retrieve_available_schema(bulk_request): - - # We don't use settings.VALIDATION_TEMPLATES because - # that contains paths on the server which we don't - # want to reveal. - - # Get the schema from the validation_definitions folder. - folder_schema = FileUtils.FileUtils().get_folder_tree( - search_folder="validation_definitions/" - )["paths"] - - # Define a list to hold the processed paths. - processed_paths = [] - - # Strip out everything that is above the server folder level. - for path in folder_schema: - - # Split the path up to help construct the root folder. - file_name_split = path.split("/") - - # Where is the 'validation_definitions/' item? - vd_index = file_name_split.index("validation_definitions") - - # Collapse everything after this index. - collapsed = "/".join(file_name_split[vd_index + 1 :]) - - # Set the name. - processed_paths.append(collapsed) - - # Create a usable structure. 
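To make the nested-dictionary construction just below concrete, two hypothetical schema paths would be folded up like this:

    processed_paths = ["IEEE/2791object.json", "IEEE/description_domain.json"]
    dct = {}
    for item in processed_paths:
        p = dct
        for x in item.split("/"):
            p = p.setdefault(x, {})
    # dct == {"IEEE": {"2791object.json": {}, "description_domain.json": {}}}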
- - # Source: https://stackoverflow.com/questions/9618862/how-to-parse-a-directory-structure-into-dictionary - dct = {} - - for item in processed_paths: - p = dct - for x in item.split("/"): - p = p.setdefault(x, {}) - - return {"request_status": "success", "contents": dct} diff --git a/api/scripts/method_specific/POST_api_objects_drafts_create.py b/api/scripts/method_specific/POST_api_objects_drafts_create.py deleted file mode 100755 index b9a74d27..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_create.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python3 - -"""Create BCO Draft - --------------------- -Creates a new BCO draft object. -""" -from api.models import BCO -from api.scripts.utilities import DbUtils, UserUtils -from api.model.prefix import prefix_table -from django.conf import settings -from django.contrib.auth.models import Group -from django.utils import timezone -from rest_framework import status, authtoken -from rest_framework.response import Response -from authentication.selectors import get_user_from_auth_token - -def post_api_objects_drafts_create(request): - """Create BCO Draft - - Parameters - ---------- - request: rest_framework.request. - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into - arbitrary media types. - """ - - db_utils = DbUtils.DbUtils() - try: - user = UserUtils.UserUtils().user_from_request(request=request) - except authtoken.models.Token.DoesNotExist: - user = get_user_from_auth_token(request.META.get("HTTP_AUTHORIZATION").split(" ")[1]) - prefix_perms = UserUtils.UserUtils().prefix_perms_for_user( - flatten=True, user_object=user, specific_permission=["add"] - ) - - # Define the bulk request. - bulk_request = request.data["POST_api_objects_draft_create"] - - # Get the object naming information. - object_naming_info = settings.OBJECT_NAMING - root_uri = settings.OBJECT_NAMING["root_uri"] - # Construct an array to return the objects. - returning = [] - any_failed = False - - # Since bulk_request is an array, go over each - # item in the array. - - for creation_object in bulk_request: - prefix = creation_object["prefix"].upper() - # Require the macro-level and draft-specific permissions. - if "add_" + prefix in prefix_perms and "draft_" + prefix in prefix_perms: - prefix_counter = prefix_table.objects.get(prefix=prefix) - if "object_id" in creation_object: - if BCO.objects.filter(object_id=creation_object["object_id"]).exists(): - returning.append( - db_utils.messages( - parameters={"object_id": creation_object["object_id"]} - )["409_object_conflict"] - ) - any_failed = True - continue - constructed_obj_id = creation_object["object_id"] - else: - object_num = format(prefix_counter.n_objects, "06d") - constructed_obj_id = ( - root_uri + "/" + prefix + "_" + object_num + "/DRAFT" - ) - creation_object["object_id"] = constructed_obj_id - - if Group.objects.filter( - name=creation_object["owner_group"].lower() - ).exists(): - - # TODO: abstract this out to DbUtils. 
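For a sense of what the constructed draft IDs look like, here is a small sketch with a hypothetical counter value and root URI (the real value comes from settings.OBJECT_NAMING["root_uri"]):

    root_uri = "http://127.0.0.1:8000"  # placeholder root URI
    prefix = "BCO"
    object_num = format(27, "06d")      # -> "000027"
    constructed_obj_id = root_uri + "/" + prefix + "_" + object_num + "/DRAFT"
    # -> "http://127.0.0.1:8000/BCO_000027/DRAFT"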
- # constructed_name = object_naming_info["uri_regex"].replace( - # "root_uri", object_naming_info["root_uri"] - # ) - # constructed_name = constructed_name.replace("prefix", prefix) - - # prefix_location = constructed_name.index(prefix) - # prefix_length = len(prefix) - # constructed_name = constructed_name[0 : prefix_location + prefix_length] - # - # creation_object["object_id"] = ( - # constructed_name - # + "_" - # + "{:06d}".format(prefix_counter.n_objects) - # + "/DRAFT" - # ) - # Make sure to create the object ID field in our draft. - creation_object["contents"]["object_id"] = constructed_obj_id - # Instantiate the owner group as we'll need it a few times here. - owner_group = Group.objects.get(name=creation_object["owner_group"]) - - # Django wants a primary key for the Group... - creation_object["owner_group"] = owner_group.name - - # Set the owner user (the requestor). - creation_object["owner_user"] = user.username - - # Give the creation object the prefix. - creation_object["prefix"] = prefix - - # This is a DRAFT. - creation_object["state"] = "DRAFT" - - # Set the datetime properly. - creation_object["last_update"] = timezone.now() - - # Write to the database. - objects_written = db_utils.write_object( - p_app_label="api", - p_model_name="bco", - p_fields=[ - "contents", - "last_update", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - p_data=creation_object, - ) - - if objects_written < 1: - # Issue with writing out to DB - returning.append( - db_utils.messages(parameters={})["400_bad_request"] - ) - any_failed = True - prefix_counter.n_objects = prefix_counter.n_objects + 1 - prefix_counter.save() - # Update the request status. - returning.append( - db_utils.messages( - parameters={"object_id": creation_object["object_id"]} - )["201_create"] - ) - - else: - # Update the request status. - returning.append(db_utils.messages(parameters={})["400_bad_request"]) - any_failed = True - - else: - # Update the request status. - returning.append( - db_utils.messages(parameters={"prefix": creation_object["prefix"]})[ - "401_prefix_unauthorized" - ] - ) - any_failed = True - - if any_failed: - return Response(status=status.HTTP_207_MULTI_STATUS, data=returning) - - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_delete.py b/api/scripts/method_specific/POST_api_objects_drafts_delete.py deleted file mode 100755 index 95d36b69..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_delete.py +++ /dev/null @@ -1,117 +0,0 @@ -# BCO model -from ...models import bco - -# For getting objects out of the database. -from ..utilities import DbUtils - -# User information -from ..utilities import UserUtils - -# Permisions for objects -from guardian.shortcuts import get_perms - -# Responses -from rest_framework import status -from rest_framework.response import Response - -# Source: https://codeloop.org/django-rest-framework-course-for-beginners/ - - -def POST_api_objects_drafts_delete(incoming): - - # Take the bulk request and delete a draft object from it. - - # Instantiate any necessary imports. - db = DbUtils.DbUtils() - uu = UserUtils.UserUtils() - - # The token has already been validated, - # so the user is guaranteed to exist. - - # Get the User object. - user = uu.user_from_request(rq=incoming) - - # Get the user's prefix permissions. - px_perms = uu.prefix_perms_for_user( - flatten=True, user_object=user, specific_permission=["add"] - ) - - # Define the bulk request. 
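Judging from the field access just below, a bulk delete request body would look roughly like this (the draft ID is a placeholder):

    payload = {
        "POST_api_objects_drafts_delete": [
            {"object_id": "http://127.0.0.1:8000/BCO_000027/DRAFT"}
        ]
    }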
- bulk_request = incoming.data["POST_api_objects_drafts_delete"] - - # Construct an array to return the objects. - returning = [] - - # Since bulk_request is an array, go over each - # item in the array. - for deletion_object in bulk_request: - - # Get the prefix for this draft. - standardized = deletion_object["object_id"].split("/")[-1].split("_")[0].upper() - - # Does the requestor have delete permissions for - # the *prefix*? - if "delete_" + standardized in px_perms: - - # The requestor has delete permissions for - # the prefix, but do they have object-level - # delete permissions? - - # This can be checked by seeing if the requestor - # is the object owner OR they are a user with - # object-level delete permissions OR if they are in a - # group that has object-level change permissions. - - # To check these options, we need the actual object. - if bco.objects.filter(object_id=deletion_object["object_id"]).exists(): - - objected = bco.objects.get(object_id=deletion_object["object_id"]) - - # We don't care where the delete permission comes from, - # be it a User permission or a Group permission. - all_permissions = get_perms(user, objected) - - if ( - user.username == objected.owner_user.username - or "delete_" + standardized in all_permissions - ): - - # Delete the object. - objected.delete() - - # Update the request status. - returning.append( - db.messages( - parameters={"object_id": deletion_object["object_id"]} - )["200_OK_object_delete"] - ) - - else: - - # Insufficient permissions. - returning.append( - db.messages(parameters={})["403_insufficient_permissions"] - ) - - else: - - # Couldn't find the object. - returning.append( - db.messages(parameters={"object_id": deletion_object["object_id"]}) - )["404_object_id"] - - else: - - # Update the request status. - returning.append( - db.messages(parameters={"prefix": standardized})[ - "401_prefix_unauthorized" - ] - ) - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - # For example, a table may not have been found for the first - # requested draft. - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_modify.py b/api/scripts/method_specific/POST_api_objects_drafts_modify.py deleted file mode 100755 index 73c55241..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_modify.py +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python3 -"""Modify Draft Object - --------------------- -Modifies a BCO object. The BCO object must be a draft in order to be -modifiable. The contents of the BCO will be replaced with the new -contents provided in the request body. -""" - -from api.models import BCO -from api.scripts.utilities import DbUtils -from api.scripts.utilities import UserUtils - -from django.utils import timezone -from guardian.shortcuts import get_perms -from rest_framework import status -from rest_framework.response import Response - -# Source: https://codeloop.org/django-rest-framework-course-for-beginners/ - - -def post_api_objects_drafts_modify(request): - """Modify Draft - - Take the bulk request and modify a draft object from it. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into arbitrary - media types. 
As this view is for a bulk operation, status 200 means - that the request was successfully processed for each item in the - request. A status of 207 means that some of the requests were - successfull. - """ - - db_utils = DbUtils.DbUtils() - user = UserUtils.UserUtils().user_from_request(request=request) - try: - bulk_request = request.data["POST_api_objects_drafts_modify"] - except KeyError as error: - return Response(status=status.HTTP_400_BAD_REQUEST, data={ - 'KeyError': f'{str(error)}' - }) - px_perms = UserUtils.UserUtils().prefix_perms_for_user( - flatten=True, user_object=user, specific_permission=["add"] - ) - - # Construct an array to return the objects. - returning = [] - any_failed = False - for draft_object in bulk_request: - try: - # Get the prefix for this draft. - prefix = draft_object["object_id"].split("/")[-2].split("_")[0].upper() - - # Does the requestor have change permissions for - # the *prefix*? - - # TODO: add permission setting view... - # if 'change_' + prefix in px_perms: - - if "add_" + prefix in px_perms: - - # The requestor has change permissions for - # the prefix, but do they have object-level - # change permissions? - - # This can be checked by seeing if the requestor - # is the object owner OR they are a user with - # object-level change permissions OR if they are in a - # group that has object-level change permissions. - # To check these options, we need the actual object. - if draft_object["object_id"] not in draft_object["contents"]["object_id"]: - returning.append( - db_utils.messages( - parameters={ - "object_id": draft_object["contents"]["object_id"], - "draft_object_id": draft_object["object_id"], - } - )["409_draft_object_id_conflict"] - ) - any_failed = True - continue - - if BCO.objects.filter( - object_id=draft_object["contents"]["object_id"] - ).exists(): - objected = BCO.objects.get( - object_id=draft_object["contents"]["object_id"] - ) - - # We don't care where the view permission comes from, - # be it a User permission or a Group permission. - all_permissions = get_perms(user, objected) - # TODO: add permission setting view... - if ( - user.username == objected.owner_user.username - or "add_" + prefix in px_perms - ): - - # # User does *NOT* have to be in the owner group! - # # to assign the object's group owner. - # if Group.objects.filter( - # name = draft_object['owner_group'].lower() - # ).exists(): - # - # Update the object. - # *** COMPLETELY OVERWRITES CONTENTS!!! *** - objected.contents = draft_object["contents"] - - if "state" in draft_object: - if draft_object["state"] == "DELETE": - objected.state = "DELETE" - - # Set the update time. - objected.last_update = timezone.now() - - # Save it. - objected.save() - - # Update the request status. - returning.append( - db_utils.messages( - parameters={"object_id": draft_object["object_id"]} - )["200_update"] - ) - else: - # Insufficient permissions. 
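Based on the keys this handler reads, a single-item modify request might look like the following (IDs are placeholders); the object_id inside contents has to match the draft being modified, and the stored contents are overwritten wholesale:

    payload = {
        "POST_api_objects_drafts_modify": [
            {
                "object_id": "http://127.0.0.1:8000/BCO_000027/DRAFT",
                "contents": {
                    "object_id": "http://127.0.0.1:8000/BCO_000027/DRAFT",
                    # ...remaining BCO domains go here...
                },
            }
        ]
    }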
- returning.append( - db_utils.messages(parameters={ - })["403_insufficient_permissions"] - ) - any_failed = True - - else: - returning.append( - db_utils.messages( - parameters={"object_id": draft_object["object_id"]} - )["404_object_id"] - ) - any_failed = True - else: - returning.append( - db_utils.messages(parameters={"prefix": prefix})[ - "401_prefix_unauthorized" - ] - ) - any_failed = True - except: - returning.append( - db_utils.messages(parameters={})[ - "400_bad_request" - ] - ) - any_failed = True - - if any_failed and len(returning) == 1: - if returning[0]["status_code"] == "403": - return Response(status=status.HTTP_403_FORBIDDEN, data=returning) - else: - return Response(status=status.HTTP_207_MULTI_STATUS, data=returning) - if any_failed and len(returning) > 1: - return Response(status=status.HTTP_207_MULTI_STATUS, data=returning) - - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_permissions.py b/api/scripts/method_specific/POST_api_objects_drafts_permissions.py deleted file mode 100755 index e5395c99..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_permissions.py +++ /dev/null @@ -1,158 +0,0 @@ -# BCO model -from api.models import BCO - -# For getting objects out of the database. -from ..utilities import DbUtils - -# User information -from ..utilities import UserUtils - -# Group info -from django.contrib.auth.models import Group - -# Permisions for objects -from guardian.shortcuts import get_groups_with_perms, get_perms, get_user_perms - -# Responses -from rest_framework import status -from rest_framework.response import Response - - -def POST_api_objects_drafts_permissions(incoming): - """ - Get BCO Permissions - - Returns the permissions for requested BCO objects. - """ - - # Instantiate any necessary imports. - db = DbUtils.DbUtils() - uu = UserUtils.UserUtils() - - # The token has already been validated, - # so the user is guaranteed to exist. - - # Get the User object. - user = uu.user_from_request(rq=incoming) - - # Get the user's prefix permissions. - px_perms = uu.prefix_perms_for_user(flatten=True, user_object=user) - - # Define the bulk request. - bulk_request = incoming.data["POST_api_objects_drafts_permissions"] - - # Construct an array to return the objects. - returning = [] - any_failed = False - - # Since bulk_request is an array, go over each - # item in the array. - for creation_object in bulk_request: - - # Get the prefix for this object. - standardized = creation_object["object_id"].split("/")[-1].split("_")[0].upper() - - # Does the requestor have view permissions for - # the *prefix*? - if "view_" + standardized in px_perms: - print( - "bulk_request", - list(BCO.objects.filter(object_id=creation_object["object_id"])), - ) - # The requestor has change view for - # the prefix, but do they have object-level - # view permissions? - - # This can be checked by seeing if the requestor - # is the object owner OR they are a user with - # object-level view permissions OR if they are in a - # group that has object-level view permissions. - - # To check these options, we need the actual object. - if BCO.objects.filter(object_id=creation_object["object_id"]).exists(): - objected = BCO.objects.get(object_id=creation_object["object_id"]) - - # We don't care where the view permission comes from, - # be it a User permission or a Group permission. 
- - # This is a bit redundant since we are getting the permissions - # again below but it's a quick fix to find - # basic view permissions for this user and object. - all_permissions = get_perms(user, objected) - - if ( - user.username == objected.owner_user.username - or "view_" + standardized in all_permissions - ): - - # Kick back the permissions, - # *** but only for this requestor (user) ***. - - # Create a dictionary to return the permissions. - perms = {"username": {}, "group_names": {}} - - # We want to return the permissions in fine detail - # by user permissions and group permissions. - up = get_user_perms(user, objected) - - perms["username"][user.username] = list(up) - - # Get user's groups. - user_groups = list( - Group.objects.filter(user=user.pk).values_list( - "name", flat=True - ) - ) - - gp = get_groups_with_perms(objected, attach_perms=True) - - # See which of the group permissions apply to - # the user's groups. - for g, p in gp.items(): - if g.name in user_groups: - perms["group_names"][g.name] = p - - # print(perms) - - # Update the request status. - returning.append( - db.messages( - parameters={ - "object_id": creation_object["object_id"], - "object_perms": perms, - } - )["200_OK_object_permissions"] - ) - else: - # Insufficient permissions. - returning.append( - db.messages(parameters={})["403_insufficient_permissions"] - ) - any_failed = True - else: - # Couldn't find the object. - returning.append( - db.messages(parameters={"object_id": creation_object["object_id"]})[ - "404_object_id" - ] - ) - any_failed = True - - else: - # Update the request status. - returning.append( - db.messages(parameters={"prefix": standardized})[ - "401_prefix_unauthorized" - ] - ) - any_failed = True - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - # For example, a table may not have been found for the first - # requested draft. - if any_failed: - # If any of the requests failed, lets let the caller know so they can parse the returning information. - return Response(status=status.HTTP_300_MULTIPLE_CHOICES, data=returning) - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_permissions_set.py b/api/scripts/method_specific/POST_api_objects_drafts_permissions_set.py deleted file mode 100755 index cf0aaaf1..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_permissions_set.py +++ /dev/null @@ -1,242 +0,0 @@ -# BCO model -from ...models import BCO - -# For getting objects out of the database. -from ..utilities import DbUtils - -# User information -from ..utilities import UserUtils - -# Permisions for objects -from guardian.shortcuts import ( - assign_perm, - get_perms, - get_groups_with_perms, - get_users_with_perms, - remove_perm, -) -from django.contrib.auth.models import Group, User, Permission - -# Responses -from rest_framework import status -from rest_framework.response import Response - - -def POST_api_objects_drafts_permissions_set(incoming): - """ - Set the permissions for given objects. - - """ - - # Instantiate any necessary imports. - db = DbUtils.DbUtils() - uu = UserUtils.UserUtils() - - # The token has already been validated, - # so the user is guaranteed to exist. - - # Get the User object. - user = uu.user_from_request(rq=incoming) - - # Get the user's prefix permissions. 
- px_perms = uu.prefix_perms_for_user( - flatten=True, user_object=user, specific_permission=["change"] - ) - - # Define the bulk request. - bulk_request = incoming.data["POST_api_objects_drafts_permissions_set"] - - # Construct an array to return the objects. - returning = [] - - # Since bulk_request is an array, go over each - # item in the array. - for permission_object in bulk_request: - - # Get the prefix for this object. - standardized = ( - permission_object["object_id"].split("/")[-1].split("_")[0].upper() - ) - - # Does the requestor have any change - # permissions for the prefix? - - # Notice that we do not look for "add" - # or "delete" permissions even though - # these are also object-level permissions. - - # In essence, we are asking whether or not - # the requestor can change any object - # under this prefix. - if "change_" + standardized in px_perms: - - # The requestor has change for - # the prefix, but do they have object-level - # change permissions? - - # This can be checked by seeing if the requestor - # is the object owner OR they are a user with - # object-level change permissions OR if they are in a - # group that has object-level change permissions. - - # To check these options, we need the actual object. - if BCO.objects.filter(object_id=permission_object["object_id"]).exists(): - - objected = BCO.objects.get(object_id=permission_object["object_id"]) - - # We don't care where the change permission comes from, - # be it a User permission or a Group permission. - all_permissions = get_perms(user, objected) - - if ( - user.username == objected.owner_user.username - or "change_" + objected.object_id in all_permissions - ): - - if "actions" in permission_object: - - # Set the working object to the actions. - action_set = permission_object["actions"] - - # Removals are processed first, then additions. - - # Remove the permissions provided, if any. - # TODO: This doesn't look like it would work here. 
- if "remove_permissions" in action_set: - for perm, assignee in action_set["remove_permissions"]: - if assignee == "users": - # TODO: if assignee is actually a string users, this will just loop through the characters - for u in assignee: - if uu.check_user_exists(un=u): - remove_perm( - perm=Permission.objects.get( - codename=perm - + "_" - + objected.object_id - ), - user_or_group=User.objects.get( - username=u - ), - obj=objected, - ) - if assignee == "groups": - for g in assignee: - if uu.check_group_exists(n=g): - remove_perm( - perm=Permission.objects.get( - codename=perm - + "_" - + objected.object_id - ), - user_or_group=Group.objects.get(name=g), - obj=objected, - ) - - if "full_permissions" in action_set: - for up, perms in get_users_with_perms( - obj=objected, attach_perms=True - ).items(): - for perm in perms: - remove_perm( - perm=perm, user_or_group=up, obj=objected - ) - - for gp, perms in get_groups_with_perms( - obj=objected, attach_perms=True - ).items(): - for perm in perms: - remove_perm( - perm=perm, user_or_group=gp, obj=objected - ) - - for perm, assignee in action_set[ - "full_permissions" - ].items(): - if assignee == "users": - for u in assignee: - if uu.check_user_exists(un=u): - assign_perm( - perm=Permission.objects.get( - codename=perm - + "_" - + objected.object_id - ), - user_or_group=User.objects.get( - username=u - ), - obj=objected, - ) - - if assignee == "groups": - for g in assignee: - if uu.check_group_exists(n=g): - assign_perm( - perm=Permission.objects.get( - codename=perm - + "_" - + objected.object_id - ), - user_or_group=Group.objects.get(name=g), - obj=objected, - ) - - if "add_permissions" in action_set: - for perm, assignee in action_set["add_permissions"].items(): - if assignee == "users": - for u in assignee: - if uu.check_user_exists(un=u): - assign_perm( - perm=Permission.objects.get( - codename=perm - + "_" - + objected.object_id - ), - user_or_group=User.objects.get( - username=u - ), - obj=objected, - ) - if assignee == "groups": - for g in assignee: - if uu.check_group_exists(n=g): - assign_perm( - perm=Permission.objects.get( - codename=perm - + "_" - + objected.object_id - ), - user_or_group=Group.objects.get(name=g), - obj=objected, - ) - - returning.append( - db.messages(parameters={"object_id": objected.object_id})[ - "200_OK_object_permissions_set" - ] - ) - else: - # Insufficient permissions. - returning.append( - db.messages(parameters={})["403_insufficient_permissions"] - ) - else: - # Couldn't find the object. - returning.append( - db.messages( - parameters={"object_id": permission_object["object_id"]} - )["404_object_id"] - ) - else: - # Update the request status. - returning.append( - db.messages(parameters={"prefix": standardized})[ - "401_prefix_unauthorized" - ] - ) - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - # For example, a table may not have been found for the first - # requested draft. 
- return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_publish.py b/api/scripts/method_specific/POST_api_objects_drafts_publish.py deleted file mode 100755 index e0ee71f4..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_publish.py +++ /dev/null @@ -1,217 +0,0 @@ -#!/usr/bin/env python3 -"""Publish draft - -publish a draft -""" - -from api.models import BCO -from api.model.prefix import prefix_table -from api.scripts.utilities import DbUtils, UserUtils -from django.contrib.auth.models import Group -from django.utils import timezone -from guardian.shortcuts import get_perms -from rest_framework import status, authtoken -from rest_framework.response import Response -from authentication.selectors import get_user_from_auth_token - -def post_api_objects_drafts_publish(request): - """Publish draft - - publish a draft - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. - - Returns - ------- - rest_framework.response.Response - An HttpResponse that allows its data to be rendered into arbitrary - media types. As this view is for a bulk operation, status 200 means - that the request was successfully processed for each item in the - request. A status of 300 means that some of the requests were - successfull. - """ - - returning = [] - any_failed = False - db_utils = DbUtils.DbUtils() - - try: - user = UserUtils.UserUtils().user_from_request(request=request) - except authtoken.models.Token.DoesNotExist: - user = get_user_from_auth_token(request.META.get("HTTP_AUTHORIZATION").split(" ")[1]) - prefix_perms = UserUtils.UserUtils().prefix_perms_for_user( - flatten=True, user_object=user - ) - try: - bulk_request = request.data["POST_api_objects_drafts_publish"] - except: - return Response(status=status.HTTP_400_BAD_REQUEST, data={"Request format not accepted."}) - - for publish_object in bulk_request: - if "draft_id" not in publish_object: - returning.append( - db_utils.messages(parameters={})[ - "400_bad_request" - ] - ) - any_failed = True - continue - - draft_exists = BCO.objects.filter( - object_id=publish_object["draft_id"], state="DRAFT" - ).exists() - - if draft_exists is False: - returning.append( - db_utils.messages(parameters={"object_id": publish_object["draft_id"]})[ - "404_object_id" - ] - ) - any_failed = True - continue - - objected = BCO.objects.get(object_id=publish_object["draft_id"]) - new_version = objected.contents["provenance_domain"]["version"] - prefix = publish_object["prefix"].upper() - prefix_counter = prefix_table.objects.get(prefix=prefix) - draft_id = publish_object["draft_id"] - - if publish_object.get("delete_draft") is not None: - delete_draft = publish_object["delete_draft"] - else: - delete_draft = False - - if "object_id" not in publish_object: - object_id = publish_object["draft_id"].split("/")[0:4] - object_id.append(new_version) - object_id = "/".join(object_id) - else: - object_id = publish_object["object_id"] - - versioned = {"published_id": object_id} - # versioned = db_utils.check_version_rules( - # published_id=object_id - # ) - prefix_auth = "publish_" + prefix in prefix_perms - object_exists = BCO.objects.filter(object_id=object_id).exists() - - if object_exists is True: - print(object_id) - parameters = {"object_id": object_id} - returning.append(db_utils.messages(parameters)["409_object_conflict"]) - any_failed = True - continue - - if draft_exists is True: - all_permissions = get_perms(user, objected) - is_owner = user.username 
== objected.owner_user.username - owner_group = Group.objects.get(name=user.username) - # can_publish = 'publish_' + publish_object['draft_id'] in all_permissions - if prefix_auth is True: - # if is_owner is True or can_publish is True: - if delete_draft is True: - objected.last_update = timezone.now() - objected.state = "PUBLISHED" - objected.owner_group = owner_group - objected.object_id = versioned["published_id"] - objected.contents["object_id"] = versioned["published_id"] - objected.save() - - # Update the request status. - returning.append( - db_utils.messages(parameters=versioned)[ - "200_OK_object_publish_draft_deleted" - ] - ) - - else: - new_object = {} - new_object["contents"] = objected.contents - new_object["object_id"] = object_id - new_object["contents"]["object_id"] = object_id - new_object["owner_group"] = owner_group - new_object["owner_user"] = objected.owner_user - new_object["prefix"] = objected.prefix - new_object["last_update"] = timezone.now() - new_object["schema"] = "IEEE" - new_object["state"] = "PUBLISHED" - - # Write to the database. - objects_written = db_utils.write_object( - p_app_label="api", - p_model_name="BCO", - p_fields=[ - "contents", - "last_update", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - p_data=new_object, - ) - prefix_counter.n_objects = prefix_counter.n_objects + 1 - prefix_counter.save() - if objects_written < 1: - # Issue with writing out to DB - returning.append( - db_utils.messages(parameters={})["400_bad_request"] - ) - any_failed = True - else: - # Update the request status. - returning.append( - db_utils.messages(parameters=versioned)[ - "200_OK_object_publish_draft_not_deleted" - ] - ) - - # else: - # # Insufficient permissions. - # returning.append(db_utils.messages( - # parameters={ })['403_insufficient_permissions'] - # ) - # any_failed = True - - else: - # Update the request status. - returning.append( - db_utils.messages(parameters={"prefix": prefix})[ - "401_prefix_publish_unauthorized" - ] - ) - any_failed = True - - # published = db_utils.publish( - # owner_group=Group.objects.get( - # name=user.username - # ).name, - # owner_user = user.username, - # prefix = prefix, - # publishable = objected, - # publishable_id = object_id, - # replace_draft = delete_draft - # ) - - # # Did the publishing go well? - # if type(published) is dict: - # # Update the request status. - # returning.append(db_utils.messages( - # parameters=versioned)['200_OK_object_publish'] - # ) - - # # Lastly, if we were given the directive to delete - # # the draft on publish, process that. - - # # Does the requestor have delete permissions on - # # the object? - - if any_failed: - return Response(status=status.HTTP_207_MULTI_STATUS, data=returning) - - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_read.py b/api/scripts/method_specific/POST_api_objects_drafts_read.py deleted file mode 100755 index e68a2153..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_read.py +++ /dev/null @@ -1,121 +0,0 @@ -# BCO model -from ...models import BCO - -# For getting objects out of the database. 
-from ..utilities import DbUtils - -# User information -from ..utilities import UserUtils - -# Permisions for objects -from guardian.shortcuts import get_perms - -# Responses -from rest_framework import status -from rest_framework.response import Response - - -def POST_api_objects_drafts_read(incoming): - """ - Take the bulk request and read a draft object from it. - """ - - # Instantiate any necessary imports. - db = DbUtils.DbUtils() - uu = UserUtils.UserUtils() - - # The token has already been validated, - # so the user is guaranteed to exist. - - # Get the User object. - user = uu.user_from_request(rq=incoming) - - # Get the user's prefix permissions. - px_perms = uu.prefix_perms_for_user( - flatten=True, user_object=user, specific_permission=["view"] - ) - - # Define the bulk request. - bulk_request = incoming.data["POST_api_objects_drafts_read"] - - # Construct an array to return the objects. - returning = [] - any_failed = False - - # Since bulk_request is an array, go over each - # item in the array. - for read_object in bulk_request: - # Get the prefix for this draft. - standardized = read_object["object_id"].split("/")[-1].split("_")[0].upper() - - # Does the requestor have view permissions for - # the *prefix*? - if "view_" + standardized in px_perms: - - # The requestor has view permissions for - # the prefix, but do they have object-level - # view permissions? - - # This can be checked by seeing if the requestor - # is the object owner OR they are a user with - # object-level view permissions OR if they are in a - # group that has object-level view permissions. - - # To check these options, we need the actual object. - if BCO.objects.filter(object_id=read_object["object_id"]).exists(): - objected = BCO.objects.get(object_id=read_object["object_id"]) - - # We don't care where the view permission comes from, - # be it a User permission or a Group permission. - all_permissions = get_perms(user, objected) - - if ( - user.username == objected.owner_user.username - or "view_" + standardized in all_permissions - ): - - # Read the object. - returning.append( - db.messages( - parameters={ - "contents": objected.contents, - "object_id": read_object["object_id"], - } - )["200_OK_object_delete"] - ) - - else: - - # Insufficient permissions. - returning.append( - db.messages(parameters={})["403_insufficient_permissions"] - ) - any_failed = True - - else: - - # Couldn't find the object. - returning.append( - db.messages(parameters={"object_id": read_object["object_id"]})[ - "404_object_id" - ] - ) - any_failed = True - - else: - # Update the request status. - returning.append( - db.messages(parameters={"prefix": standardized})[ - "401_prefix_unauthorized" - ] - ) - any_failed = True - - # As this view is for a bulk operation, status 200 - # means that the request was successfully processed, - # but NOT necessarily each item in the request. - # For example, a table may not have been found for the first - # requested draft. 
- if any_failed: - return Response(status=status.HTTP_300_MULTIPLE_CHOICES, data=returning) - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_drafts_token.py b/api/scripts/method_specific/POST_api_objects_drafts_token.py deleted file mode 100755 index 43438e2e..00000000 --- a/api/scripts/method_specific/POST_api_objects_drafts_token.py +++ /dev/null @@ -1,238 +0,0 @@ -#!/usr/bin/env python3 -"""Retrieve Draft From Token - - -""" - -import re - -# Concatenating QuerySets -from itertools import chain -from typing import Optional, Tuple -from api.models import BCO -from api.scripts.utilities import UserUtils - -# Object-level permissions -from guardian.shortcuts import get_objects_for_user - -# Responses -from rest_framework import status -from rest_framework.response import Response - -# Below is helper code to deal with how we are allowing non standard -# versions (i.e. 1.2 instead of 1.2.0, etc). - -import semver -from semver import VersionInfo as Version - -BASEVERSION = re.compile( - r"""[vV]? - (?P0|[1-9]\d*) - (\. - (?P0|[1-9]\d*) - (\. - (?P0|[1-9]\d*) - )? - )? - """, - re.VERBOSE, -) - - -def coerce(version: str) -> Tuple[Version, Optional[str]]: - """ - Convert an incomplete version string into a semver-compatible Version - object - - * Tries to detect a "basic" version string (``major.minor.patch``). - * If not enough components can be found, missing components are - set to zero to obtain a valid semver version. - - Parameters - ---------- - version: str - the version string to convert - - Returns - ------- - tuple(:class:`Version` | None, str) - a tuple with a :class:`Version` instance (or ``None`` - if it's not a version) and the rest of the string which doesn't - belong to a basic version. - """ - - match = BASEVERSION.search(version) - if not match: - return (None, version) - - ver = { - key: 0 if value is None else value for key, value in match.groupdict().items() - } - ver = Version(**ver) - rest = match.string[match.end() :] # noqa:E203 - return ver, rest - - -def POST_api_objects_drafts_token(rqst, internal=False): - """ - Get all DRAFT objects for a token. - - Parameters - ---------- - rqst: rest_framework.request.Request - Django request object. - internal: bool - denotes if the call being made to this handler internally - - Returns - ------- - - """ - - # The token has already been validated, - # so the user is guaranteed to exist. - - # Get the user's info. - # Instantiate UserUtils. - user_utils = UserUtils.UserUtils() - - # Get the user object. - user_info = user_utils.user_from_request(request=rqst) - # Any object that a user has access to - # in any way counts as an "object". - # That is, any permission counts as - # a "view" permission... - - # However, the prefix permissions must - # be in place for the user to view - # anything. Recall that prefix - # permissions override any object-level - # permissions. - - # We can't just use a straight filter here - # because we have to use two different - # models (the prefix permissions on the - # one hand and the BCO objects on the other). - - # First, get all prefixes available to the - # user. - user_prefixes = user_utils.prefixes_for_user(user_object=user_info) - - # Now get any object where the user has an - # object-level permission. - - # Use an empty list of perms to get ANY perm. 
- # Source: https://stackoverflow.com/a/24980558 - user_objects = get_objects_for_user( - user=user_info, perms=[], klass=BCO, any_perm=True - ) - - # Now get all objects under these prefixes. - prefix_objects = BCO.objects.filter(prefix__in=user_prefixes, state="DRAFT") - - # Assume all the values are supposed to be returned. - # Source: https://stackoverflow.com/a/51733590 - return_values = [ - "contents", - "last_update", - "object_class", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ] - - # If there are any valid keys in the request, - # use them to narrow down the fields. - - # Redundant logic here since the schema check - # would catch this... - if "fields" in rqst.data["POST_api_objects_drafts_token"]: - - # Take the fields and find their intersection with - # the available fields. - # Source: https://stackoverflow.com/a/3697438 - common_fields = list( - set(rqst.data["POST_api_objects_drafts_token"]["fields"]) - & set(return_values) - ) - - if len(common_fields) > 0: - return_values = common_fields - - # Return based on whether or not we're using an internal - # call. - if not internal: - print( - " Not Internal, user response: {}".format( - user_objects.intersection(prefix_objects).values(*return_values) - ) - ) - # Get the user's DRAFT objects. - return Response( - data=user_objects.intersection(prefix_objects).values(*return_values), - status=status.HTTP_200_OK, - ) - - elif internal: - # Concatenate the QuerySets. - # Source: https://stackoverflow.com/a/434755 - - # Get the user's DRAFT objects AND - # add in the published objects. - # TODO: This needs to only return the most recent PUBLISHED objects not all of the versions - - published = BCO.objects.filter(state="PUBLISHED").values() - # unique_published = [] - unique_published = set() - - # E.g. 
- # published[0]["contents"]["object_id"] = 'http://127.0.0.1:8000/BCO_000010/1.0' - - bcos = {} - for pub in published: - # TODO: We should move this out of a try except and try to handle various situations, - # this is currently assuming that the format is - # http://URL:PORT/BCO ACCESSION/BCO VERSION - this may not always be true - try: - bco_url, bco_id_accession, bco_id_version = pub["object_id"].rsplit( - "/", 2 - ) - bco_id_name = bco_url + "/" + bco_id_accession - except Exception as error: - print( - "Biocompute Name, Version, and URL not formatted as expected: {}".format( - error - ) - ) - return Response(status=status.HTTP_400_BAD_REQUEST) - if bco_id_name in bcos: - # Other version of this BCO object exists - current_version = bcos[bco_id_name]["bco_version"] - # if semver.compare(bco_id_version, current_version, key=coerce): - # # New one is newer version, set: - if float(current_version) < float(bco_id_version): - bcos[bco_id_name] = { - "bco_name": bco_id_name, - "bco_version": current_version, - "bco_object": pub, - } - else: - pass - else: - # Not in dictionary yet - bcos[bco_id_name] = { - "bco_name": bco_id_name, - "bco_version": bco_id_version, - "bco_object": pub, - } - for key, value in bcos.items(): - unique_published.add(value["bco_object"]["id"]) - unique_published = bco.objects.filter(id__in=unique_published) - result_list = chain( - unique_published.values(*return_values), - prefix_objects.values(*return_values), - ) - return Response(data=result_list, status=status.HTTP_200_OK) diff --git a/api/scripts/method_specific/POST_api_objects_publish.py b/api/scripts/method_specific/POST_api_objects_publish.py deleted file mode 100755 index ed5e5a0b..00000000 --- a/api/scripts/method_specific/POST_api_objects_publish.py +++ /dev/null @@ -1,183 +0,0 @@ -#!/usr/bin/env python3 -"""Bulk Publish - --------------------- -Take the bulk request and publish objects directly. -""" - -from api.models import BCO -from api.model.prefix import prefix_table, Prefix -from api.scripts.utilities.DbUtils import DbUtils as db_utils -from api.scripts.utilities.UserUtils import UserUtils as user_utils -from api.scripts.utilities.JsonUtils import parse_bco -from django.conf import settings -from django.utils import timezone -from rest_framework import status -from rest_framework.response import Response - - -def post_api_objects_publish(incoming): - """ - Take the bulk request and publish objects directly. 
- """ - - root_uri = settings.OBJECT_NAMING["root_uri"] - user = user_utils().user_from_request(request=incoming) - px_perms = user_utils().prefix_perms_for_user(flatten=True, user_object=user) - bulk_request = incoming.data["POST_api_objects_publish"] - returning = [] - any_failed = False - results = {} - for publish_object in bulk_request: - try: - results = parse_bco(publish_object["contents"], results) - except KeyError as error: - returning.append( - db_utils().messages(parameters={"errors": str(error)})[ - "400_non_publishable_object" - ] - ) - any_failed = True - continue - object_key = publish_object["contents"]["object_id"] - if results[object_key]["number_of_errors"] > 0: - returning.append( - db_utils().messages(parameters={"errors": results})[ - "400_non_publishable_object" - ] - ) - any_failed = True - continue - - prefix = publish_object["prefix"].upper() - if Prefix.objects.filter(prefix=prefix).exists(): - prefix_counter = prefix_table.objects.get(prefix=prefix) - - if "publish_" + prefix in px_perms: - if "object_id" in publish_object: - accession = publish_object["object_id"].split("/")[-2] - version = publish_object["object_id"].split("/")[-1] - object_num = int( - publish_object["object_id"].split("_")[1].split("/")[0] - ) - constructed_obj_id = ( - root_uri - + "/" - + accession - + "/" - + publish_object["contents"]["provenance_domain"]["version"] - ) - if BCO.objects.filter(object_id__contains=accession+'/'+version).exists(): - # import pdb; pdb.set_trace() - returning.append( - db_utils().messages(parameters={"object_id": accession+'/'+version})[ - "409_object_conflict" - ] - ) - any_failed = True - continue - if publish_object["object_id"] != constructed_obj_id: - returning.append( - db_utils().messages( - parameters={ - "object_id": publish_object["object_id"], - "constructed_obj_id": constructed_obj_id, - } - )["409_object_id_conflict"] - ) - any_failed = True - continue - new_object = {} - new_object["contents"] = publish_object["contents"] - new_object["object_id"] = constructed_obj_id - new_object["contents"]["object_id"] = constructed_obj_id - new_object["owner_group"] = publish_object["owner_group"] - new_object["owner_user"] = user.username - new_object["prefix"] = prefix - new_object["last_update"] = timezone.now() - new_object["schema"] = "IEEE" - new_object["state"] = "PUBLISHED" - - objects_written = db_utils().write_object( - p_app_label="api", - p_model_name="BCO", - p_fields=[ - "contents", - "last_update", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - p_data=new_object, - ) - if prefix_counter.n_objects < object_num: - prefix_counter.n_objects = object_num + 1 - prefix_counter.save() - returning.append( - db_utils().messages( - parameters={"object_id": constructed_obj_id} - )["201_create"] - ) - else: - object_num = format(prefix_counter.n_objects, "06d") - version = publish_object["contents"]["provenance_domain"]["version"] - constructed_obj_id = ( - root_uri + "/" + prefix + "_" + object_num + "/" + version - ) - - new_object = {} - new_object["contents"] = publish_object["contents"] - new_object["object_id"] = constructed_obj_id - new_object["contents"]["object_id"] = constructed_obj_id - new_object["owner_group"] = publish_object["owner_group"] - new_object["owner_user"] = user.username - new_object["prefix"] = prefix - new_object["last_update"] = timezone.now() - new_object["schema"] = "IEEE" - new_object["state"] = "PUBLISHED" - - objects_written = db_utils().write_object( - p_app_label="api", - 
p_model_name="BCO", - p_fields=[ - "contents", - "last_update", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - p_data=new_object, - ) - - prefix_counter.n_objects = prefix_counter.n_objects + 1 - prefix_counter.save() - returning.append( - db_utils().messages( - parameters={"object_id": constructed_obj_id} - )["201_create"] - ) - - else: - returning.append( - db_utils().messages(parameters={"prefix": prefix})[ - "401_prefix_unauthorized" - ] - ) - any_failed = True - - else: - returning.append( - db_utils().messages(parameters={"prefix": prefix})["404_missing_prefix"] - ) - any_failed = True - - if any_failed: - return Response(status=status.HTTP_207_MULTI_STATUS, data=returning) - - return Response(status=status.HTTP_200_OK, data=returning) diff --git a/api/scripts/method_specific/POST_api_objects_published.py b/api/scripts/method_specific/POST_api_objects_published.py deleted file mode 100644 index 3e9e8f30..00000000 --- a/api/scripts/method_specific/POST_api_objects_published.py +++ /dev/null @@ -1,115 +0,0 @@ -# BCOs -from ...models import BCO - -# User information -from ..utilities import UserUtils - -# Object-level permissions -from guardian.shortcuts import get_objects_for_user - -# Concatenating QuerySets -from itertools import chain - -# Responses -from rest_framework import status -from rest_framework.response import Response - -# Below is helper code to deal with how we are allowing non standard versions (i.e. 1.2 instead of 1.2.0, etc). -import re -import semver -from semver import VersionInfo as Version -from typing import Optional, Tuple - -# TODO: This is repeated code, should consolidate -BASEVERSION = re.compile( - r"""[vV]? - (?P0|[1-9]\d*) - (\. - (?P0|[1-9]\d*) - (\. - (?P0|[1-9]\d*) - )? - )? - """, - re.VERBOSE, -) - - -def coerce(version: str) -> Tuple[Version, Optional[str]]: - """ - Convert an incomplete version string into a semver-compatible Version - object - * Tries to detect a "basic" version string (``major.minor.patch``). - * If not enough components can be found, missing components are - set to zero to obtain a valid semver version. - :param str version: the version string to convert - :return: a tuple with a :class:`Version` instance (or ``None`` - if it's not a version) and the rest of the string which doesn't - belong to a basic version. - :rtype: tuple(:class:`Version` | None, str) - """ - match = BASEVERSION.search(version) - if not match: - return (None, version) - - ver = { - key: 0 if value is None else value for key, value in match.groupdict().items() - } - ver = Version(**ver) - rest = match.string[match.end() :] # noqa:E203 - return ver, rest - - -def POST_api_objects_published(): - """ - Get All published objects (publicly available) - """ - - published = BCO.objects.filter(state="PUBLISHED").values() - unique_published = [] - - # E.g. 
- # published[0]["contents"]["object_id"] = 'http://127.0.0.1:8000/BCO_000010/1.0' - - bcos = {} - for p in published: - # TODO: We should move this out of a try except and try to handle various situations, this is currently - # assuming that the format is http://URL:PORT/BCO NAME/BCO VERSION - this may not always be true - try: - bco_url, bco_id_name, bco_id_version = p["contents"]["object_id"].rsplit( - "/", 2 - ) - except Exception as e: - print( - "Biocompute Name, Version, and URL not formatted as expected: {}".format( - e - ) - ) - return Response(status=status.HTTP_400_BAD_REQUEST) - - if bco_url in bcos: - # Other version of this BCO object exists - current_version = bcos[bco_url]["bco_version"] - - if semver.compare(bco_id_version, current_version, key=coerce): - # New one is newer version, set: - bcos[bco_url] = { - "bco_name": bco_id_name, - "bco_version": bco_id_version, - "bco_object": p, - } - - else: - # Do nothing - pass - else: - # Not in dictionary yet - bcos[bco_url] = { - "bco_name": bco_id_name, - "bco_version": bco_id_version, - "bco_object": p, - } - for key, value in bcos.items(): - unique_published.append(value["bco_object"]) - - return Response(data=unique_published, status=status.HTTP_200_OK) diff --git a/api/scripts/method_specific/POST_api_objects_search.py b/api/scripts/method_specific/POST_api_objects_search.py deleted file mode 100755 index 6b3cfe27..00000000 --- a/api/scripts/method_specific/POST_api_objects_search.py +++ /dev/null @@ -1,120 +0,0 @@ -#!/usr/bin/env python3 -"""BCO Search - -""" - -from itertools import chain - -from api.models import BCO -from api.model.prefix import Prefix -from api.scripts.utilities import UserUtils -from guardian.shortcuts import get_objects_for_user -from rest_framework import status -from rest_framework.response import Response - - -def post_api_objects_search(request): - """Search for BCOs - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. 
- - Returns - ------- - List of BCOs that met search criteria - - """ - - return_values = [ - "contents", - "last_update", - "object_class", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ] - - query = request.data["POST_api_objects_search"][0] - search_type = query["type"] - try: - search_value = query["search"] - except KeyError: - search_value = "" - user_utils = UserUtils.UserUtils() - user_info = request._user - user_prefixes = user_utils.prefixes_for_user(user_object=user_info) - - prefix_perms = user_utils.prefix_perms_for_user( - flatten=True, user_object=user_info, specific_permission=["add"] - ) - - if search_type == "bco_id": - publish_list = BCO.objects.filter( - object_id__icontains=search_value, state="PUBLISHED" - ) - if user_info.username == "anon": - result_list = chain(publish_list.values(*return_values)) - else: - user_objects = get_objects_for_user( - user=user_info, perms=[], klass=BCO, any_perm=True - ) - draft_list = BCO.objects.filter( - object_id__icontains=search_value, - prefix__in=user_prefixes, - state="DRAFT", - ).exclude(state="DELETE") - bco_list = draft_list.union(publish_list) - result_list = chain(bco_list.values(*return_values)) - - if search_type == "prefix": - search_value = search_value.upper() - try: - prefix = Prefix.objects.get(prefix=search_value).prefix - - except Prefix.DoesNotExist: - return Response( - status=status.HTTP_404_NOT_FOUND, - data={ - "request_status": "FAILURE", - "status_code": "404", - "message": "That prefix was not found on this server.", - }, - ) - - if prefix in user_prefixes: - bco_list = ( - BCO.objects.filter(prefix=prefix).values().exclude(state="DELETE") - ) - result_list = chain(bco_list.values(*return_values)) - - else: - return Response( - status=status.HTTP_403_FORBIDDEN, - data={ - "request_status": "FAILURE", - "status_code": "403", - "message": "The token provided does not have sufficient" - " permissions for the requested prefix.", - }, - ) - - if search_type == "mine": - if user_info.username == "anon": - result_list = chain( - BCO.objects.filter(state="PUBLISHED").values(*return_values) - ) - - else: - result_list = chain( - BCO.objects.filter(owner_user=user_info) - .exclude(state="DELETE") - .values(*return_values) - ) - # print(len(list(result_list))) - - return Response(status=status.HTTP_200_OK, data=result_list) diff --git a/api/scripts/method_specific/POST_api_objects_token.py b/api/scripts/method_specific/POST_api_objects_token.py deleted file mode 100755 index a0a8a33e..00000000 --- a/api/scripts/method_specific/POST_api_objects_token.py +++ /dev/null @@ -1,25 +0,0 @@ -# Draft objects -from .POST_api_objects_drafts_token import POST_api_objects_drafts_token - -# Responses -from rest_framework import status -from rest_framework.response import Response - - -def POST_api_objects_token(rqst): - """ - Get all objects for a token. - - The token has already been validated, - so the user is guaranteed to exist. - - Make the internal call, but change - the request key so that we can re-use - POST_api_objects_draft_token, and mark the internal - flag as True so that we can get published - objects. - """ - rqst.data["POST_api_objects_drafts_token"] = rqst.data.pop("POST_api_objects_token") - - # Get the user's objects. 
- return POST_api_objects_drafts_token(rqst=rqst, internal=True) diff --git a/api/scripts/method_specific/POST_validate_payload_against_schema.py b/api/scripts/method_specific/POST_validate_payload_against_schema.py deleted file mode 100755 index a9171541..00000000 --- a/api/scripts/method_specific/POST_validate_payload_against_schema.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python3 -"""Bulk Validate BioCompute Objects -""" - -from rest_framework import status -from rest_framework.response import Response -from api.scripts.utilities.JsonUtils import parse_bco - - -def post_validate_bco(request): - """Bulk BCO Validation - - Take the bulk request and validate each BCO. - - Parameters - ---------- - request : rest_framework.request.Request - The bulk request object. - - Returns - ------- - Response : dict - A rest framework response object. The response data is a list of - dictionaries, each of which corisponding to one of the BCOs submitted - for validation. - """ - - bco_list = request.data["POST_validate_bco"] - - results = {} - any_failed = False - - for bco in bco_list: - try: - results = parse_bco(bco, results) - - if bco["object_id"] == '': - identifier = bco_list.index(bco) - results[identifier] = results[''] - del results[''] - else: - identifier = bco["object_id"] - - if results[identifier]["number_of_errors"] == 0: - results[identifier]["error_detail"] = ["BCO Valid"] - else: - any_failed = True - - except Exception as error: - results[bco_list.index(bco)] = { - "number_of_errors": 1, - "error_detail": ["Bad request. BCO is not formatted correctly."] - } - any_failed = True - - if any_failed is True: - return Response(status=status.HTTP_207_MULTI_STATUS, data=results) - - return Response(status=status.HTTP_200_OK, data=results) diff --git a/api/scripts/method_specific/__init__.py b/api/scripts/method_specific/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/api/scripts/utilities/DbUtils.py b/api/scripts/utilities/DbUtils.py deleted file mode 100755 index 398e86e1..00000000 --- a/api/scripts/utilities/DbUtils.py +++ /dev/null @@ -1,986 +0,0 @@ -#!/usr/bin/env python3 -"""DB Utilities -Functions for operations with DB -""" - -import random -import re -import uuid -import json -import datetime -import requests -from api.models import BCO -from api.serializers import getGenericSerializer -from api.scripts.utilities import UserUtils -from django.apps import apps -from django.conf import settings -from django.contrib.auth.models import Group, User -from django.contrib.contenttypes.models import ContentType -from django.utils import timezone - - -class DbUtils: - """Class Description - ----------------- - These methods are for interacting with our sqlite database. - Checking whether or not an object exists. - """ - - def check_object_id_exists(self, p_app_label, p_model_name, p_object_id): - """Simple existence check. - Source: https://stackoverflow.com/a/9089028 - Source: https://docs.djangoproject.com/en/3.1/ref/models/querysets/#exists - """ - - if ( - apps.get_model(app_label=p_app_label, model_name=p_model_name) - .objects.filter(object_id=p_object_id) - .exists() - ): - return None - else: - return 1 - - # Checking whether or not a user exists. - def check_user_exists(self, p_app_label, p_model_name, p_email): - """Simple existence check. 
- Source: https://stackoverflow.com/a/9089028 - Source: https://docs.djangoproject.com/en/3.1/ref/models/querysets/#exists - """ - - if ( - apps.get_model(app_label=p_app_label, model_name=p_model_name) - .objects.filter(email=p_email) - .exists() - ): - - return 1 - - else: - - return None - - # Check version rules - def check_version_rules(self, published_id): - """BCO Version Check - Potentially publishing a new version - of a published object, but we have to check to - see if the provided URI exists in the publishing table. - - We can take the exact version of the object ID OR - only the root version. For example, - 'http://hostname/some/other/paths/BCO_5' and - 'http://hostname/some/other/paths/BCO_5/3.4' would invoke the same - logic here, assuming that version 3.4 of BCO_5 is the latest version. - """ - - # Does the provided object ID exist? - if BCO.objects.filter(object_id=published_id).exists(): - - split_up = published_id.split("/") - # Get the version. - version = split_up[-1:][0] - if version == "DRAFT": - split_up[len(split_up) - 1] = "1.0" - return {"published_id": "/".join(split_up)} - - else: - # Increment the minor version. - incremented = version.split(".") - incremented[1] = int(incremented[1]) + 1 - incremented = incremented[0] + "." + str(incremented[1]) - - # Create the object ID. - split_up[len(split_up) - 1] = incremented - - # Kick back the minor-incremented object ID. - return {"published_id": "/".join(split_up)} - - else: - - # If the EXACT object ID wasn't found, then - # the user may have provided either a root version - # of the URI or a version of the same root URI. - - # If the provided version is larger - # than the version that would be generated automatically, - # then that provided version is used. - - # First determine whether or not the provided URI - # only has the root or has the root and the version. - - # Should do this by using settings.py root_uri - # information... - - # Split up the URI into the root ID and the version. - root_uri = "" - version = "" - - if re.match(r"(.*?)/[A-Z]+_(\d+)$", published_id): - - # Only the root ID was passed. - root_uri = published_id - - elif re.match(r"(.*?)/[A-Z]+_(\d+)/(\d+)\.(\d+)$", published_id): - - # The root ID and the version were passed. - split_up = published_id.split("/") - - root_uri = "/".join(split_up[:-1]) - - version = split_up[-1:] - - # See if the root ID even exists. - - # Note the trailing slash in the regex search to prevent - # sub-string matches (e.g. http://127.0.0.1:8000/BCO_5 and - # http://127.0.0.1:8000/BCO_53 would both match the regex - # http://127.0.0.1:8000/BCO_5 if we did not have the trailing - # slash). - all_versions = list( - BCO.objects.filter( - object_id__regex=rf"{root_uri}/", state="PUBLISHED" - ).values_list("object_id", flat=True) - ) - - # Get the latest version for this object if we have any. - if len(all_versions) > 0: - - # There was at least one version of the root ID, - # so now perform some logic based on whether or - # not a version was also passed. - - # First find the latest version of the object. - latest_major = 0 - latest_minor = 0 - - latest_version = [i.split("/")[-1:][0] for i in all_versions] - - for i in latest_version: - - major_minor_split = i.split(".") - - if int(major_minor_split[0]) >= latest_major: - if int(major_minor_split[1]) >= latest_minor: - latest_major = int(major_minor_split[0]) - latest_minor = int(major_minor_split[1]) - - # The version provided may fail, so create a flag to - # track this. 
- failed_version = False - - # If the root ID and the version were passed, check - # to see if the version given is greater than that which would - # be generated automatically. - if version != "": - - # We already have the automatically generated version - # number. Now we just need to compare it with the - # number that was provided. - if ( - int(version[0].split(".")[0]) - > latest_major & int(version[0].split(".")[1]) - > latest_minor - ): - - latest_major = int(version[0].split(".")[0]) - latest_minor = int(version[0].split(".")[1]) - - # Write with the version provided. - published_id = ( - published_id - + "/" - + str(latest_major) - + "." - + str(latest_minor) - ) - - else: - - # Bad version provided. - failed_version = True - - else: - - # If only the root ID was passed, find the latest - # version in the database, then increment the version. - - # Write with the minor version incremented. - published_id = ( - published_id - + "/" - + str(latest_major) - + "." - + str(latest_minor + 1) - ) - - # Did everything go properly with the version provided? - if failed_version is False: - - # The version was valid. - return {"published_id": published_id} - - else: - - # Bad request. - return "bad_version_number" - - else: - - # If all_versions has 0 length, then the - # the root ID does not exist at all. - # In this case, we have to return a failure flag - # because we cannot create a version for - # a root ID that does not exist. - return "non_root_id" - - def check_activation_credentials( - self, p_app_label, p_model_name, p_email, p_temp_identifier - ) -> bool: - """ - Simple existence check. - Checking whether or not a user exists and their - temp identifier matches. - Source: https://stackoverflow.com/a/9089028 - Source: https://docs.djangoproject.com/en/3.1/ref/models/querysets/#exists - """ - - user_info = apps.get_model( - app_label=p_app_label, model_name=p_model_name - ).objects.filter(email=p_email, temp_identifier=p_temp_identifier) - - if user_info.exists(): - - # The credentials exist, but is the request timely? - # Source: https://stackoverflow.com/a/7503368 - - # Take the time and add 2 days. - time_check = list(user_info.values_list("created", flat=True))[0] - - time_check = time_check + datetime.timedelta(hours=48) - - # Crappy timezone problems. - # Source: https://stackoverflow.com/a/25662061 - - # Is the time now less than the time check? - if datetime.datetime.now(datetime.timezone.utc) < time_check: - - # We can return that this user is OK to be activated. - return True - - else: - - # The time stamp has expired, so delete - # the entry in new_users. - user_info.delete() - - # We can't activate this user. - return False - - else: - - return False - - # Check that expiration dates are valid. - def check_expiration(self, dt_string): - """Split the string first.""" - try: - split_up = dt_string.split("-") - - if len(split_up) == 6: - - try: - - # Convert everything to integers. - split_up = [int(x) for x in split_up] - - exp_date = datetime.datetime( - split_up[0], - split_up[1], - split_up[2], - split_up[3], - split_up[4], - split_up[5], - ) - - if exp_date <= datetime.datetime.now(): - - return False - - except TypeError: - - return False - - else: - - return False - - except AttributeError: - - return False - - def get_api_models(self): - """Get all the ACCESSIBLE models in the API. - Source: https://stackoverflow.com/a/9407979 - """ - - api_models = [] - - # Define any tables to exclude here. 
- exclude = ["meta", "new_users"] - - for ct in ContentType.objects.all(): - m = ct.model_class() - - if m.__module__ == "api.models": - if m.__name__ not in exclude: - api_models.append(m.__name__) - - # Returns flat list... - return api_models - - def activate_account(self, p_email): - """p_email: which e-mail to activate. - - Activation means creating an entry in User. - - To comply with GDPR, we can't keep an e-mail - directly. So, split off the username part - of the e-mail and assign a random number. - """ - - valid_username = False - - while not valid_username: - new_username = p_email - if User.objects.filter(username=new_username): - valid_username = False - else: - valid_username = True - # We can't use the generic serializer here because of how - # django processes passwords. - # Source: https://docs.djangoproject.com/en/3.2/topics/auth/default/#changing-passwords - - # The password is also randomly generated. - new_password = uuid.uuid4().hex - - # Save the user. - # Source: https://docs.djangoproject.com/en/3.2/topics/auth/default/#creating-users - - user = User.objects.create_user(new_username) - - # Setting the password has to be done manually in - # order to encrypt it. - # Source: https://stackoverflow.com/a/39211961 - # Source: https://stackoverflow.com/questions/28347200/django-rest-http-400-error-on-getting-token-authentication-view - user.set_password(new_password) - - # Save the user. - user.save() - - # Automatically add the user to the bco_drafter and bco_publisher groups. - user.groups.add(Group.objects.get(name="bco_drafter")) - user.groups.add(Group.objects.get(name="bco_publisher")) - - # (OPTIONAL) Make a request to userdb on the portal so that - # the user's information can be stored there. - - # If a token was provided with the initial request, - # use it to make the update call to userdb. - token = ( - apps.get_model(app_label="api", model_name="new_users") - .objects.get(email=p_email) - .token - ) - - if token is not None: - # Send the new information to userdb. - # Get the user's information from the database. - uu = UserUtils.UserUtils() - # Set the headers. - # Source: https://docs.python-requests.org/en/master/user/quickstart/#custom-headers - headers = { - "Authorization": "JWT " + token, - "Content-type": "application/json; charset=UTF-8", - } - - # Set the data properly. - # Source: https://stackoverflow.com/a/56562567 - r = requests.post( - data=json.dumps(uu.get_user_info(username=new_username), default=str), - headers=headers, - url="http://127.0.0.1:8080/users/add_api/", - ) - - # Delete the record in the temporary table. - apps.get_model(app_label="api", model_name="new_users").objects.filter( - email=p_email - ).delete() - - # Return the username in a list, as this is - # easily checked for upstream (as opposed to - # some regex solution to check for username - # information). - return [new_username] - - # Messages associated with results from sub-requests. - def messages(self, parameters, p_content=False): - """TODO: abstract all of this up into the top level of the class. - - Define the return messages, if they don't - come in defined. 
- """ - - definable = [ - "errors", - "expiration_date", - "group", - "object_id", - "draft_object_id", - "constructed_obj_id", - "object_perms", - "prefix", - "published_id", - "table", - "username", - "contents", - "users_excluded", - ] - - for i in definable: - if i not in parameters: - parameters[i] = "" - - return { - "200_found": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "The object with ID '" - + parameters["object_id"] - + "' was found on table '" - + parameters["table"] - + "'.", - "content": p_content, - }, - "200_OK_group_delete": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "The group '" + parameters["group"] + "' was deleted.", - }, - "200_OK_group_modify": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "The group '" - + parameters["group"] - + "' was succesfully modified.", - }, - "200_OK_object_delete": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "The object with ID '" - + parameters["object_id"] - + "' was deleted.", - }, - "200_OK_object_read": { - "request_status": "SUCCESS", - "status_code": "200", - "contents": parameters["contents"], - "message": "The object with ID '" - + parameters["object_id"] - + "' was found on the server.", - }, - "200_OK": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "The prefix '" + parameters["prefix"] + "' was deleted.", - }, - "200_OK_object_permissions": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Permissions for the object with ID '" - + parameters["object_id"] - + "' were found on the server.", - "object_id": parameters["object_id"], - "permissions": parameters["object_perms"], - }, - "200_OK_object_permissions_set": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Permissions for the object with ID '" - + parameters["object_id"] - + "' were set on the server.", - "object_id": parameters["object_id"], - }, - "200_OK_object_publish": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Successfully published '" - + parameters["published_id"] - + "' on the server.", - "published_id": parameters["published_id"], - }, - "200_OK_object_publish_draft_deleted": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Successfully published '" - + parameters["published_id"] - + "' on the server and the draft was deleted.", - "published_id": parameters["published_id"], - }, - "200_OK_object_publish_draft_not_deleted": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Successfully published '" - + parameters["published_id"] - + "' on the server and the draft was not deleted.", - "published_id": parameters["published_id"], - }, - "200_OK_prefix_delete": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Successfully deleted prefix '" - + parameters["prefix"] - + "'.", - }, - "200_OK_prefix_permissions_update": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "Successfully updated prefix permissions on prefix '" - + parameters["prefix"] - + "'.", - }, - "200_update": { - "request_status": "SUCCESS", - "status_code": "200", - "message": "The object with ID '" - + parameters["object_id"] - + "' was updated.", - }, - "201_create": { - "request_status": "SUCCESS", - "status_code": "201", - "message": "The object with ID '" - + parameters["object_id"] - + "' was created on the server.", - "object_id": parameters["object_id"], - }, - "201_prefix_modify": { - "request_status": "SUCCESS", - 
"status_code": "200", - "message": "The prefix '" + parameters["prefix"] + "' was updated.", - }, - "201_group_create": { - "request_status": "SUCCESS", - "status_code": "201", - "message": "The group '" - + parameters["group"] - + "' was successfully created.", - }, - "201_group_users_excluded": { - "request_status": "SUCCESS", - "status_code": "201", - "message": "The group '" - + parameters["group"] - + "' was successfully created, but the following users were excluded: " - + str(parameters["users_excluded"]), - }, - "201_prefix_create": { - "request_status": "SUCCESS", - "status_code": "201", - "message": "The prefix '" - + parameters["prefix"] - + "' was successfully created.", - }, - "202_Accepted": { - "request_status": "SUCCESS", - "status_code": "202", - "message": "The request you performed has been accepted.", - }, - "204_no_content": { - "request_status": "SUCCESS", - "status_code": "204", - "message": "The search you performed returned ZERO results.", - }, - "400_bad_request": { - "request_status": "FAILURE", - "status_code": "400", - "message": "The request could not be processed with the parameters provided.", - }, - "400_bad_request_malformed_prefix": { - "request_status": "FAILURE", - "status_code": "400", - "message": "The prefix '" - + parameters["prefix"] - + "' does not follow the naming rules for a prefix.", - }, - "400_bad_version_number": { - "request_status": "FAILURE", - "status_code": "400", - "message": "The provided version number for this object is not greater than the number that would be generated automatically and therefore the request to publish was denied.", - }, - "400_invalid_expiration_date": { - "request_status": "FAILURE", - "status_code": "400", - "message": "The expiration date '" - + parameters["expiration_date"] - + "' is not valid either because it does not match the required format 'YYYY-MM-DD-HH-MM-SS' or because it falls before the current time.", - }, - "400_non_publishable_object": { - "request_status": "FAILURE", - "status_code": "400", - "message": "The object provided was not valid against the schema provided. 
See key 'errors' for specifics of the non-compliance.", - "errors": parameters["errors"], - }, - "400_non_root_id": { - "request_status": "FAILURE", - "status_code": "400", - "message": "The provided object ID does not contain a URI with a valid prefix.", - }, - "400_unspecified_error": { - "request_status": "FAILURE", - "status_code": "400", - "message": "An unspecified error occurred.", - }, - "401_prefix_unauthorized": { - "request_status": "FAILURE", - "status_code": "401", - "message": "The token provided does not have draft permissions for this prefix '" - + parameters["prefix"] - + "'.", - }, - "401_prefix_publish_unauthorized": { - "request_status": "FAILURE", - "status_code": "401", - "message": "The token provided does not have publish permissions for this prefix '" - + parameters["prefix"] - + "'.", - }, - "403_insufficient_permissions": { - "request_status": "FAILURE", - "status_code": "403", - "message": "The token provided does not have sufficient permissions for the requested object.", - }, - "403_requestor_is_not_prefix_owner": { - "request_status": "FAILURE", - "status_code": "403", - "message": "The token provided is not the owner of the prefix '" - + parameters["prefix"] - + "' and therefore permissions for the prefix cannot be changed in this request.", - }, - "403_invalid_token": { - "request_status": "FAILURE", - "status_code": "403", - "message": "The token provided was not able to be used on this object.", - }, - "404_group_not_found": { - "request_status": "FAILURE", - "status_code": "404", - "message": "The group '" - + parameters["group"] - + "' was not found on the server.", - }, - "404_missing_bulk_parameters": { - "request_status": "FAILURE", - "status_code": "404", - "message": "One or more missing optional parameters are required for this call to have an effect.", - }, - "404_missing_prefix": { - "request_status": "FAILURE", - "status_code": "404", - "message": "The prefix '" - + parameters["prefix"] - + "' was not found on the server.", - }, - "404_object_id": { - "request_status": "FAILURE", - "status_code": "404", - "message": "The object ID '" - + parameters["object_id"] - + "' was not found on the server.", - }, - "404_table": { - "request_status": "FAILURE", - "status_code": "404", - "message": "The table with name '" - + parameters["table"] - + "' was not found on the server.", - }, - "404_user_not_found": { - "request_status": "FAILURE", - "status_code": "404", - "message": "The user '" - + parameters["username"] - + "' was not found on the server.", - }, - "409_group_conflict": { - "request_status": "FAILURE", - "status_code": "409", - "message": "The provided group '" - + parameters["group"] - + "' has already been created on this server.", - }, - "409_prefix_conflict": { - "request_status": "FAILURE", - "status_code": "409", - "message": "The provided prefix '" - + parameters["prefix"] - + "' has already been created on this server.", - }, - "409_object_conflict": { - "request_status": "FAILURE", - "status_code": "409", - "message": "The provided object " - + parameters["object_id"] - + " has already been created on this server." - + " If you wish to publish a new version of this BCO try" - + " to save the DRAFT with a different version number, and" - + " then resubmit.", - }, - "409_draft_object_id_conflict": { - "request_status": "FAILURE", - "status_code": "409", - "message": "The provided object_id " - + parameters["object_id"] - + " does not match the saved draft object_id " - + parameters["draft_object_id"] - + ". 
Once a draft is created you can not change the object id.", - }, - "409_object_id_conflict": { - "request_status": "FAILURE", - "status_code": "409", - "message": "The provided object_id " - + parameters["object_id"] - + " does not match the constructed object_id " - + parameters["constructed_obj_id"] - + ".", - }, - "418_too_many_deleted": { - "request_status": "FAILURE", - "status_code": "418", - "message": "Only one object was expected to be deleted, but multiple were removed.", - }, - } - - # Publish an object. - def publish(self, owner_group, owner_user, prefix, publishable, publishable_id): - """Publish BCO - - Parameters - ---------- - owner_group: str - Name of owner group - owner_user: str - Name of owner user - prfx: str - publishable: api.models.BCO - publishable_id: dict - - Returns - ------- - """ - # publishable is a draft object. - - # Define the object naming information. - object_naming_info = settings.OBJECT_NAMING - - # Define a variable to hold all information - # about the published object. - published = {} - - # A new published object or an existing one? - if publishable_id == "new": - - # TODO: put new object ID logic in its own function - # like check_version_rules()... - - # Define a variable which will hold the constructed name. - constructed_name = "" - - # This section was breaking the production/test Db. The contents of `object_naming_info` - # are modifies somewhere else before here so that this IF/ELSE is not needed and causes - # a break in the code. - - # Create the constructed name based on whether or not - # we're on a production server. - # if settings.PRODUCTION == 'True': - - # constructed_name = object_naming_info['uri_regex'].replace( - # 'prod_root_uri', # WTF MAAAN - # object_naming_info['prod_root_uri'] - # ) - - # elif settings.PRODUCTION == 'False': - - constructed_name = object_naming_info["uri_regex"].replace( - "root_uri", object_naming_info["root_uri"] - ) - - constructed_name = constructed_name.replace("prefix", prefix) - - # Get rid of the rest of the regex for the name. - prefix_location = constructed_name.index(prefix) - prefix_length = len(prefix) - constructed_name = constructed_name[0 : prefix_location + prefix_length] - - # Get the object number counter from meta information about the prefix. - prefix_counter = prefix_table.objects.get(prefix=prefix) - - # Create the contents field. - published["contents"] = publishable - - # Create a new ID based on the prefix counter. - published["object_id"] = ( - constructed_name - + "_" - + "{:06d}".format(prefix_counter.n_objects) - + "/1.0" - ) - - # Make sure to create the object ID field in our draft. - published["contents"]["object_id"] = published["object_id"] - - # Django wants a primary key for the Group... - published["owner_group"] = owner_group - - # Django wants a primary key for the User... - published["owner_user"] = owner_user - - # The prefix is passed through. - published["prefix"] = prefix - - # Schema is hard-coded for now... - published["schema"] = "IEEE" - - # This is PUBLISHED. - published["state"] = "PUBLISHED" - - # Set the datetime properly. - published["last_update"] = timezone.now() - - # Publish. - self.write_object( - p_app_label="api", - p_model_name="BCO", - p_fields=[ - "contents", - "last_update", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - p_data=published, - ) - - # Update the meta information about the prefix. 
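For reference, the deleted DbUtils.publish() branch above mints a brand-new object ID by collapsing the uri_regex template from settings.OBJECT_NAMING down to the root URI plus prefix, then appending a zero-padded counter and a fixed "/1.0" version. A minimal standalone sketch of that naming rule follows; root_uri, prefix, and n_objects are stand-in values, and the separator is assumed here since the real code substitutes into the regex template rather than formatting directly.

def construct_object_id(root_uri, prefix, n_objects, version="1.0"):
    # e.g. construct_object_id("https://example.org", "BCO", 27)
    # -> "https://example.org/BCO_000027/1.0"
    return "{}/{}_{:06d}/{}".format(root_uri, prefix, n_objects, version)

print(construct_object_id("https://example.org", "BCO", 27))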
- prefix_counter.n_objects = prefix_counter.n_objects + 1 - prefix_counter.save() - - # Successfuly saved the object. - return {"published_id": published["object_id"]} - - else: - # An object ID was provided, so go straight to publishing. - - # Create the contents field. - published["contents"] = publishable.contents - - # Set the object ID. - published["object_id"] = publishable_id - - # Make sure to create the object ID field in the BCO. - published["contents"]["object_id"] = publishable_id - - # Django wants a primary key for the Group... - published["owner_group"] = owner_group - - # Django wants a primary key for the User... - published["owner_user"] = owner_user - - # The prefix is passed through. - published["prefix"] = prefix - - # Schema is hard-coded for now... - published["schema"] = "IEEE" - - # Mark the object as published. - published["state"] = "PUBLISHED" - - # Set the datetime properly. - published["last_update"] = timezone.now() - - # Publish. - self.write_object( - p_app_label="api", - p_model_name="BCO", - p_fields=[ - "contents", - "last_update", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - p_data=publishable.contents, - ) - - # Successfully saved the object. - return {"published_id": published["object_id"]} - - # Write (update) either a draft or a published object to the database. - def write_object( - self, - p_app_label, - p_model_name, - p_fields, - p_data, - p_update=False, - p_update_field=False, - ): - - """Source: https://docs.djangoproject.com/en/3.1/topics/db/queries/#topics-db-queries-update - - Serialize our data.""" - serializer = getGenericSerializer( - incoming_model=apps.get_model( - app_label=p_app_label, model_name=p_model_name - ), - incoming_fields=p_fields, - ) - - serialized = serializer(data=p_data) - - # Save (update) it. - if p_update is False: - # Write a new object. - if serialized.is_valid(): - serialized.save() - return 1 - else: - print(serialized.errors) - return -1 - else: - # Update an existing object. - # apps.get_model( - # app_label = p_app_label, - # model_name = p_model_name - # ).objects.filter( - # object_id = p_data['object_id'] - # ).update( - # contents = p_data['contents'] - # ) - - objects_modified = ( - apps.get_model(app_label=p_app_label, model_name=p_model_name) - .objects.filter(object_id=p_data["object_id"]) - .update(contents=p_data["contents"]) - ) - - return objects_modified - - def convert_id_form(self, oi_root): - return oi_root.split("_")[0] + "{:06d}".format(int(oi_root.split("_")[1])) diff --git a/api/scripts/utilities/FileUtils.py b/api/scripts/utilities/FileUtils.py deleted file mode 100755 index 2fbb32a1..00000000 --- a/api/scripts/utilities/FileUtils.py +++ /dev/null @@ -1,167 +0,0 @@ -# --- SOURCES --- # - -# For finding files. -import glob - -# For writing. -import os - - -# --- MAIN --- # - - -class FileUtils: - def pathalizer(self, directory, pattern): - - # Description - # ----------- - - # Construct a search path with regex. - - # Arguments - # --------- - - # directory - # --------- - # - # Description: where to look within the project directory. - # Values: any folder - - # pattern - # ------- - # - # Description: the regex. - # Values: any regex - - # Outputs - # ------- - - # A directory + pattern string. - - # Kick back the string. 
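The one-line DbUtils.convert_id_form() helper deleted above re-pads the numeric suffix of a root object ID. A self-contained restatement, with a made-up input value:

def convert_id_form(oi_root):
    # 'BCO_27' -> 'BCO000027' (underscore dropped, number zero-padded to six digits)
    parts = oi_root.split("_")
    return parts[0] + "{:06d}".format(int(parts[1]))

assert convert_id_form("BCO_27") == "BCO000027"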
- return os.path.join( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), - directory + pattern, - ) - - def create_files(self, payload, output_directory, file_extension): - - # Description - - # Write a list of files a list of files in a directory matching a regex. - - # Arguments - - # payload - # ---------------- - # - # Description: what are we writing? - # Values: must be a dictionary where the keys are *ORIGINAL* full file names and values are file contents. - - # output_directory - # ---------------- - # - # Description: where are we writing to? - # Values: any extant directory - MUST BE AN ABSOLUTE PATH - - # file_extension - # ---------------- - # - # Description: what extension are we appending to the *ORIGINAL* file name? - # Values: any string - - # Outputs - - # A list of files. - - # Construct the output path for each file and write. - for original_filename, contents in payload.items(): - with open( - self.pathalizer(output_directory, original_filename + file_extension), - mode="w", - ) as f: - f.write(contents) - - def find_files(self, input_directory, regex): - - # Description - - # Retrieve a list of files in a directory matching a regex. - - # Arguments - - # input_directory - # ---------------- - # - # Description: where are the files we're assigning? - # Values: any extant directory - MUST BE AN ABSOLUTE PATH - - # regex - # ---------------- - # - # Description: what regex are we using to search the directory? - # Values: any regex - - # Outputs - - # A list of matching files. - - # Search the input directory for matching files. - - # Source: https://stackoverflow.com/questions/39293968/python-how-do-i-search-directories-and-find-files-that-match-regex - # Source: https://stackoverflow.com/questions/30218802/get-parent-of-current-directory-from-python-script - - return glob.glob(self.pathalizer(input_directory, regex)) - - # Find the entire tree of a folder based on an extension. - def get_folder_tree_by_extension(self, search_folder, search_extension): - - # search_folder: where we're looking. - # search_extension: the extension we're looking for. - - # Source: https://www.tutorialspoint.com/python/os_walk.htm - - # Set the root directory. - root_directory = os.path.join( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), search_folder - ) - - # Create a dictionary to return. - returning = {"root_directory": root_directory, "paths": []} - - for root, dirs, files in os.walk(root_directory): - for name in files: - returning["paths"].append(os.path.join(root, name)) - for name in dirs: - returning["paths"].append(os.path.join(root, name)) - - returning["paths"] = [ - x for x in returning["paths"] if x.find(search_extension) != -1 - ] - - return returning - - # Find the entire tree of a folder, regardless of extension. - def get_folder_tree(self, search_folder): - - # search_folder: where we're looking. - - # Source: https://www.tutorialspoint.com/python/os_walk.htm - - # Set the root directory. - root_directory = os.path.join( - os.path.dirname(os.path.dirname(os.path.abspath(__file__))), search_folder - ) - - # Create a dictionary to return. 
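The deleted FileUtils helpers above resolve a directory-plus-pattern string relative to the package and hand it to glob. A small standalone sketch of that pathalizer/find_files pair; the anchor directory is two levels above the current file, as in the original, and the arguments in the usage line are hypothetical:

import glob
import os

def pathalizer(directory, pattern):
    # Anchor the search path two directory levels above this file.
    base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    return os.path.join(base, directory + pattern)

def find_files(input_directory, regex):
    # glob performs the actual matching against the constructed path.
    return glob.glob(pathalizer(input_directory, regex))

print(find_files("request_definitions/", "*.schema"))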
- returning = {"root_directory": root_directory, "paths": []} - - for root, dirs, files in os.walk(root_directory): - for name in files: - returning["paths"].append(os.path.join(root, name)) - for name in dirs: - returning["paths"].append(os.path.join(root, name)) - - returning["paths"] = [x for x in returning["paths"] if 1] - - return returning diff --git a/api/scripts/utilities/JsonUtils.py b/api/scripts/utilities/JsonUtils.py deleted file mode 100755 index b34bac54..00000000 --- a/api/scripts/utilities/JsonUtils.py +++ /dev/null @@ -1,308 +0,0 @@ -#!/usr/bin/env python3 -"""JSON Utils - -For JSON parsing and schema validation. -""" - -import os -import sys -import json -import jsonref -import jsonschema -from simplejson.errors import JSONDecodeError -from requests.exceptions import ConnectionError as ErrorConnecting - - -def get_schema(schema_uri): - """Retrieve JSON Schema - - Parameters - ---------- - schema_uri : str - A URI that is used to pull the JSON schema for validation. - - Returns - ------- - schema : dict - A dictionary of the JSON schema definition, or detail on the error loading the schema. - """ - - try: - schema = jsonref.load_uri(schema_uri) - return schema - - except JSONDecodeError: - return {schema_uri: ["Failed to load extension schema. JSON Decode Error."]} - - except TypeError: - return {schema_uri: ["Failed to load extension schema. Invalid format."]} - - except ErrorConnecting: - return {schema_uri: ["Failed to load extension schema. Connection Error."]} - - -def validate(schema, json_object, results): - """BCO/extension Validator - - Parameters - ---------- - schema : dict - A dictionary of the JSON schema definition. - json_object : dict - A dictionary of the BCO/extension JSON for validation. - results : dict - A dictionary that is used to collect the validation results. - - Returns - ------- - results : dict - A dictionary that is used to collect the validation results. - """ - if "object_id" in json_object: - identifier = json_object["object_id"] - - if "extension_schema" in json_object: - identifier = json_object["extension_schema"] - - validator = jsonschema.Draft7Validator(schema) - errors = validator.iter_errors(json_object) - for error in errors: - values = "".join(f"[{v}]" for v in error.path) - results[identifier]["number_of_errors"] += 1 - if len(values) == 0: - error_string = {"top_level": error.message} - else: - error_string = {values: error.message} - results[identifier]["error_detail"].append(error_string) - - return results - - -def parse_bco(bco: dict, results: dict): - """BCO Parsing for Validation - - Parameters - ---------- - bco : dict - The BCO JSON to be processed for validation. 
- results : dict - A dictionary to be populated with the BCO validation results - - Returns - ------- - results : dict - A dictionary with the BCO validation results - """ - - identifier = bco["object_id"] - results[identifier] = {"number_of_errors": 0, "error_detail": []} - try: - spec_version = get_schema(bco["spec_version"]) - - except AttributeError: - file_path = os.path.dirname( - os.path.abspath("api/validation_definitions/IEEE/2791object.json") - ) - - ieee = "api/validation_definitions/IEEE/2791object.json" - with open(ieee, "r", encoding="utf-8") as file: - spec_version = jsonref.load( - file, base_uri=f"file://{file_path}/", jsonschema=True - ) - - except ErrorConnecting: - file_path = os.path.dirname( - os.path.abspath("api/validation_definitions/IEEE/2791object.json") - ) - - ieee = "api/validation_definitions/IEEE/2791object.json" - with open(ieee, "r", encoding="utf-8") as file: - spec_version = jsonref.load( - file, base_uri=f"file://{file_path}/", jsonschema=True - ) - - results = validate(spec_version, bco, results) - if "extension_domain" in bco.keys(): - if isinstance(bco["extension_domain"], list) is False: - results[identifier]["extension_domain"] = { - "number_of_errors": 1, - "error_detail": ["extension_doamin invalid"], - } - - return results - for extension in bco["extension_domain"]: - extension_id = extension["extension_schema"] - results[identifier][extension_id] = { - "number_of_errors": 0, - "error_detail": [], - } - extension_schema = get_schema(extension_id) - if extension_id in extension_schema: - results[identifier][extension_id] = { - "number_of_errors": 1, - "error_detail": extension_schema, - } - else: - results[identifier] = validate( - extension_schema, extension, results[identifier] - ) - if results[identifier][extension_id]["number_of_errors"] == 0: - results[identifier][extension_id]["error_detail"] = ["Extension Valid"] - - results[identifier]["number_of_errors"] += results[identifier][ - extension_id - ]["number_of_errors"] - - return results - - -class JsonUtils: - """Class Description - ----------------- - - These are methods for checking for valid JSON objects. - """ - - # Check for a set of keys. - def check_key_set_exists(self, data_pass, key_set): - """ - Arguments - --------- - - data_pass: the 'raw' data. - - Go over each key in the key set and see if it exists - the in request data. - - Returns - ------- - - None: all keys were present - dict: items 'error' and 'associated_key' - - Assume all keys are present. - """ - missing_keys = [] - - for current_key in key_set: - - # Was this key found? - try: - - data_pass[current_key] - - except: - - # Append the error. - missing_keys.append( - { - "error": "INVALID_" + current_key.upper() + "_FAILURE", - "associated_key": current_key, - "error_message": "Key " + current_key + " not found.", - } - ) - - # Return value is based on whether or not there were errors. - if not missing_keys: - return missing_keys - - # Check that what was provided was JSON. - def check_json_exists(self, data_pass, key_set): - - # Arguments - # -------- - - # data_pass: the 'raw' request data. - # key_set: the keys to check for JSON. - - # Simply check if what was provided was actually JSON. - - # Returns - # ------- - - # None: the provided data was JSON. - # JSON_CONVERSION_ERROR: the provided data was not JSON. - - # Assume all data is JSON. - not_json = [] - - for current_key in key_set: - - # Was this key found? - try: - - # First, try to convert the payload string into a JSON object. 
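The deleted JsonUtils.check_key_set_exists() above reports which expected request keys are absent from the payload. The same check, restated as a runnable function that returns the list unconditionally, with toy inputs:

def missing_keys(data, key_set):
    errors = []
    for key in key_set:
        if key not in data:
            errors.append({
                "error": "INVALID_" + key.upper() + "_FAILURE",
                "associated_key": key,
                "error_message": "Key " + key + " not found.",
            })
    return errors

print(missing_keys({"contents": "{}"}, ["contents", "owner_group"]))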
- json.loads(s=data_pass[current_key]) - - except: - - # Append the error. - not_json.append( - {"error": "JSON_CONVERSION_ERROR", "associated_key": current_key} - ) - - # Return value is based on whether or not there were errors. - if not_json is not []: - return not_json - - def load_schema_refs(self, schema_pass): - - # Load the references for a given schema. - - # Arguments - # --------- - - # schema_pass: the schema for which we are loading references. - - # The jsonschema documentation doesn't give any examples. - # Source: https://www.programcreek.com/python/example/83374/jsonschema.RefResolver - - # Define the resolver. - resolver = jsonschema.RefResolver(referrer=schema_pass, base_uri="./") - - def check_object_against_schema(self, object_pass, schema_pass): - - # Check for schema compliance. - - # Arguments - # --------- - - # object_pass: the object being checked. - # schema_pass: the schema to check against. - - # Check the object against the provided schema. - - # Define a validator. - validator = jsonschema.Draft7Validator(schema_pass) - - # Define the errors list. - errors = validator.iter_errors(object_pass) - error_string = "" - - # We have to use a bit of tricky output re-direction, see https://www.kite.com/python/answers/how-to-redirect-print-output-to-a-variable-in-python - - old_stdout = sys.stdout - new_stdout = io.StringIO() - sys.stdout = new_stdout - - # We ALSO have to use a bit of tricky flagging to indicate - # that there were errors since generators can't use the normal len(list(generator)) idiom. - error_flag = 0 - - for e in errors: - - # There is at least 1 error. - error_flag = 1 - - # These aren't deleted when preparing the code for production... - print(e) - print("=================") - - error_string = error_string + new_stdout.getvalue() - sys.stdout = old_stdout - - # Return based on whether or not there were any errors. - if error_flag != 0: - - # Collapse and return the errors. - return error_string diff --git a/api/scripts/utilities/RequestUtils.py b/api/scripts/utilities/RequestUtils.py deleted file mode 100755 index be305c58..00000000 --- a/api/scripts/utilities/RequestUtils.py +++ /dev/null @@ -1,30 +0,0 @@ -# Utilities -import json -from . import JsonUtils - -# For checking request formats -from django.conf import settings - - -class RequestUtils: - - # Check for a valid template. - def check_request_templates(self, method, request): - - # Arguments - - # method: one of DELETE, GET, PATCH, POST - # request: the request itself - - # We need to check for a valid template. - - # Define the request templates. - request_templates = settings.REQUEST_TEMPLATES - - # Subset the templates to the ones for this request method. - request_templates = request_templates[method] - - # Validate against the templates. - return JsonUtils.JsonUtils().check_object_against_schema( - object_pass=request, schema_pass=request_templates - ) diff --git a/api/scripts/utilities/ResponseUtils.py b/api/scripts/utilities/ResponseUtils.py deleted file mode 100755 index 74b36a87..00000000 --- a/api/scripts/utilities/ResponseUtils.py +++ /dev/null @@ -1,53 +0,0 @@ -class ResponseUtils: - - # Class Description - # ----------------- - - # These are methods to help with sending back a (formatted) response. - - # Clean up the response string. 
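The deleted JsonUtils.check_object_against_schema() above captured validator output by temporarily redirecting sys.stdout into an io.StringIO buffer (without importing io). One way to collect the same error text without the redirection, offered as a hedged rework rather than the original implementation:

import jsonschema

def check_object_against_schema(object_pass, schema_pass):
    # Join all Draft-7 validation errors into one string; return None if valid.
    validator = jsonschema.Draft7Validator(schema_pass)
    messages = [str(error) for error in validator.iter_errors(object_pass)]
    if messages:
        return "\n=================\n".join(messages)

print(check_object_against_schema(
    {"object_id": 1},
    {"properties": {"object_id": {"type": "string"}}},
))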
- def beautify_error_set(self, errors): - - # Arguments - # --------- - - # errors: a list of lists, with each list having items in dictionary format {item_id: number, errors: {error: string, associated_key: string, error_message: string}} - - # Returns - # ------- - - # A line for each item_id and the associated errors. - - # Define a list which will be collapsed to return - # an error string. - error_string = [] - - # Go through each error set. - for item_index in range(0, len(errors)): - - # Create the error header for ID. - string_helper = ( - "Errors for item ID: " - + str(item_index) - + "\n-------------------------\n" - ) - - # Define a list of all errors which will be collapsed. - all_errors = [] - - # Now create each line of the error report. - for error_subset in errors[item_index]: - - # Append this error. - all_errors.append( - error_subset["error"] + ": " + error_subset["error_message"] - ) - - # Collapse the errors into new lines. - string_helper = string_helper + "\n".join(all_errors) - - # Append to the error string. - error_string.append(string_helper) - - # Collapse all errors for all items and return. - return "\n".join(error_string) diff --git a/api/scripts/utilities/SettingsUtils.py b/api/scripts/utilities/SettingsUtils.py deleted file mode 100755 index 83245feb..00000000 --- a/api/scripts/utilities/SettingsUtils.py +++ /dev/null @@ -1,146 +0,0 @@ -# Utilities -from . import FileUtils - -# For testing only. -import json -import os - -# For loading schema. -import jsonref - - -class SettingsUtils: - - # Class Description - # ----------------- - - # These are methods for initializing the program. - - # Create a dictionary to hold schema information. - def load_schema_local(self, search_parameters, mode): - - # search_parameters: dictionary of file search locations and file endings. - - # mode: loading for requests or for validation? - - # A more advanced version of this would set the schema $id based on - # where the schema resides, negating the need for manual entry of the $id. - - # Define a dictionary to hold top-level folder/file information. - schema = {} - - # Iterate over the search parameters. - for folder, extension in search_parameters.items(): - raw_files = FileUtils.FileUtils().get_folder_tree_by_extension( - search_folder=folder, search_extension=extension - ) - - # We now have the files, so load the schema. - - # First, initialize schema. - schema[folder] = {} - - # Now go over each path. - for current_file in raw_files["paths"]: - - # We can now set keys. - with open(current_file, mode="r") as f: - - schema[folder][current_file] = json.load(f) - - # Set the id. - schema[folder][current_file]["$id"] = "file:" + current_file - - # Now go through and define the absolute reference paths. - # We have to do this recursively as we do not know - # where we will see "$ref$. - - # The jsonschema library does NOT support relative references - # within the document, see https://json-schema.org/understanding-json-schema/structuring.html#using-id-with-ref - # Therefore, we must manually resolve the paths. This is actually - # a stronger solution, however, as it allows for referencing - # schema anywhere within the project directory as opposed to - # referencing schema within the same folder level only (as is - # the case with the relative reference examples given at the link above). - - # The schema_files are separated at the top level - # by the folders provided in search_parameters. 
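The ResponseUtils.beautify_error_set() method deleted just above flattens a list of per-item error lists into a readable report. A standalone restatement with a toy error list:

def beautify_error_set(errors):
    blocks = []
    for item_index, error_subset in enumerate(errors):
        header = "Errors for item ID: {}\n-------------------------\n".format(item_index)
        lines = [e["error"] + ": " + e["error_message"] for e in error_subset]
        blocks.append(header + "\n".join(lines))
    return "\n".join(blocks)

print(beautify_error_set([[{"error": "JSON_CONVERSION_ERROR",
                            "error_message": "Payload was not valid JSON."}]]))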
- - # Source: https://stackoverflow.com/questions/10756427/loop-through-all-nested-dictionary-values - def set_refs(d, root_folder): - - # Set the keys. - if "$ref" in d: - - # If the reference is internal to the document, ignore it. - # Otherwise, define the reference. - if d["$ref"][0] != "#": - d["$ref"] = "file:" + os.getcwd() + "/" + root_folder + d["$ref"] - - for k, v in d.items(): - if isinstance(v, dict): - set_refs(v, root_folder) - - # Kick it back. - return d - - # A more advanced implementation would allow for referencing schema - # outside of the hosting folder. - - # Are we defining for requests or for validations? - - if mode == "requests": - - # Call set refs by each top-level folder. - for folder, contents in schema.items(): - schema[folder] = set_refs(schema[folder], root_folder="api/") - - elif mode == "validations": - - # Call set refs by each top-level folder. - for file, contents in schema["validation_definitions/"].items(): - - # Split the file name up to help construct the root folder. - file_name_split = file.split("/") - - # Where is the 'validation_definitions/' item? - vd_index = file_name_split.index("validation_definitions") - - # Collapse everything after this index but before the file name. - collapsed = ( - "/".join(file_name_split[vd_index + 1 : len(file_name_split) - 1]) - + "/" - ) - - # Set the name. - schema["validation_definitions/"][file] = set_refs( - schema["validation_definitions/"][file], - root_folder="api/validation_definitions/" + collapsed, - ) - - # Return the public-facing schema AND the processed schema? - return schema - - # Define the schema for each request type. - def define_request_schema(self, schema): - - # schema: everything found in self.load_local_schema. - - # Create a dictionary to return all the request types. - returning = {"DELETE": {}, "GET": {}, "PATCH": {}, "POST": {}} - - # Now go through the schema to locate the request information. - for k, v in schema.items(): - - # If the object title is a given request type, update returning. - if v["title"] == "DELETE": - returning["DELETE"] = v - elif v["title"] == "GET": - returning["GET"] = v - elif v["title"] == "PATCH": - returning["PATCH"] = v - elif v["title"] == "POST": - returning["POST"] = v - - # Kick it back. - return returning diff --git a/api/scripts/utilities/UserUtils.py b/api/scripts/utilities/UserUtils.py deleted file mode 100755 index 4ff5b375..00000000 --- a/api/scripts/utilities/UserUtils.py +++ /dev/null @@ -1,268 +0,0 @@ -#!/usr/bin/env python3 -"""User Utilities -Functions for operations with Users -""" - -from django.conf import settings -from django.contrib.auth.models import Group, User -from django.contrib.auth.models import Permission -from rest_framework.authtoken.models import Token - - -class UserUtils: - """ - Methods for interacting with user information. - - Attributes - ---------- - - Methods - ------- - - """ - - def check_permission_exists(self, perm): - """Does the permission exist?""" - return Permission.objects.filter(codename=perm).exists() - - def check_group_exists(self, name): - """Does the user exist?""" - return Group.objects.filter(name=name).exists() - - def check_user_exists(self, user_name): - """Does the user exist?""" - return User.objects.filter(username=user_name).exists() - - def check_user_in_group(self, user_name, group_name): - """Check if a user is in a group. - - First check that the user exists. - Then check that the groups exists. - Finally, check that the user is in - the group. 
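The recursive set_refs() helper shown above rewrites external "$ref" entries into absolute file: URIs so the jsonschema resolver can locate schemas elsewhere in the project tree. A self-contained sketch of that rewrite; the "api/" root folder matches the value used for request schemas in the deleted code, while the sample schema is invented:

import os

def set_refs(node, root_folder):
    # Internal references ("#/...") are left alone; external ones are made absolute.
    if "$ref" in node and not node["$ref"].startswith("#"):
        node["$ref"] = "file:" + os.getcwd() + "/" + root_folder + node["$ref"]
    for value in node.values():
        if isinstance(value, dict):
            set_refs(value, root_folder)
    return node

schema = {"properties": {"provenance_domain": {"$ref": "provenance_domain.json"}}}
print(set_refs(schema, "api/"))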
- - Try/except is preferred because - the query is only run one time. - """ - - try: - user = User.objects.get(username=user_name).username - try: - group = Group.objects.get(name=group_name).name - if group_name in list( - User.objects.get(username=user_name).groups.values_list( - "name", flat=True - ) - ): - return {"user": user, "group": group} - else: - return False - except Group.DoesNotExist: - return False - except User.DoesNotExist: - return False - - def check_user_owns_prefix(self, user_name, prfx): - """Check if a user owns a prefix.""" - - return Prefix.objects.filter(owner_user=user_name, prefix=prfx).exists() - - def get_user_groups_by_token(self, token): - """Takes token to give groups. - First, get the groups for this token. - This means getting the user ID for the token, - then the username.""" - - user_id = Token.objects.get(key=token).user_id - username = User.objects.get(id=user_id) - - # Get the groups for this username (at a minimum the user - # group created when the account was created should show up). - return Group.objects.filter(user=username) - - def get_user_groups_by_username(self, user_name): - """Takes usernames to give groups. - Get the groups for this username (at a minimum the user - group created when the account was created should show up). - """ - return Group.objects.filter(user=User.objects.get(username=user_name)) - - # Get all user information. - def get_user_info(self, username): - """Get User Info - - Arguments - --------- - - username: the username. - - Returns - ------- - - A dict with the user information. - - Slight error the the django-rest-framework documentation - as we need the user id and not the username. - Source: https://www.django-rest-framework.org/api-guide/authentication/#generating-tokens - No token creation as the user has to specifically - confirm their account before a token is created - for them. - - Get the other information for this user. - Source: https://stackoverflow.com/a/48592813 - First, get the django-native User object. - Group permissions - Get each group's permissions separately, - then append them to other_info. - Try to get the permissions for the user, - split by user and group. - Define a dictionary to hold the permissions. - First, by the user. - Keep the model and the codename. - Next, by the group. - username.get_group_permissions() sheds the group - name (a design flaw in django), so we have to - invoke some inefficient logic here. - In general, django isn't good at retaining - groups and permissions in one step. - See the first comment at https://stackoverflow.com/a/27538767 - for a partial solution. - Alternatively, in models.py, we could define - our own permissions class, but this is a bit - burdensome. - Add the group name automatically. 
- """ - user_id = User.objects.get(username=username).pk - token = Token.objects.get(user=user_id) - other_info = { - "permissions": {}, - "account_creation": "", - "account_expiration": "", - } - - user = User.objects.get(username=username) - user_perms = {"user": [], "groups": []} - - for permission in user.user_permissions.all(): - if permission.name not in user_perms["user"]: - user_perms["user"].append(permission.name) - - for group in user.groups.all(): - if group.name not in user_perms["groups"]: - user_perms["groups"].append(group.name) - for permission in Permission.objects.filter(group=group): - if permission.name not in user_perms["user"]: - user_perms["user"].append(permission.name) - - other_info["permissions"] = user_perms - - other_info["account_creation"] = user.date_joined - return { - "hostname": settings.ALLOWED_HOSTS[0], - "human_readable_hostname": settings.HUMAN_READABLE_HOSTNAME, - "public_hostname": settings.PUBLIC_HOSTNAME, - "token": token.key, - "username": user.username, - "other_info": other_info, - } - - def prefixes_for_user(self, user_object): - """Prefix for a given user. - Simple function to return prefixes - that a user has ANY permission on. - - Recall that having any permission on - a prefix automatically means viewing - permission. - """ - - return list(set([i.split("_")[1] for i in user_object.get_all_permissions()])) - - def prefix_perms_for_user( - self, user_object, flatten=True, specific_permission=None - ): - """Prefix permissions for a given user.""" - - if specific_permission is None: - specific_permission = [ - "add", - "change", - "delete", - "view", - "draft", - "publish", - ] - - prefixed = self.get_user_info(user_object)["other_info"]["permissions"] - permissions = [] - for pre in prefixed["user"]: - permissions.append(Permission.objects.get(name=pre).codename) - - return permissions - - # # To store flattened permissions - # flat_perms = [] - - # # We only need the permissions that are specific - # # to the bco model. - - # bco_specific = { - # 'user' : { }, - # 'groups': { } - # } - - # if 'bco' in prefixed['user']: - # if flatten: - # flat_perms = prefixed['user']['bco'] - # else: - # bco_specific['user']['bco'] = prefixed['user']['bco'] - # else: - # if not flatten: - # bco_specific['user']['bco'] = { } - - # for k, v in prefixed['groups']: - # if 'bco' in prefixed['groups'][k]: - # if flatten: - # for perm in v['bco']: - # if perm not in flat_perms: - # flat_perms.append(perm) - # else: - # bco_specific['groups'][k] = { - # 'bco': v['bco'] - # } - # else: - # bco_specific['groups'][k] = { } - - # # Get the permissions. - # # Source: https://stackoverflow.com/a/952952 - - # # Flatten the permissions so that we can - # # work with them more easily. - - # # Return based on what we need. - # if flatten == True: - - # # Only unique permissions are returned. - # return flat_perms - - # elif flatten == False: - - # return bco_specific - - def user_from_request(self, request): - """Returns a user object from a request. - - Parameters - ---------- - request: rest_framework.request.Request - Django request object. 
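The deleted UserUtils.prefixes_for_user() above recovers prefix names from Django permission codenames of the form '<action>_<PREFIX>'. The string handling in isolation, with a made-up permission list standing in for user_object.get_all_permissions():

permissions = ["api.add_BCO", "api.publish_BCO", "api.view_TEST"]
prefixes = sorted({perm.split("_")[1] for perm in permissions})
print(prefixes)  # ['BCO', 'TEST']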
- - Returns - ------- - django.contrib.auth.models.User - """ - - user_id = Token.objects.get( - key=request.META.get("HTTP_AUTHORIZATION").split(" ")[1] - ).user_id - return User.objects.get(id=user_id) diff --git a/api/scripts/utilities/__init__.py b/api/scripts/utilities/__init__.py deleted file mode 100755 index e69de29b..00000000 diff --git a/api/serializers.py b/api/serializers.py deleted file mode 100755 index f7b337cb..00000000 --- a/api/serializers.py +++ /dev/null @@ -1,27 +0,0 @@ -from rest_framework import serializers - - -# ----- Request Serializers ----- # - - -# Serializers must be abstracted in order to use abstracted models. -# Source (last solution): https://stackoverflow.com/questions/33137165/django-rest-framework-abstract-class-serializer - -# Base serializers to be inherited by each model. - -# Abstract so that any model can be used. - -# Source (4th response): https://stackoverflow.com/questions/30831731/create-a-generic-serializer-with-a-dynamic-model-in-meta - - -def getGenericSerializer(incoming_model, incoming_fields): - class GenericObjectSerializer(serializers.ModelSerializer): - - # Arguments - # incoming_table: the table to write to. - - class Meta: - model = incoming_model - fields = incoming_fields - - return GenericObjectSerializer diff --git a/api/signals.py b/api/signals.py deleted file mode 100644 index 486aaf9d..00000000 --- a/api/signals.py +++ /dev/null @@ -1,116 +0,0 @@ -# Source: https://stackoverflow.com/a/42744626/5029459 - - -def populate_models(sender, **kwargs): - """Initial DB setup""" - - from api.models import BCO - from api.model.groups import GroupInfo - from api.scripts.utilities import DbUtils - - # The BCO groups need to be created FIRST because - # models.py listens for user creation and automatically - # adds any new user to bco_drafter and bco_publishers. - from django.contrib.auth.models import Group, Permission, User - - # # Set permissions for all of the groups. - # # Source: https://stackoverflow.com/a/18797715/5029459 - # from django.contrib.auth.models import Permission - # from django.contrib.contenttypes.models import ContentType - - # Custom publishing permissions which use the model name. - # Source: https://stackoverflow.com/a/9940053/5029459 - - # Create a bco drafter and publisher if they don't exist. - - # The groups are automatically created for these two users - # in models.py - - # NO password is set here... - if User.objects.filter(username="bco_drafter").count() == 0: - User.objects.create_user(username="bco_drafter") - - if User.objects.filter(username="bco_publisher").count() == 0: - User.objects.create_user(username="bco_publisher") - - # BCO is the anon (public) prefix. - - # Note that user creation is listened for in - # models.py by associate_user_group. - - # Create the anonymous user if they don't exist. - if User.objects.filter(username="anon").count() == 0: - User.objects.create_user(username="anon") - - # Create an administrator if they don't exist. - if User.objects.filter(username="wheel").count() == 0: - User.objects.create_superuser(username="wheel", password="wheel") - - # Make bco_publisher the group owner of the prefix 'BCO'. - if BCO.objects.filter(prefix="BCO").count() == 0: - # Django wants a primary key for the Group... - group = Group.objects.get(name="bco_publisher").name - - # Django wants a primary key for the User... 
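UserUtils.user_from_request(), deleted just above, identifies the caller by taking the second word of the HTTP Authorization header ('Token <key>') and looking that key up in the token table. The header parsing on its own, with an invented header value:

def token_from_header(authorization_header):
    # 'Token abc123' -> 'abc123'
    return authorization_header.split(" ")[1]

print(token_from_header("Token 627626823549f787c3ec763ff687169206626149"))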
- user = User.objects.get(username="bco_publisher").username - - DbUtils.DbUtils().write_object( - p_app_label="api", - p_model_name="Prefix", - p_fields=["created_by", "owner_group", "owner_user", "prefix"], - p_data={ - "created_by": user, - "owner_group": group, - "owner_user": user, - "prefix": "BCO", - }, - ) - - # Create the default (non-anon, non-wheel) groups if they don't exist. - # Group administrators - if Group.objects.filter(name="group_admins").count() == 0: - Group.objects.create(name="group_admins") - GroupInfo.objects.create( - delete_members_on_group_deletion=False, - description="Group administrators", - group=Group.objects.get(name="group_admins"), - max_n_members=-1, - owner_user=User.objects.get(username="wheel"), - ) - # Create the permissions for group administrators. - for perm in ["add", "change", "delete", "view"]: - - # Permissions already come with the system, - # so just associated them. - - # Give the group administrators the permissions. - Group.objects.get(name="group_admins").permissions.add( - Permission.objects.get(codename=perm + "_group") - ) - - # Prefix administrators - if Group.objects.filter(name="prefix_admins").count() == 0: - Group.objects.create(name="prefix_admins") - GroupInfo.objects.create( - delete_members_on_group_deletion=False, - description="Prefix administrators", - group=Group.objects.get(name="prefix_admins"), - max_n_members=-1, - owner_user=User.objects.get(username="wheel"), - ) - - # Create the permissions for prefix administrators. - for perm in ["add", "change", "delete", "view"]: - - # Permissions already come with the system, - # so just associated them. - - # Give the group administrators the permissions. - Group.objects.get(name="prefix_admins").permissions.add( - Permission.objects.get(codename=perm + "_prefix") - ) - - # Associate wheel with all groups. - group = Group.objects.all() - for g in group: - User.objects.get(username="wheel").groups.add(g) diff --git a/api/templates/api/account_activation_message.html b/api/templates/api/account_activation_message.html deleted file mode 100644 index b6a8e816..00000000 --- a/api/templates/api/account_activation_message.html +++ /dev/null @@ -1,41 +0,0 @@ - - - - - - Portal Account Activation - - -
- {% if activation_success == True %}
- Successful activation! You may close this window or open Portal in a new tab.
- {% else %}
- Unsuccessful activation! The account may not have been requested or may have already been activated. Please request another activation e-mail on the Portal.
- {% endif %}
- - \ No newline at end of file diff --git a/api/urls.py b/api/urls.py deleted file mode 100755 index f3489c8d..00000000 --- a/api/urls.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python3 -"""BCODB URLs - -URL access points for API -""" - -# For importing configuration files -import configparser -from django.conf import settings - -# For favicon and any other static files -from django.urls import path, re_path -from django.contrib.staticfiles.storage import staticfiles_storage -from django.views.generic.base import RedirectView - -from rest_framework import permissions -from drf_yasg.views import get_schema_view -from drf_yasg import openapi - -from api.views import ( - ApiGroupsCreate, - ApiGroupsInfo, - ApiGroupsDelete, - ApiGroupsModify, - ApiObjectsDraftsCreate, - ApiObjectsDraftsModify, - ApiObjectsDraftsPermissions, - ApiObjectsDraftsPermissionsSet, - ApiObjectsDraftsPublish, - ApiObjectsDraftsRead, - ApiObjectsPublished, - ApiObjectsSearch, - ApiObjectsToken, - ApiPrefixesCreate, - ApiPrefixesDelete, - ApiPrefixesPermissionsSet, - ApiPrefixesToken, - ApiPrefixesTokenFlat, - ApiPrefixesModify, - ApiObjectsPublish, - ApiObjectsDraftsToken, - ApiPublicDescribe, - DraftObjectId, - ObjectIdRootObjectId, - ObjectIdRootObjectIdVersion, - ValidateBCO, -) - -# Load the server config file. -server_config = configparser.ConfigParser() -server_config.read(settings.BASE_DIR + "/server.conf") - -PUBLISH_ONLY = server_config["PUBLISHONLY"]["publishonly"] -VERSION = server_config["VERSION"]["version"] - -ShcemaView = get_schema_view( - openapi.Info( - title="BioCompute Object Data Base API (BCODB API)", - default_version=VERSION, - description="A web application that can be used to create, store and " - "edit BioCompute objects based on BioCompute schema described " - "in the BCO specification document.", - terms_of_service="https://github.com/biocompute-objects/bco_api/blob/master/LICENSE", - contact=openapi.Contact(email="object.biocompute@gmail.com"), - license=openapi.License(name="MIT License"), - ), - public=True, - permission_classes=(permissions.AllowAny,), -) - -urlpatterns = [] - -# Do we have a publish-only server? 
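The deleted api/urls.py reads a server.conf file with configparser and branches the URL patterns on a publish-only flag. A minimal sketch of that configuration read; the section and option names come from the deleted code, while the file contents below are invented for illustration:

import configparser

conf_text = """
[PUBLISHONLY]
publishonly = False

[VERSION]
version = 1.0.0
"""

server_config = configparser.ConfigParser()
server_config.read_string(conf_text)
PUBLISH_ONLY = server_config["PUBLISHONLY"]["publishonly"]
VERSION = server_config["VERSION"]["version"]
print(PUBLISH_ONLY, VERSION)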
-if PUBLISH_ONLY == "True": - urlpatterns = [ - re_path( - r"^api/doc(?P\.json|\.yaml)$", - ShcemaView.without_ui(cache_timeout=0), - name="schema-json", - ), - path( - "api/docs/", - ShcemaView.with_ui("swagger", cache_timeout=0), - name="schema-swagger-ui", - ), - path( - "api/redocs/", - ShcemaView.with_ui("redoc", cache_timeout=0), - name="schema-redoc", - ), - path("", ObjectIdRootObjectId.as_view()), - path( - "/", - ObjectIdRootObjectIdVersion.as_view(), - ), - path("api/objects/publish/", ApiObjectsPublish.as_view()), - path("api/objects/published/", ApiObjectsPublished.as_view()), - path("api/public/describe/", ApiPublicDescribe.as_view()), - ] - -elif PUBLISH_ONLY == "False": - urlpatterns = [ - re_path( - r"^api/docs(?P\.json|\.yaml)$", - ShcemaView.without_ui(cache_timeout=0), - name="schema-json", - ), - path( - "favicon.ico", - RedirectView.as_view(url=staticfiles_storage.url("img/favicon.ico")), - ), - path( - "api/docs/", - ShcemaView.with_ui("swagger", cache_timeout=0), - name="schema-swagger-ui", - ), - path( - "api/redocs/", - ShcemaView.with_ui("redoc", cache_timeout=0), - name="schema-redoc", - ), - path("/DRAFT", DraftObjectId.as_view()), - path( - "/", - ObjectIdRootObjectIdVersion.as_view(), - ), - path("", ObjectIdRootObjectId.as_view()), - path("api/groups/group_info/", ApiGroupsInfo.as_view()), - path("api/groups/create/", ApiGroupsCreate.as_view()), - path("api/groups/delete/", ApiGroupsDelete.as_view()), - path("api/groups/modify/", ApiGroupsModify.as_view()), - path("api/objects/drafts/create/", ApiObjectsDraftsCreate.as_view()), - path("api/objects/drafts/modify/", ApiObjectsDraftsModify.as_view()), - path("api/objects/drafts/permissions/", ApiObjectsDraftsPermissions.as_view()), - path( - "api/objects/drafts/permissions/set/", - ApiObjectsDraftsPermissionsSet.as_view(), - ), - path("api/objects/drafts/publish/", ApiObjectsDraftsPublish.as_view()), - path("api/objects/drafts/read/", ApiObjectsDraftsRead.as_view()), - path("api/objects/drafts/token/", ApiObjectsDraftsToken.as_view()), - path("api/objects/publish/", ApiObjectsPublish.as_view()), - path("api/objects/search/", ApiObjectsSearch.as_view()), - path("api/objects/validate/", ValidateBCO.as_view()), - path("api/objects/token/", ApiObjectsToken.as_view()), - path("api/objects/published/", ApiObjectsPublished.as_view()), - path("api/prefixes/create/", ApiPrefixesCreate.as_view()), - path("api/prefixes/delete/", ApiPrefixesDelete.as_view()), - path("api/prefixes/permissions/set/", ApiPrefixesPermissionsSet.as_view()), - path("api/prefixes/token/", ApiPrefixesToken.as_view()), - path("api/prefixes/token/flat/", ApiPrefixesTokenFlat.as_view()), - path("api/prefixes/modify/", ApiPrefixesModify.as_view()), - path("api/public/describe/", ApiPublicDescribe.as_view()), - ] diff --git a/api/validation_definitions/IEEE/2791object.json b/api/validation_definitions/IEEE/2791object.json deleted file mode 100755 index 7c0c25b0..00000000 --- a/api/validation_definitions/IEEE/2791object.json +++ /dev/null @@ -1,178 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/2791object.json", - "type": "object", - "title": "Base type for all IEEE-2791 Objects", - "description": "All IEEE-2791 object types must adhear to this type in order to be compliant with IEEE-2791 standard", - "required": [ - "object_id", - "spec_version", - "etag", - "provenance_domain", - "usability_domain", - "description_domain", - "execution_domain", - "io_domain" - ], - "definitions": { - 
"object_id": { - "type": "string", - "description": "A unique identifier that should be applied to each IEEE-2791 Object instance, generated and assigned by a IEEE-2791 database engine. IDs should never be reused" - }, - "uri": { - "type": "object", - "description": "Any of the four Resource Identifers defined at https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-7.3.5", - "additionalProperties": false, - "required": [ - "uri" - ], - "properties": { - "filename": { - "type": "string" - }, - "uri": { - "type": "string", - "format": "uri" - }, - "access_time": { - "type": "string", - "description": "Time stamp of when the request for this data was submitted", - "format": "date-time" - }, - "sha1_checksum": { - "type": "string", - "description": "output of hash function that produces a message digest", - "pattern": "[A-Za-z0-9]+" - } - } - }, - "contributor": { - "type": "object", - "description": "Contributor identifier and type of contribution (determined according to PAV ontology) is required", - "required": [ - "contribution", - "name" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Name of contributor", - "examples": [ - "Charles Darwin" - ] - }, - "affiliation": { - "type": "string", - "description": "Organization the particular contributor is affiliated with", - "examples": [ - "HMS Beagle" - ] - }, - "email": { - "type": "string", - "description": "electronic means for identification and communication purposes", - "examples": [ - "name@example.edu" - ], - "format": "email" - }, - "contribution": { - "type": "array", - "description": "type of contribution determined according to PAV ontology", - "reference": "https://doi.org/10.1186/2041-1480-4-37", - "items": { - "type": "string", - "enum": [ - "authoredBy", - "contributedBy", - "createdAt", - "createdBy", - "createdWith", - "curatedBy", - "derivedFrom", - "importedBy", - "importedFrom", - "providedBy", - "retrievedBy", - "retrievedFrom", - "sourceAccessedBy" - ] - } - }, - "orcid": { - "type": "string", - "description": "Field to record author information. ORCID identifiers allow for the author to curate their information after submission. ORCID identifiers must be valid and must have the prefix ‘https://orcid.org/’", - "examples": [ - "http://orcid.org/0000-0002-1825-0097" - ], - "format": "uri" - } - } - } - }, - "additionalProperties": false, - "properties": { - "object_id": { - "$ref": "#/definitions/object_id", - "readOnly": true - }, - "spec_version": { - "type": "string", - "description": "Version of the IEEE-2791 specification used to define this document", - "examples": [ - "https://w3id.org/ieee/ieee-2791-schema/" - ], - "readOnly": true, - "format": "uri" - }, - "etag": { - "type": "string", - "description": "See https://tools.ietf.org/html/rfc7232#section-2.1 for full description. 
It is recommended that the ETag be deleted or updated if the object file is changed (except in cases using weak ETags in which the entirety of the change comprises a simple re-writing of the JSON).", - "examples": [ - "5986B05969341343E77A95B4023600FC8FEF48B7E79F355E58B0B404A4F50995" - ], - "readOnly": true, - "pattern": "^([A-Za-z0-9]+)$" - }, - "provenance_domain": { - "$ref": "provenance_domain.json" - }, - "usability_domain": { - "$ref": "usability_domain.json" - }, - "extension_domain": { - "type": "array", - "description": "An optional domain that contains user-defined fields.", - "items":{ - "required":[ - "extension_schema" - ], - "additionalProperties": true, - "properties": { - "extension_schema":{ - "title": "Extension Schema", - "description": "resolving this URI should provide this extension's JSON Schema", - "type": "string", - "format": "uri" - } - } - } - }, - "description_domain": { - "$ref": "description_domain.json" - }, - "execution_domain": { - "$ref": "execution_domain.json" - }, - "parametric_domain": { - "$ref": "parametric_domain.json" - }, - "io_domain": { - "$ref": "io_domain.json" - }, - "error_domain": { - "$ref": "error_domain.json" - } - } -} \ No newline at end of file diff --git a/api/validation_definitions/IEEE/description_domain.json b/api/validation_definitions/IEEE/description_domain.json deleted file mode 100755 index f22610e8..00000000 --- a/api/validation_definitions/IEEE/description_domain.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/description_domain.json", - "type": "object", - "title": "Description Domain", - "description": "Structured field for description of external references, the pipeline steps, and the relationship of I/O objects.", - "required": [ - "keywords", - "pipeline_steps" - ], - "properties": { - "keywords": { - "type": "array", - "description": "Keywords to aid in search-ability and description of the object.", - "items": { - "type": "string", - "description": "This field should take free text value using common biological research terminology.", - "examples": [ - "HCV1a", - "Ledipasvir", - "antiviral resistance", - "SNP", - "amino acid substitutions" - ] - } - }, - "xref": { - "type": "array", - "description": "List of the databases or ontology IDs that are cross-referenced in the IEEE-2791 Object.", - "items": { - "type": "object", - "description": "External references are stored in the form of prefixed identifiers (CURIEs). 
These CURIEs map directly to the URIs maintained by Identifiers.org.", - "reference": "https://identifiers.org/", - "required": [ - "namespace", - "name", - "ids", - "access_time" - ], - "properties": { - "namespace": { - "type": "string", - "description": "External resource vendor prefix", - "examples": [ - "pubchem.compound" - ] - }, - "name": { - "type": "string", - "description": "Name of external reference", - "examples": [ - "PubChem-compound" - ] - }, - "ids": { - "type": "array", - "description": "List of reference identifiers", - "items": { - "type": "string", - "description": "Reference identifier", - "examples": [ - "67505836" - ] - } - }, - "access_time": { - "type": "string", - "description": "Date and time the external reference was accessed", - "format": "date-time" - } - } - } - }, - "platform": { - "type": "array", - "description": "reference to a particular deployment of an existing platform where this IEEE-2791 Object can be reproduced.", - "items": { - "type": "string", - "examples": [ - "hive" - ] - } - }, - "pipeline_steps": { - "type": "array", - "description": "Each individual tool (or a well defined and reusable script) is represented as a step. Parallel processes are given the same step number.", - "items": { - "additionalProperties": false, - "type": "object", - "required": [ - "step_number", - "name", - "description", - "input_list", - "output_list" - ], - "properties": { - "step_number": { - "type": "integer", - "description": "Non-negative integer value representing the position of the tool in a one-dimensional representation of the pipeline." - }, - "name": { - "type": "string", - "description": "This is a recognized name of the software tool", - "examples": [ - "HIVE-hexagon" - ] - }, - "description": { - "type": "string", - "description": "Specific purpose of the tool.", - "examples": [ - "Alignment of reads to a set of references" - ] - }, - "version": { - "type": "string", - "description": "Version assigned to the instance of the tool used corresponding to the upstream release.", - "examples": [ - "1.3" - ] - }, - "prerequisite": { - "type": "array", - "description": "Reference or required prereqs", - "items": { - "type": "object", - "description": "Text value to indicate a package or prerequisite for running the tool used.", - "required": [ - "name", - "uri" - ], - "properties": { - "name": { - "type": "string", - "description": "Public searchable name for reference or prereq.", - "examples": [ - "Hepatitis C virus genotype 1" - ] - }, - "uri": { - "$ref": "2791object.json#/definitions/uri" - } - } - } - }, - "input_list": { - "type": "array", - "description": "URIs (expressed as a URN or URL) of the input files for each tool.", - "items": { - "$ref": "2791object.json#/definitions/uri" - } - }, - "output_list": { - "type": "array", - "description": "URIs (expressed as a URN or URL) of the output files for each tool.", - "items": { - "$ref": "2791object.json#/definitions/uri" - } - } - } - } - } - } -} diff --git a/api/validation_definitions/IEEE/error_domain.json b/api/validation_definitions/IEEE/error_domain.json deleted file mode 100755 index c0be62b0..00000000 --- a/api/validation_definitions/IEEE/error_domain.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/error_domain.json", - "type": "object", - "title": "Error Domain", - "description": "Fields in the Error Domain are open-ended and not restricted nor defined by the IEEE-2791 standard. 
It is RECOMMENDED that the keys directly under empirical_error and algorithmic_error use a full URI. Resolving the URI SHOULD give a JSON Schema or textual definition of the field. Other keys are not allowed error_domain", - "additionalProperties": false, - "required": [ - "empirical_error", - "algorithmic_error" - ], - "properties": { - "empirical_error": { - "type": "object", - "title": "Empirical Error", - "description": "empirically determined values such as limits of detectability, false positives, false negatives, statistical confidence of outcomes, etc. This can be measured by running the algorithm on multiple data samples of the usability domain or through the use of carefully designed in-silico data." - }, - "algorithmic_error": { - "type": "object", - "title": "Algorithmic Error", - "description": "descriptive of errors that originate by fuzziness of the algorithms, driven by stochastic processes, in dynamically parallelized multi-threaded executions, or in machine learning methodologies where the state of the machine can affect the outcome." - } - } -} diff --git a/api/validation_definitions/IEEE/execution_domain.json b/api/validation_definitions/IEEE/execution_domain.json deleted file mode 100755 index 858cad2d..00000000 --- a/api/validation_definitions/IEEE/execution_domain.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/execution_domain.json", - "type": "object", - "title": "Execution Domain", - "description": "The fields required for execution of the IEEE-2791 Object are herein encapsulated together in order to clearly separate information needed for deployment, software configuration, and running applications in a dependent environment", - "required": [ - "script", - "script_driver", - "software_prerequisites", - "external_data_endpoints", - "environment_variables" - ], - "additionalProperties": false, - "properties": { - "script": { - "type": "array", - "description": "points to a script object or objects that was used to perform computations for this IEEE-2791 Object instance.", - "items": { - "additionalProperties": false, - "properties": { - "uri": { - "$ref": "2791object.json#/definitions/uri" - } - } - } - }, - "script_driver": { - "type": "string", - "description": "Indication of the kind of executable that can be launched in order to perform a sequence of commands described in the script in order to run the pipelin", - "examples": [ - "hive", - "cwl-runner", - "shell" - ] - }, - "software_prerequisites": { - "type": "array", - "description": "Minimal necessary prerequisites, library, tool versions needed to successfully run the script to produce this IEEE-2791 Object.", - "items": { - "type": "object", - "description": "A necessary prerequisite, library, or tool version.", - "required": [ - "name", - "version", - "uri" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Names of software prerequisites", - "examples": [ - "HIVE-hexagon" - ] - }, - "version": { - "type": "string", - "description": "Versions of the software prerequisites", - "examples": [ - "babajanian.1" - ] - }, - "uri": { - "$ref": "2791object.json#/definitions/uri" - } - } - } - }, - "external_data_endpoints": { - "type": "array", - "description": "Minimal necessary domain-specific external data source access in order to successfully run the script to produce this IEEE-2791 Object.", - "items": { - "type": "object", - "description": "Requirement for 
network protocol endpoints used by a pipeline’s scripts, or other software.", - "required": [ - "name", - "url" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Description of the service that is accessed", - "examples": [ - "HIVE", - "access to e-utils" - ] - }, - "url": { - "type": "string", - "description": "The endpoint to be accessed.", - "examples": [ - "https://hive.biochemistry.gwu.edu/dna.cgi?cmd=login" - ] - } - } - } - }, - "environment_variables": { - "type": "object", - "description": "Environmental parameters that are useful to configure the execution environment on the target platform.", - "additionalProperties": false, - "patternProperties": { - "^[a-zA-Z_]+[a-zA-Z0-9_]*$": { - "type": "string" - } - } - } - } -} diff --git a/api/validation_definitions/IEEE/io_domain.json b/api/validation_definitions/IEEE/io_domain.json deleted file mode 100755 index c460e576..00000000 --- a/api/validation_definitions/IEEE/io_domain.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/io_domain.json", - "type": "object", - "title": "Input and Output Domain", - "description": "The list of global input and output files created by the computational workflow, excluding the intermediate files. Custom to every specific IEEE-2791 Object implementation, these fields are pointers to objects that can reside in the system performing the computation or any other accessible system.", - "required": [ - "input_subdomain", - "output_subdomain" - ], - "properties": { - "input_subdomain": { - "type": "array", - "title": "input_domain", - "description": "A record of the references and input files for the entire pipeline. Each type of input file is listed under a key for that type.", - "items": { - "additionalProperties": false, - "type": "object", - "required": [ - "uri" - ], - "properties": { - "uri": { - "$ref": "2791object.json#/definitions/uri" - } - } - } - }, - "output_subdomain": { - "type": "array", - "title": "output_subdomain", - "description": "A record of the outputs for the entire pipeline.", - "items": { - "type": "object", - "title": "The Items Schema", - "required": [ - "mediatype", - "uri" - ], - "properties": { - "mediatype": { - "type": "string", - "title": "mediatype", - "description": "https://www.iana.org/assignments/media-types/", - "default": "application/octet-stream", - "examples": [ - "text/csv" - ], - "pattern": "^(.*)$" - }, - "uri": { - "$ref": "2791object.json#/definitions/uri" - } - } - } - } - } -} diff --git a/api/validation_definitions/IEEE/parametric_domain.json b/api/validation_definitions/IEEE/parametric_domain.json deleted file mode 100755 index cde0644b..00000000 --- a/api/validation_definitions/IEEE/parametric_domain.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/parametric_domain.json", - "type": "array", - "title": "Parametric Domain", - "description": "This represents the list of NON-default parameters customizing the computational flow which can affect the output of the calculations. 
These fields can be custom to each kind of analysis and are tied to a particular pipeline implementation", - "items":{ - "required": [ - "param", - "value", - "step" - ], - "additionalProperties": false, - "properties": { - "param": { - "type": "string", - "title": "param", - "description": "Specific variables for the computational workflow", - "examples": [ - "seed" - ] - }, - "value": { - "type": "string", - "description": "Specific (non-default) parameter values for the computational workflow", - "title": "value", - "examples": [ - "14" - ] - }, - "step": { - "type": "string", - "title": "step", - "description": "Refers to the specific step of the workflow relevant to the parameters specified in 'param' and 'value'", - "examples": [ - "1" - ], - "pattern": "^(.*)$" - } - } - } -} diff --git a/api/validation_definitions/IEEE/provenance_domain.json b/api/validation_definitions/IEEE/provenance_domain.json deleted file mode 100755 index 0c1aa5ac..00000000 --- a/api/validation_definitions/IEEE/provenance_domain.json +++ /dev/null @@ -1,126 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/provenance_domain.json", - "type": "object", - "title": "Provenance Domain", - "description": "Structured field for tracking data through transformations, including contributors, reviewers, and versioning.", - "required": [ - "name", - "version", - "created", - "modified", - "contributors", - "license" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Public searchable name for IEEE-2791 Object. This public field should take free text value using common biological research terminology supporting the terminology used in the usability_domain, external references (xref), and keywords sections.", - "examples": [ - "HCV1a ledipasvir resistance SNP detection" - ] - }, - "version": { - "type": "string", - "description": "Records the versioning of this IEEE-2791 Object instance. IEEE-2791 Object Version should adhere to semantic versioning as recommended by Semantic Versioning 2.0.0.", - "reference": "https://semver.org/spec/v2.0.0.html", - "examples": [ - "2.9" - ] - }, - "review": { - "type": "array", - "description": "Description of the current verification status of an object in the review process. The unreviewed flag indicates that the object has been submitted, but no further evaluation or verification has occurred. The in-review flag indicates that verification is underway. The approved flag indicates that the IEEE-2791 Object has been verified and reviewed. The suspended flag indicates an object that was once valid is no longer considered valid. The rejected flag indicates that an error or inconsistency was detected in the IEEE-2791 Object, and it has been removed or rejected. The fields from the contributor object (described in section 2.1.10) is inherited to populate the reviewer section.", - "items": { - "type": "object", - "required": [ - "status", - "reviewer" - ], - "additionalProperties": false, - "properties": { - "date": { - "type": "string", - "format": "date-time" - }, - "reviewer": { - "$ref": "2791object.json#/definitions/contributor", - "description": "Contributer that assigns IEEE-2791 review status." - }, - "reviewer_comment": { - "type": "string", - "description": "Optional free text comment by reviewer", - "examples": [ - "Approved by research institution staff. 
Waiting for approval from regulator" - ] - }, - "status": { - "type": "string", - "enum": [ - "unreviewed", - "in-review", - "approved", - "rejected", - "suspended" - ], - "description": "Current verification status of the IEEE-2791 Object", - "default": "unreviewed" - } - } - } - }, - "derived_from": { - "description": "value of `ieee2791_id` field of another IEEE-2791 that this object is partially or fully derived from", - "$ref": "2791object.json#/definitions/object_id" - }, - "obsolete_after": { - "type": "string", - "description": "If the object has an expiration date, this optional field will specify that using the ‘datetime’ type described in ISO-8601 format, as clarified by W3C https://www.w3.org/TR/NOTE-datetime.", - "format": "date-time" - }, - "embargo": { - "type": "object", - "description": "If the object has a period of time during which it shall not be made public, that range can be specified using these optional fields. Using the datetime type, a start and end time are specified for the embargo.", - "additionalProperties": false, - "properties": { - "start_time": { - "type": "string", - "description": "Beginning date of embargo period.", - "format": "date-time" - }, - "end_time": { - "type": "string", - "description": "End date of embargo period.", - "format": "date-time" - } - } - }, - "created": { - "type": "string", - "description": "Date and time of the IEEE-2791 Object creation", - "readOnly": true, - "format": "date-time" - }, - "modified": { - "type": "string", - "description": "Date and time the IEEE-2791 Object was last modified", - "readOnly": true, - "format": "date-time" - }, - "contributors": { - "type": "array", - "description": "This is a list to hold contributor identifiers and a description of their type of contribution, including a field for ORCIDs to record author information, as they allow for the author to curate their information after submission. The contribution type is a choice taken from PAV ontology: provenance, authoring and versioning, which also maps to the PROV-O.", - "items": { - "$ref": "2791object.json#/definitions/contributor" - } - }, - "license": { - "type": "string", - "description": "Creative Commons license or other license information (text) space. The default or recommended license can be Attribution 4.0 International as shown in example", - "examples": [ - "https://spdx.org/licenses/CC-BY-4.0.html" - ] - } - } -} diff --git a/api/validation_definitions/IEEE/usability_domain.json b/api/validation_definitions/IEEE/usability_domain.json deleted file mode 100755 index 54e936e4..00000000 --- a/api/validation_definitions/IEEE/usability_domain.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/usability_domain.json", - "type": "array", - "title": "Usability Domain", - "description": "Author-defined usability domain of the IEEE-2791 Object. 
This field is to aid in search-ability and provide a specific description of the function of the object.", - "items": { - "type": "string", - "description": "Free text values that can be used to provide scientific reasoning and purpose for the experiment", - "examples": [ - "Identify baseline single nucleotide polymorphisms SNPs [SO:0000694], insertions [so:SO:0000667], and deletions [so:SO:0000045] that correlate with reduced ledipasvir [pubchem.compound:67505836] antiviral drug efficacy in Hepatitis C virus subtype 1 [taxonomy:31646]", - "Identify treatment emergent amino acid substitutions [so:SO:0000048] that correlate with antiviral drug treatment failure", - "Determine whether the treatment emergent amino acid substitutions [so:SO:0000048] identified correlate with treatment failure involving other drugs against the same virus" - ] - } -} diff --git a/api/validation_definitions/IEEE_sub/IEEE2791-2020.schema b/api/validation_definitions/IEEE_sub/IEEE2791-2020.schema deleted file mode 100755 index 2506be6b..00000000 --- a/api/validation_definitions/IEEE_sub/IEEE2791-2020.schema +++ /dev/null @@ -1,178 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/2791object.json", - "type": "object", - "title": "Base type for all IEEE-2791 Objects", - "description": "All IEEE-2791 object types must adhear to this type in order to be compliant with IEEE-2791 standard", - "required": [ - "object_id", - "spec_version", - "etag", - "provenance_domain", - "usability_domain", - "description_domain", - "execution_domain", - "io_domain" - ], - "definitions": { - "object_id": { - "type": "string", - "description": "A unique identifier that should be applied to each IEEE-2791 Object instance, generated and assigned by a IEEE-2791 database engine. 
IDs should never be reused" - }, - "uri": { - "type": "object", - "description": "Any of the four Resource Identifers defined at https://tools.ietf.org/html/draft-handrews-json-schema-validation-01#section-7.3.5", - "additionalProperties": false, - "required": [ - "uri" - ], - "properties": { - "filename": { - "type": "string" - }, - "uri": { - "type": "string", - "format": "uri" - }, - "access_time": { - "type": "string", - "description": "Time stamp of when the request for this data was submitted", - "format": "date-time" - }, - "sha1_checksum": { - "type": "string", - "description": "output of hash function that produces a message digest", - "pattern": "[A-Za-z0-9]+" - } - } - }, - "contributor": { - "type": "object", - "description": "Contributor identifier and type of contribution (determined according to PAV ontology) is required", - "required": [ - "contribution", - "name" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Name of contributor", - "examples": [ - "Charles Darwin" - ] - }, - "affiliation": { - "type": "string", - "description": "Organization the particular contributor is affiliated with", - "examples": [ - "HMS Beagle" - ] - }, - "email": { - "type": "string", - "description": "electronic means for identification and communication purposes", - "examples": [ - "name@example.edu" - ], - "format": "email" - }, - "contribution": { - "type": "array", - "description": "type of contribution determined according to PAV ontology", - "reference": "https://doi.org/10.1186/2041-1480-4-37", - "items": { - "type": "string", - "enum": [ - "authoredBy", - "contributedBy", - "createdAt", - "createdBy", - "createdWith", - "curatedBy", - "derivedFrom", - "importedBy", - "importedFrom", - "providedBy", - "retrievedBy", - "retrievedFrom", - "sourceAccessedBy" - ] - } - }, - "orcid": { - "type": "string", - "description": "Field to record author information. ORCID identifiers allow for the author to curate their information after submission. ORCID identifiers must be valid and must have the prefix ‘https://orcid.org/’", - "examples": [ - "http://orcid.org/0000-0002-1825-0097" - ], - "format": "uri" - } - } - } - }, - "additionalProperties": false, - "properties": { - "object_id": { - "$ref": "#/definitions/object_id", - "readOnly": true - }, - "spec_version": { - "type": "string", - "description": "Version of the IEEE-2791 specification used to define this document", - "examples": [ - "https://w3id.org/ieee/ieee-2791-schema/" - ], - "readOnly": true, - "format": "uri" - }, - "etag": { - "type": "string", - "description": "See https://tools.ietf.org/html/rfc7232#section-2.1 for full description. 
It is recommended that the ETag be deleted or updated if the object file is changed (except in cases using weak ETags in which the entirety of the change comprises a simple re-writing of the JSON).", - "examples": [ - "5986B05969341343E77A95B4023600FC8FEF48B7E79F355E58B0B404A4F50995" - ], - "readOnly": true, - "pattern": "^([A-Za-z0-9]+)$" - }, - "provenance_domain": { - "$ref": "domains/provenance_domain.json" - }, - "usability_domain": { - "$ref": "domains/usability_domain.json" - }, - "extension_domain": { - "type": "array", - "description": "An optional domain that contains user-defined fields.", - "items":{ - "required":[ - "extension_schema" - ], - "additionalProperties": true, - "properties": { - "extension_schema":{ - "title": "Extension Schema", - "description": "resolving this URI should provide this extension's JSON Schema", - "type": "string", - "format": "uri" - } - } - } - }, - "description_domain": { - "$ref": "domains/description_domain.json" - }, - "execution_domain": { - "$ref": "domains/execution_domain.json" - }, - "parametric_domain": { - "$ref": "domains/parametric_domain.json" - }, - "io_domain": { - "$ref": "domains/io_domain.json" - }, - "error_domain": { - "$ref": "domains/error_domain.json" - } - } -} \ No newline at end of file diff --git a/api/validation_definitions/IEEE_sub/domains/description_domain.json b/api/validation_definitions/IEEE_sub/domains/description_domain.json deleted file mode 100755 index a9eac520..00000000 --- a/api/validation_definitions/IEEE_sub/domains/description_domain.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/description_domain.json", - "type": "object", - "title": "Description Domain", - "description": "Structured field for description of external references, the pipeline steps, and the relationship of I/O objects.", - "required": [ - "keywords", - "pipeline_steps" - ], - "properties": { - "keywords": { - "type": "array", - "description": "Keywords to aid in search-ability and description of the object.", - "items": { - "type": "string", - "description": "This field should take free text value using common biological research terminology.", - "examples": [ - "HCV1a", - "Ledipasvir", - "antiviral resistance", - "SNP", - "amino acid substitutions" - ] - } - }, - "xref": { - "type": "array", - "description": "List of the databases or ontology IDs that are cross-referenced in the IEEE-2791 Object.", - "items": { - "type": "object", - "description": "External references are stored in the form of prefixed identifiers (CURIEs). 
These CURIEs map directly to the URIs maintained by Identifiers.org.", - "reference": "https://identifiers.org/", - "required": [ - "namespace", - "name", - "ids", - "access_time" - ], - "properties": { - "namespace": { - "type": "string", - "description": "External resource vendor prefix", - "examples": [ - "pubchem.compound" - ] - }, - "name": { - "type": "string", - "description": "Name of external reference", - "examples": [ - "PubChem-compound" - ] - }, - "ids": { - "type": "array", - "description": "List of reference identifiers", - "items": { - "type": "string", - "description": "Reference identifier", - "examples": [ - "67505836" - ] - } - }, - "access_time": { - "type": "string", - "description": "Date and time the external reference was accessed", - "format": "date-time" - } - } - } - }, - "platform": { - "type": "array", - "description": "reference to a particular deployment of an existing platform where this IEEE-2791 Object can be reproduced.", - "items": { - "type": "string", - "examples": [ - "hive" - ] - } - }, - "pipeline_steps": { - "type": "array", - "description": "Each individual tool (or a well defined and reusable script) is represented as a step. Parallel processes are given the same step number.", - "items": { - "additionalProperties": false, - "type": "object", - "required": [ - "step_number", - "name", - "description", - "input_list", - "output_list" - ], - "properties": { - "step_number": { - "type": "integer", - "description": "Non-negative integer value representing the position of the tool in a one-dimensional representation of the pipeline." - }, - "name": { - "type": "string", - "description": "This is a recognized name of the software tool", - "examples": [ - "HIVE-hexagon" - ] - }, - "description": { - "type": "string", - "description": "Specific purpose of the tool.", - "examples": [ - "Alignment of reads to a set of references" - ] - }, - "version": { - "type": "string", - "description": "Version assigned to the instance of the tool used corresponding to the upstream release.", - "examples": [ - "1.3" - ] - }, - "prerequisite": { - "type": "array", - "description": "Reference or required prereqs", - "items": { - "type": "object", - "description": "Text value to indicate a package or prerequisite for running the tool used.", - "required": [ - "name", - "uri" - ], - "properties": { - "name": { - "type": "string", - "description": "Public searchable name for reference or prereq.", - "examples": [ - "Hepatitis C virus genotype 1" - ] - }, - "uri": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - } - } - }, - "input_list": { - "type": "array", - "description": "URIs (expressed as a URN or URL) of the input files for each tool.", - "items": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - }, - "output_list": { - "type": "array", - "description": "URIs (expressed as a URN or URL) of the output files for each tool.", - "items": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - } - } - } - } - } -} diff --git a/api/validation_definitions/IEEE_sub/domains/error_domain.json b/api/validation_definitions/IEEE_sub/domains/error_domain.json deleted file mode 100755 index c0be62b0..00000000 --- a/api/validation_definitions/IEEE_sub/domains/error_domain.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/error_domain.json", - "type": "object", - "title": "Error Domain", - "description": "Fields in the Error Domain are 
open-ended and not restricted nor defined by the IEEE-2791 standard. It is RECOMMENDED that the keys directly under empirical_error and algorithmic_error use a full URI. Resolving the URI SHOULD give a JSON Schema or textual definition of the field. Other keys are not allowed error_domain", - "additionalProperties": false, - "required": [ - "empirical_error", - "algorithmic_error" - ], - "properties": { - "empirical_error": { - "type": "object", - "title": "Empirical Error", - "description": "empirically determined values such as limits of detectability, false positives, false negatives, statistical confidence of outcomes, etc. This can be measured by running the algorithm on multiple data samples of the usability domain or through the use of carefully designed in-silico data." - }, - "algorithmic_error": { - "type": "object", - "title": "Algorithmic Error", - "description": "descriptive of errors that originate by fuzziness of the algorithms, driven by stochastic processes, in dynamically parallelized multi-threaded executions, or in machine learning methodologies where the state of the machine can affect the outcome." - } - } -} diff --git a/api/validation_definitions/IEEE_sub/domains/execution_domain.json b/api/validation_definitions/IEEE_sub/domains/execution_domain.json deleted file mode 100755 index 26a7930b..00000000 --- a/api/validation_definitions/IEEE_sub/domains/execution_domain.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/execution_domain.json", - "type": "object", - "title": "Execution Domain", - "description": "The fields required for execution of the IEEE-2791 Object are herein encapsulated together in order to clearly separate information needed for deployment, software configuration, and running applications in a dependent environment", - "required": [ - "script", - "script_driver", - "software_prerequisites", - "external_data_endpoints", - "environment_variables" - ], - "additionalProperties": false, - "properties": { - "script": { - "type": "array", - "description": "points to a script object or objects that was used to perform computations for this IEEE-2791 Object instance.", - "items": { - "additionalProperties": false, - "properties": { - "uri": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - } - } - }, - "script_driver": { - "type": "string", - "description": "Indication of the kind of executable that can be launched in order to perform a sequence of commands described in the script in order to run the pipelin", - "examples": [ - "hive", - "cwl-runner", - "shell" - ] - }, - "software_prerequisites": { - "type": "array", - "description": "Minimal necessary prerequisites, library, tool versions needed to successfully run the script to produce this IEEE-2791 Object.", - "items": { - "type": "object", - "description": "A necessary prerequisite, library, or tool version.", - "required": [ - "name", - "version", - "uri" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Names of software prerequisites", - "examples": [ - "HIVE-hexagon" - ] - }, - "version": { - "type": "string", - "description": "Versions of the software prerequisites", - "examples": [ - "babajanian.1" - ] - }, - "uri": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - } - } - }, - "external_data_endpoints": { - "type": "array", - "description": "Minimal necessary domain-specific external data source access in order 
to successfully run the script to produce this IEEE-2791 Object.", - "items": { - "type": "object", - "description": "Requirement for network protocol endpoints used by a pipeline’s scripts, or other software.", - "required": [ - "name", - "url" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Description of the service that is accessed", - "examples": [ - "HIVE", - "access to e-utils" - ] - }, - "url": { - "type": "string", - "description": "The endpoint to be accessed.", - "examples": [ - "https://hive.biochemistry.gwu.edu/dna.cgi?cmd=login" - ] - } - } - } - }, - "environment_variables": { - "type": "object", - "description": "Environmental parameters that are useful to configure the execution environment on the target platform.", - "additionalProperties": false, - "patternProperties": { - "^[a-zA-Z_]+[a-zA-Z0-9_]*$": { - "type": "string" - } - } - } - } -} diff --git a/api/validation_definitions/IEEE_sub/domains/io_domain.json b/api/validation_definitions/IEEE_sub/domains/io_domain.json deleted file mode 100755 index 1f163e75..00000000 --- a/api/validation_definitions/IEEE_sub/domains/io_domain.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/io_domain.json", - "type": "object", - "title": "Input and Output Domain", - "description": "The list of global input and output files created by the computational workflow, excluding the intermediate files. Custom to every specific IEEE-2791 Object implementation, these fields are pointers to objects that can reside in the system performing the computation or any other accessible system.", - "required": [ - "input_subdomain", - "output_subdomain" - ], - "properties": { - "input_subdomain": { - "type": "array", - "title": "input_domain", - "description": "A record of the references and input files for the entire pipeline. Each type of input file is listed under a key for that type.", - "items": { - "additionalProperties": false, - "type": "object", - "required": [ - "uri" - ], - "properties": { - "uri": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - } - } - }, - "output_subdomain": { - "type": "array", - "title": "output_subdomain", - "description": "A record of the outputs for the entire pipeline.", - "items": { - "type": "object", - "title": "The Items Schema", - "required": [ - "mediatype", - "uri" - ], - "properties": { - "mediatype": { - "type": "string", - "title": "mediatype", - "description": "https://www.iana.org/assignments/media-types/", - "default": "application/octet-stream", - "examples": [ - "text/csv" - ], - "pattern": "^(.*)$" - }, - "uri": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/uri" - } - } - } - } - } -} diff --git a/api/validation_definitions/IEEE_sub/domains/parametric_domain.json b/api/validation_definitions/IEEE_sub/domains/parametric_domain.json deleted file mode 100755 index cde0644b..00000000 --- a/api/validation_definitions/IEEE_sub/domains/parametric_domain.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/parametric_domain.json", - "type": "array", - "title": "Parametric Domain", - "description": "This represents the list of NON-default parameters customizing the computational flow which can affect the output of the calculations. 
These fields can be custom to each kind of analysis and are tied to a particular pipeline implementation", - "items":{ - "required": [ - "param", - "value", - "step" - ], - "additionalProperties": false, - "properties": { - "param": { - "type": "string", - "title": "param", - "description": "Specific variables for the computational workflow", - "examples": [ - "seed" - ] - }, - "value": { - "type": "string", - "description": "Specific (non-default) parameter values for the computational workflow", - "title": "value", - "examples": [ - "14" - ] - }, - "step": { - "type": "string", - "title": "step", - "description": "Refers to the specific step of the workflow relevant to the parameters specified in 'param' and 'value'", - "examples": [ - "1" - ], - "pattern": "^(.*)$" - } - } - } -} diff --git a/api/validation_definitions/IEEE_sub/domains/provenance_domain.json b/api/validation_definitions/IEEE_sub/domains/provenance_domain.json deleted file mode 100755 index c2406158..00000000 --- a/api/validation_definitions/IEEE_sub/domains/provenance_domain.json +++ /dev/null @@ -1,126 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/provenance_domain.json", - "type": "object", - "title": "Provenance Domain", - "description": "Structured field for tracking data through transformations, including contributors, reviewers, and versioning.", - "required": [ - "name", - "version", - "created", - "modified", - "contributors", - "license" - ], - "additionalProperties": false, - "properties": { - "name": { - "type": "string", - "description": "Public searchable name for IEEE-2791 Object. This public field should take free text value using common biological research terminology supporting the terminology used in the usability_domain, external references (xref), and keywords sections.", - "examples": [ - "HCV1a ledipasvir resistance SNP detection" - ] - }, - "version": { - "type": "string", - "description": "Records the versioning of this IEEE-2791 Object instance. IEEE-2791 Object Version should adhere to semantic versioning as recommended by Semantic Versioning 2.0.0.", - "reference": "https://semver.org/spec/v2.0.0.html", - "examples": [ - "2.9" - ] - }, - "review": { - "type": "array", - "description": "Description of the current verification status of an object in the review process. The unreviewed flag indicates that the object has been submitted, but no further evaluation or verification has occurred. The in-review flag indicates that verification is underway. The approved flag indicates that the IEEE-2791 Object has been verified and reviewed. The suspended flag indicates an object that was once valid is no longer considered valid. The rejected flag indicates that an error or inconsistency was detected in the IEEE-2791 Object, and it has been removed or rejected. The fields from the contributor object (described in section 2.1.10) is inherited to populate the reviewer section.", - "items": { - "type": "object", - "required": [ - "status", - "reviewer" - ], - "additionalProperties": false, - "properties": { - "date": { - "type": "string", - "format": "date-time" - }, - "reviewer": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/contributor", - "description": "Contributer that assigns IEEE-2791 review status." - }, - "reviewer_comment": { - "type": "string", - "description": "Optional free text comment by reviewer", - "examples": [ - "Approved by research institution staff. 
Waiting for approval from regulator" - ] - }, - "status": { - "type": "string", - "enum": [ - "unreviewed", - "in-review", - "approved", - "rejected", - "suspended" - ], - "description": "Current verification status of the IEEE-2791 Object", - "default": "unreviewed" - } - } - } - }, - "derived_from": { - "description": "value of `ieee2791_id` field of another IEEE-2791 that this object is partially or fully derived from", - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/object_id" - }, - "obsolete_after": { - "type": "string", - "description": "If the object has an expiration date, this optional field will specify that using the ‘datetime’ type described in ISO-8601 format, as clarified by W3C https://www.w3.org/TR/NOTE-datetime.", - "format": "date-time" - }, - "embargo": { - "type": "object", - "description": "If the object has a period of time during which it shall not be made public, that range can be specified using these optional fields. Using the datetime type, a start and end time are specified for the embargo.", - "additionalProperties": false, - "properties": { - "start_time": { - "type": "string", - "description": "Beginning date of embargo period.", - "format": "date-time" - }, - "end_time": { - "type": "string", - "description": "End date of embargo period.", - "format": "date-time" - } - } - }, - "created": { - "type": "string", - "description": "Date and time of the IEEE-2791 Object creation", - "readOnly": true, - "format": "date-time" - }, - "modified": { - "type": "string", - "description": "Date and time the IEEE-2791 Object was last modified", - "readOnly": true, - "format": "date-time" - }, - "contributors": { - "type": "array", - "description": "This is a list to hold contributor identifiers and a description of their type of contribution, including a field for ORCIDs to record author information, as they allow for the author to curate their information after submission. The contribution type is a choice taken from PAV ontology: provenance, authoring and versioning, which also maps to the PROV-O.", - "items": { - "$ref": "IEEE_sub/IEEE2791-2020.schema#/definitions/contributor" - } - }, - "license": { - "type": "string", - "description": "Creative Commons license or other license information (text) space. The default or recommended license can be Attribution 4.0 International as shown in example", - "examples": [ - "https://spdx.org/licenses/CC-BY-4.0.html" - ] - } - } -} diff --git a/api/validation_definitions/IEEE_sub/domains/usability_domain.json b/api/validation_definitions/IEEE_sub/domains/usability_domain.json deleted file mode 100755 index 54e936e4..00000000 --- a/api/validation_definitions/IEEE_sub/domains/usability_domain.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "https://w3id.org/ieee/ieee-2791-schema/usability_domain.json", - "type": "array", - "title": "Usability Domain", - "description": "Author-defined usability domain of the IEEE-2791 Object. 
This field is to aid in search-ability and provide a specific description of the function of the object.", - "items": { - "type": "string", - "description": "Free text values that can be used to provide scientific reasoning and purpose for the experiment", - "examples": [ - "Identify baseline single nucleotide polymorphisms SNPs [SO:0000694], insertions [so:SO:0000667], and deletions [so:SO:0000045] that correlate with reduced ledipasvir [pubchem.compound:67505836] antiviral drug efficacy in Hepatitis C virus subtype 1 [taxonomy:31646]", - "Identify treatment emergent amino acid substitutions [so:SO:0000048] that correlate with antiviral drug treatment failure", - "Determine whether the treatment emergent amino acid substitutions [so:SO:0000048] identified correlate with treatment failure involving other drugs against the same virus" - ] - } -} diff --git a/api/validation_definitions/uri_external b/api/validation_definitions/uri_external deleted file mode 100755 index e69de29b..00000000 diff --git a/api/views.py b/api/views.py deleted file mode 100755 index bc49a237..00000000 --- a/api/views.py +++ /dev/null @@ -1,1640 +0,0 @@ -#!/usr/bin/env python3 -"""BCODB views - -Django views for BCODB API -""" - -from drf_yasg import openapi -from drf_yasg.utils import swagger_auto_schema -from rest_framework import status -from rest_framework.permissions import IsAuthenticated -from rest_framework.response import Response -from rest_framework.views import APIView -from api.permissions import RequestorInPrefixAdminsGroup -from api.scripts.method_specific.GET_draft_object_by_id import get_draft_object_by_id -from api.scripts.method_specific.GET_published_object_by_id import ( - GET_published_object_by_id, -) -from api.scripts.method_specific.GET_published_object_by_id_with_version import ( - GET_published_object_by_id_with_version, -) -from api.scripts.method_specific.POST_validate_payload_against_schema import ( - post_validate_bco, -) - -# Request-specific methods -from api.model.groups import ( - post_api_groups_modify, - post_api_groups_delete, - post_api_groups_info, - post_api_groups_create, -) -from api.model.prefix import ( - post_api_prefixes_create, - post_api_prefixes_delete, - post_api_prefixes_modify, - post_api_prefixes_permissions_set, - post_api_prefixes_token_flat, -) - -from api.scripts.method_specific.POST_api_objects_drafts_create import ( - post_api_objects_drafts_create, -) -from api.scripts.method_specific.POST_api_objects_drafts_modify import ( - post_api_objects_drafts_modify, -) -from api.scripts.method_specific.POST_api_objects_drafts_permissions import ( - POST_api_objects_drafts_permissions, -) -from api.scripts.method_specific.POST_api_objects_drafts_permissions_set import ( - POST_api_objects_drafts_permissions_set, -) -from api.scripts.method_specific.POST_api_objects_drafts_publish import ( - post_api_objects_drafts_publish, -) -from api.scripts.method_specific.POST_api_objects_drafts_read import ( - POST_api_objects_drafts_read, -) -from api.scripts.method_specific.POST_api_objects_drafts_token import ( - POST_api_objects_drafts_token, -) -from api.scripts.method_specific.POST_api_objects_publish import ( - post_api_objects_publish, -) -from api.scripts.method_specific.POST_api_objects_published import ( - POST_api_objects_published, -) -from api.scripts.method_specific.POST_api_objects_search import post_api_objects_search -from api.scripts.method_specific.POST_api_objects_token import POST_api_objects_token - -# For helper functions -from api.scripts.utilities 
import UserUtils - - -################################################################################################ -# NOTES -################################################################################################ -# Permissions -# We can't use the examples given in -# https://www.django-rest-framework.org/api-guide/permissions/#djangomodelpermissions -# because our permissions system is not tied to -# the request type (DELETE, GET, PATCH, POST). -################################################################################################ - - -# TODO: This is a helper function so might want to go somewhere else -def check_post_and_process(request, PostFunction) -> Response: - """ - Helper function to perform the verification that a request is a POST and to then - make a call to the callback function with the request body. - - Returns: An HTTP Response Object - """ - # checked is suppressed for the milestone. - - # Check the request - # checked = RequestUtils.RequestUtils().check_request_templates( - # method = 'POST', - # request = request.data - # ) - - checked = None - if checked is None: - # Pass the request to the handling function. - return PostFunction(request) - else: - return Response(data=checked, status=status.HTTP_400_BAD_REQUEST) - - -# TODO: This is currently commented out; need to see what checking is meant to do -def check_get(request) -> Response: - """ - Helper function to perform the verification that a request is a GET - - Returns: An HTTP Response Object - """ - # Check the request - # checked = RequestUtils.RequestUtils().check_request_templates( - # method = 'GET', - # request = request.data - # ) - - # Placeholder - return Response(status=status.HTTP_200_OK) - -class ApiGroupsInfo(APIView): - """Group Info - - -------------------- - - This API call checks a user's groups and permissions in ths system. The User token is - required. - - ```JSON - { - "POST_api_groups_info": { - "names": [ - "bco_drafter", "bco_publisher" - ] - } - } - ``` - """ - - POST_api_groups_info_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["names"], - properties={ - "names": openapi.Schema( - type=openapi.TYPE_ARRAY, - description="List of groups to delete.", - items=openapi.Schema(type=openapi.TYPE_STRING), - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Group Information Schema", - description="API call checks a user's groups and permissions" - " in this system.", - required=["POST_api_groups_info"], - properties={"POST_api_groups_info": POST_api_groups_info_schema}, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Success. Group permissions returned", - 400: "Bad request. Request is not formatted correctly.", - 403: "Forbidden. Invalid token or authentication credentials were not provided.", - }, - tags=["Group Management"], - ) - def post(self, request): - return check_post_and_process(request, post_api_groups_info) - - -class ApiGroupsCreate(APIView): - """Create group - - -------------------- - This API call creates a BCO group in ths system. The name of the group is - required but all other parameters are optional. 
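    As a minimal sketch (assuming a local deployment, a valid user token, and an
    `/api/groups/create/` route, which is not shown here), a group-creation request
    might look like the following; the payload key and fields mirror the request
    schema defined for this view:

    ```python
    # Sketch only: the route and the token value are assumed placeholders.
    import requests

    token = "<user token>"
    payload = {
        "POST_api_groups_create": [
            {
                "name": "myGroup1",
                "usernames": ["anon"],
                "description": "Example group for testing.",
            }
        ]
    }
    response = requests.post(
        "http://localhost:8000/api/groups/create/",  # assumed route
        json=payload,
        headers={"Authorization": f"Token {token}"},
    )
    print(response.status_code, response.json())
    ```

    Per the responses declared below, a 409 indicates that a group with this
    name already exists.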
- """ - - POST_api_groups_create_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["name"], - properties={ - "name": openapi.Schema( - type=openapi.TYPE_STRING, description="The name of the group to create" - ), - "usernames": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema(type=openapi.TYPE_STRING), - description="List of users to add to the group.", - ), - "delete_members_on_group_deletion": openapi.Schema( - type=openapi.TYPE_BOOLEAN, - description="Delete the members of the group if the group is deleted.", - ), - "description": openapi.Schema( - type=openapi.TYPE_STRING, description="Description of the group." - ), - "expiration": openapi.Schema( - type=openapi.TYPE_STRING, - description="Expiration date and time of the group. Note, " - "this needs to be in a Python DateTime compatible format.", - ), - "max_n_members": openapi.Schema( - type=openapi.TYPE_INTEGER, - description="Maximum number of members to allow in the group.", - ), - }, - description="Groups to create along with associated information.", - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Group Creation Schema", - description="Parameters that are supported when trying to create a group.", - required=["POST_api_groups_create"], - properties={ - "POST_api_groups_create": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_groups_create_schema, - description="Groups and actions to take on them.", - ) - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Group creation is successful.", - 400: "Bad request.", - 403: "Invalid token.", - 409: "Group conflict. There is already a group with this name.", - }, - tags=["Group Management"], - ) - def post(self, request): - """ "Post?""" - return check_post_and_process(request, post_api_groups_create) - - -class ApiGroupsDelete(APIView): - """ - Delete group - - -------------------- - - Deletes one or more groups from the BCO API database. Even if not all - requests are successful, the API can return success. If a 300 response is - returned then the caller should loop through the response to understand - which deletes failed and why. - """ - - POST_api_groups_delete_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["names"], - properties={ - "names": openapi.Schema( - type=openapi.TYPE_ARRAY, - description="List of groups to delete.", - items=openapi.Schema(type=openapi.TYPE_STRING), - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Group Deletion Schema", - description="Parameters that are supported when trying to delete " - "one or more groups.", - required=["POST_api_groups_delete"], - properties={"POST_api_groups_delete": POST_api_groups_delete_schema}, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Group deletion is successful.", - 300: "Mixture of successes and failures in a bulk delete.", - 400: "Bad request.", - 403: "Invalid token.", - 404: "Missing optional bulk parameters, this request has no effect.", - 418: "More than the expected one group was deleted.", - }, - tags=["Group Management"], - ) - def post(self, request): - return check_post_and_process(request, post_api_groups_delete) - - -class ApiGroupsModify(APIView): - """Bulk Modify groups - - -------------------- - Modifies one or more existing BCO groups. An array of objects are taken - where each of these objects represents the instructions to modify a - specific group. 
Within each of these objects, along with the group name, - the set of modifications to that group exists in a dictionary indecated by - the following 'actions': 'rename', 'redescribe', 'add_users', - 'remove_users', and 'owner_user'. - - Example request body which encodes renaming a group named `myGroup1` to - `myGroup2`: - ``` - "POST_api_groups_modify": [ - { - "name": "myGroup1", - "actions": { - "rename": "myGroup2" - } - } - ] - ``` - - More than one action can be included for a specific group name, and more - than one group can be modified with a request. To modify multiple groups - they must each have their own request object. - """ - - POST_api_groups_modify_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["name"], - properties={ - "name": openapi.Schema( - type=openapi.TYPE_STRING, description="The name of the group to modify" - ), - "actions": openapi.Schema( - type=openapi.TYPE_OBJECT, - properties={ - "rename": openapi.Schema(type=openapi.TYPE_STRING, description=""), - "redescribe": openapi.Schema( - type=openapi.TYPE_STRING, - description="Change the description of the group to this.", - ), - "owner_user": openapi.Schema( - type=openapi.TYPE_STRING, - description="Change the owner of the group to this user.", - ), - "remove_users": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema(type=openapi.TYPE_STRING), - description="Users to remove from the group.", - ), - "disinherit_from": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema(type=openapi.TYPE_STRING), - description="Groups to disinherit permissions from.", - ), - "add_users": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema(type=openapi.TYPE_STRING), - description="Users to add to the group.", - ), - "inherit_from": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema(type=openapi.TYPE_STRING), - description="Groups to inherit permissions from.", - ), - }, - description="Actions to take upon the group.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Group Modification Schema", - description="Parameters that are supported when trying to modify one or more groups.", - required=["POST_api_groups_modify"], - properties={ - "POST_api_groups_modify": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_groups_modify_schema, - description="Groups and actions to take on them.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Group modification is successful.", - 400: "Bad request.", - 403: "Insufficient privileges.", - }, - tags=["Group Management"], - ) - def post(self, request): - return check_post_and_process(request, post_api_groups_modify) - -class ApiObjectsDraftsCreate(APIView): - """ - Create BCO Draft - - -------------------- - - Creates a new BCO draft object. - """ - - POST_api_objects_draft_create_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["prefix", "owner_group", "schema", "contents"], - properties={ - "prefix": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Prefix to use" - ), - "owner_group": openapi.Schema( - type=openapi.TYPE_STRING, description="Group which owns the BCO draft." - ), - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." - ), - "schema": openapi.Schema( - type=openapi.TYPE_STRING, description="Which schema the BCO satisfies." 
- ), - "contents": openapi.Schema( - type=openapi.TYPE_OBJECT, - additional_properties=True, - description="Contents of the BCO.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Create BCO Draft Schema", - description="Parameters that are supported when trying to create a draft BCO.", - required=["POST_api_objects_draft_create"], - properties={ - "POST_api_objects_draft_create": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_draft_create_schema, - description="BCO Drafts to create.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Creation of BCO draft is successful.", - 300: "Some requests failed and some succeeded.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_objects_drafts_create) - - -class ApiObjectsDraftsModify(APIView): - """ - Bulk Modify BCO Objects - - -------------------- - - Modifies one or more BCO objects. The BCO objects must be a draft in order - to be modifiable. WARNING: The contents of the BCO will be replaced with - the new contents provided in the request body. - """ - - POST_api_objects_drafts_modify_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["object_id", "contents"], - properties={ - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." - ), - "contents": openapi.Schema( - type=openapi.TYPE_OBJECT, - description="Contents of the BCO.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Modify BCO Draft Schema", - description="Parameters that are supported when trying to modify a draft BCO.", - required=["POST_api_objects_drafts_modify"], - properties={ - "POST_api_objects_drafts_modify": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_drafts_modify_schema, - description="BCO Drafts to modify.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "All modifications of BCO drafts are successful.", - 207: "Some or all BCO modifications failed. Each object submitted" - " will have it's own response object with it's own status" - " code and message:\n" - "200: Success. The object with ID <'object_id'> was" - "updated.\n" - "400: Bad request. The request could not be processed with" - "the parameters provided.\n " - "401: Prefix unauthorized. The token provided does not " - "have draft permissions for this prefix <'prefix'>.\n" - "404: Not Found. The object ID <'object_id'> was not found " - "on the server.\n" - "409: Conflict. The provided object_id <'object_id'> does " - "not match the saved draft object_id <'object_id'>. " - "Once a draft is created you can not change the " - "object_id.\n", - 400: "Bad request.", - 403: "Forbidden. Authentication credentials were not provided, or the token is invalid." - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_objects_drafts_modify) - - -class ApiObjectsDraftsPermissions(APIView): - """ - Get Permissions for a BCO Object - - -------------------- - - Gets the permissions for a BCO object. - """ - - POST_api_objects_drafts_permissions_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["object_id", "contents"], - properties={ - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." 
- ), - "contents": openapi.Schema( - type=openapi.TYPE_OBJECT, - additional_properties=True, - description="Contents of the BCO.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Get BCO Permissions Schema", - description="Parameters that are supported when fetching draft BCO permissions.", - required=["POST_api_objects_drafts_permissions"], - properties={ - "POST_api_objects_drafts_permissions": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_drafts_permissions_schema, - description="BCO Drafts to fetch permissions for.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Checking BCO permissions is successful.", - 300: "Some requests failed.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, POST_api_objects_drafts_permissions) - - -class ApiObjectsDraftsPermissionsSet(APIView): - """ - Set Permissions for a BCO Object - - -------------------- - - Sets the permissions for a BCO object. The BCO object must be in draft form. - - NOTE: This is currently a work in progress and may not yet work. - """ - - # TODO: The POST_api_objects_draft_permissions_set call needs to be fixed, doesn't appear to work - POST_api_objects_drafts_permissions_set_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["object_id"], - properties={ - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." - ), - "actions": openapi.Schema( - type=openapi.TYPE_OBJECT, - properties={ - "remove_permissions": openapi.Schema( - type=openapi.TYPE_STRING, - description="Remove permissions from these users.", - ), - "full_permissions": openapi.Schema( - type=openapi.TYPE_STRING, - description="Give users full permissions.", - ), - "add_permissions": openapi.Schema( - type=openapi.TYPE_STRING, - description="Add permissions to these users.", - ), - }, - description="Actions to modify BCO permissions.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Set BCO Permissions Schema", - description="Parameters that are supported when setting draft BCO permissions.", - required=["POST_api_objects_drafts_permissions_set"], - properties={ - "POST_api_objects_drafts_permissions_set": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_drafts_permissions_set_schema, - description="BCO Drafts to set permissions for.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Setting BCO permissions is successful.", - 300: "Some requests failed.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, POST_api_objects_drafts_permissions_set) - - -# TODO: What is the difference between this and ApiObjectsPublish? -class ApiObjectsDraftsPublish(APIView): - """ - Publish a BCO - - -------------------- - - Publish a draft BCO object. Once published, a BCO object becomes immutable. - """ - - # TODO: This seems to be missing group, which I would expect to be part of the publication - permission_classes = [IsAuthenticated] - - POST_api_objects_drafts_publish_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["draft_id", "prefix"], - properties={ - "prefix": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Prefix to publish with." 
- ), - "draft_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object Draft ID." - ), - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." - ), - "delete_draft": openapi.Schema( - type=openapi.TYPE_BOOLEAN, - description="Whether or not to delete the draft." " False by default.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Publish Draft BCO Schema", - description="Parameters that are supported when setting publishing BCOs.", - required=["POST_api_objects_drafts_publish"], - properties={ - "POST_api_objects_drafts_publish": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_drafts_publish_schema, - description="BCO drafts to publish.", - ) - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "BCO Publication is successful.", - 300: "Some requests failed.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_objects_drafts_publish) - - -class ApiObjectsDraftsRead(APIView): - """ - Read BCO - - -------------------- - - Reads a draft BCO object. - """ - - POST_api_objects_drafts_read_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["object_id"], - properties={ - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Read BCO Schema", - description="Parameters that are supported when reading BCOs.", - required=["POST_api_objects_drafts_read"], - properties={ - "POST_api_objects_drafts_read": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_drafts_read_schema, - description="BCO objects to read.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Read BCO is successful.", - 300: "Some requests failed.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, POST_api_objects_drafts_read) - - -# TODO: This should probably also be a GET (or only a GET) -class ApiObjectsDraftsToken(APIView): - """Get Draft BCOs - - -------------------- - Get all the draft objects for a given token. - You can specify which information should be returned with this. - """ - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Get Draft BCO Schema", - description="Parameters that are supported when fetching a draft BCO.", - required=["POST_api_objects_drafts_token"], - properties={ - "POST_api_objects_drafts_token": openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["fields"], - properties={ - "fields": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema( - type=openapi.TYPE_STRING, - description="Field to return", - enum=[ - "contents", - "last_update", - "object_class", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - ), - description="Fields to return.", - ) - }, - ) - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Fetch BCO drafts is successful.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - # TODO: Not checking for authorization here? - # No schema for this request since only - # the Authorization header is required. 
- return POST_api_objects_drafts_token(rqst=request) - - -class ApiObjectsPublish(APIView): - """Directly publish a BCO - - -------------------- - Take the bulk request and publish objects directly. - """ - - POST_api_objects_publish_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["prefix", "owner_group", "schema", "contents"], - properties={ - "prefix": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Prefix to use" - ), - "owner_group": openapi.Schema( - type=openapi.TYPE_STRING, description="Group which owns the BCO." - ), - "object_id": openapi.Schema( - type=openapi.TYPE_STRING, description="BCO Object ID." - ), - "schema": openapi.Schema( - type=openapi.TYPE_STRING, description="Which schema the BCO satisfies." - ), - "contents": openapi.Schema( - type=openapi.TYPE_OBJECT, - description="Contents of the BCO.", - ), - }, - ) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="BCO Publication Schema", - description="Parameters that are supported when trying to create a published BCO.", - properties={ - "POST_api_objects_publish": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_objects_publish_schema, - description="BCO Drafts to create.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "BCO publication is successful.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_objects_publish) - -class ApiObjectsSearch(APIView): - """ - Search for BCO - - -------------------- - - Search for available BCO objects that match criteria. - - `type` can be one of 3 different values => mine | prefix | bco_id - `search` should be an empty string if you are doing the mine search as that is for "My BCOs" - For prefix `search` should be the name of the prefix. - For `bco_id` it should be some substring that is present in the desired `bco_id` or SET of `bco_ids` - - Shell - ```shell - curl -X POST "http://localhost:8000/api/objects/search/" -H "accept: application/json" -H "Authorization: Token ${token}" -H "Content-Type: application/json" -d "{\"POST_api_objects_search\":[{\"type\": \"prefix\",\"search\": \"TEST\"}]}" - ``` - - JavaScript - ```javascript - axios.post("http://localhost:8000/api/objects/search/", { - "POST_api_objects_search":[ - { - "type": "prefix", - "search": "TEST" - } - ] - }, { - headers: { - "Authorization": "Token ${token}, - "Content-Type": "application/json" - } - }); - ``` - """ - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="BCO Search Schema", - description="Search for BCOs", - properties={ - "type": openapi.Schema( - type=openapi.TYPE_STRING, description="Type of search to perform" - ), - "search": openapi.Schema( - type=openapi.TYPE_STRING, description="Search value" - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Search successful.", - 404: "That prefix was not found on this server." - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_objects_search) - - -class ApiObjectsToken(APIView): - """ - Get User Draft and Published BCOs - - -------------------- - - Get all BCOs available for a specific token, including published ones. 
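    As a minimal sketch (assuming a local deployment and an `/api/objects/token/`
    route, which is not shown here), a request might look like the following; the
    `fields` values are drawn from the enum in the request schema below and limit
    what is returned for each object. Only the Authorization header is strictly
    required, so the body can also be omitted:

    ```python
    # Sketch only: the route and the token value are assumed placeholders.
    import requests

    token = "<user token>"
    payload = {
        "POST_api_objects_token": {
            "fields": ["object_id", "prefix", "state", "last_update"]
        }
    }
    response = requests.post(
        "http://localhost:8000/api/objects/token/",  # assumed route
        json=payload,
        headers={"Authorization": f"Token {token}"},
    )
    print(response.status_code)
    ```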
- """ - - # auth = [] - # auth.append( - # openapi.Parameter('Token', openapi.IN_HEADER, description="Authorization Token", type=openapi.TYPE_STRING)) - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Get BCO Schema", - description="Parameters that are supported when fetching a BCOs.", - required=["POST_api_objects_token"], - properties={ - "POST_api_objects_token": openapi.Schema( - type=openapi.TYPE_OBJECT, - required=["fields"], - properties={ - "fields": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=openapi.Schema( - type=openapi.TYPE_STRING, - description="Field to return", - enum=[ - "contents", - "last_update", - "object_class", - "object_id", - "owner_group", - "owner_user", - "prefix", - "schema", - "state", - ], - ), - description="Fields to return.", - ) - }, - ) - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Fetch BCOs is successful.", - 400: "Bad request.", - 403: "Invalid token.", - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - # No schema for this request since only - # the Authorization header is required. - return POST_api_objects_token(rqst=request) - - -class ApiObjectsPublished(APIView): - """ - Get Published BCOs - - -------------------- - - Get all BCOs available for a specific token, including published ones. - """ - - authentication_classes = [] - permission_classes = [] - auth = [] - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 200: "Success.", - 400: "Internal Error. BCO Name and Version are not properly formatted.", - }, - tags=["BCO Management"], - ) - def get(self, request) -> Response: - return POST_api_objects_published() - # return POST_api_objects_token(rqst=request) - - -class ApiPrefixesCreate(APIView): - """ - Create a Prefix - - -------------------- - Create a prefix to be used to classify BCOs and to determine permissions - for objects created under that prefix. The requestor *must* be in the group - prefix_admins to create a prefix. - - ```JSON - { - "POST_api_prefixes_create": [ - { - "owner_group": "bco_publisher", - "owner_user": "anon", - "prefixes": [ - { - "description": "Just a test prefix.", - "expiration_date": "2023-01-01-01-01-01", - "prefix": "testR" - }, - { - "description": "Just another prefix.", - "expiration_date": "2023-01-01-01-01-01", - "prefix": "othER" - } - ] - } - ] - } - ``` - """ - - # Permissions - prefix admins only - permission_classes = [RequestorInPrefixAdminsGroup, IsAuthenticated] - - # TYPE_ARRAY explanation - # Source: https://stackoverflow.com/questions/53492889/drf-yasg-doesnt-take-type-array-as-a-valid-type - - # TODO: Need to get the schema that is being sent here from FE - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Prefix Creation Schema", - description="Several parameters are required to create a prefix.", - required=["owner_user", "prefix"], - properties={ - "description": openapi.Schema( - type=openapi.TYPE_STRING, - description="A description of what this prefix should represent. For example, the prefix 'GLY' would be related to BCOs which were derived from GlyGen workflows.", - ), - "expiration_date": openapi.Schema( - type=openapi.TYPE_STRING, - description="The datetime at which this prefix expires in the format YYYY-MM-DD-HH-MM-SS.", - ), - "owner_group": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which group should own the prefix. 
*The requestor does not have to be in owner_group to assign this.*", - ), - "owner_user": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which user should own the prefix. *The requestor does not have to be owner_user to assign this.*", - ), - "prefixes": openapi.Schema( - type=openapi.TYPE_ARRAY, - description="Any prefix which satsifies the naming standard (see link...)", - items=openapi.Items(type=openapi.TYPE_STRING), - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 201: "The prefix was successfully created.", - 400: "Bad request for one of two reasons: \n1) the prefix does not" - "follow the naming standard, or \n2) owner_user and/or" - "owner_group do not exist.", - 401: "Unauthorized. Authentication credentials were not provided.", - 403: "Forbidden. User doesnot have permission to perform this action", - 409: "The prefix the requestor is attempting to create already exists.", - }, - tags=["Prefix Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_prefixes_create) - - -class ApiPrefixesDelete(APIView): - """ - Delete a Prefix - - # Deletes a prefix for BCOs. - -------------------- - The requestor *must* be in the group prefix_admins to delete a prefix. - - __Any object created under this prefix will have its permissions "locked out." This means that any other view which relies on object-level permissions, such as /api/objects/drafts/read/, will not allow any requestor access to particular objects.__ - - ```JSON - { - "POST_api_prefixes_delete": [ - "OTHER", - "TESTR" - ] - } - ``` - - """ - - # Permissions - prefix admins only - permission_classes = [RequestorInPrefixAdminsGroup] - - # TODO: Need to get the schema that is being sent here from FE - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Prefix Deletion Schema", - description="Provide a list of prefixes to delete.", - required=["prefixes"], - properties={ - "prefixes": openapi.Schema( - type=openapi.TYPE_ARRAY, - description="Any prefix in the API.", - items=openapi.Items(type=openapi.TYPE_STRING), - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "Deleting a prefix was successful.", - 401: "Unauthorized. Authentication credentials were not provided.", - 403: "Forbidden. User doesnot have permission to perform this action", - 404: "The prefix couldn't be found so therefore it could not be deleted.", - }, - tags=["Prefix Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_prefixes_delete) - - -class ApiPrefixesModify(APIView): - """ - Modify a Prefix - - -------------------- - - Modify a prefix which already exists. - - The requestor *must* be in the group prefix_admins to modify a prefix. 
- - ```JSON - { - "POST_api_prefixes_modify": [ - { - "owner_group": "bco_drafter", - "owner_user": "wheel", - "prefixes": [ - { - "description": "Just another description here.", - "expiration_date": "2025-01-01-01-01-01", - "prefix": "testR" - }, - { - "description": "Just another prefix description here as well.", - "expiration_date": "2025-01-01-01-01-01", - "prefix": "othER" - } - ] - } - ] - } - ``` - - """ - - # Permissions - prefix admins only - permission_classes = [RequestorInPrefixAdminsGroup] - prefixes_object_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=[], - properties={ - "description": openapi.Schema( - type=openapi.TYPE_STRING, - description="A description of what this prefix should" - " represent. For example, the prefix 'GLY' would be " - "related to BCOs which were derived from GlyGen workflows.", - ), - "expiration_date": openapi.Schema( - type=openapi.TYPE_STRING, - description="The datetime at which this prefix expires in the" - " format YYYY-MM-DD-HH-MM-SS.", - ), - "prefix": openapi.Schema( - type=openapi.TYPE_STRING, - description="Any prefix which satsifies the naming standard", - ), - }, - ) - POST_api_prefixes_modify_schema = openapi.Schema( - type=openapi.TYPE_OBJECT, - required=[], - properties={ - "owner_group": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which group should own the prefix. *The" - " requestor does not have to be in the owner group to" - " assign this.*", - ), - "owner_user": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which user should own the prefix. *The requestor" - " does not have to be owner_user but owner_user must be in" - " owner_group*.", - ), - "prefixes": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=prefixes_object_schema, - description="Any prefix which satsifies the naming standard", - ), - }, - ) - - # TODO: Need to get the schema that is being sent here from FE - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Prefix Modification Schema", - description="Several parameters are required to modify a prefix.", - required=["POST_api_prefixes_modify"], - properties={ - "POST_api_prefixes_modify": openapi.Schema( - type=openapi.TYPE_ARRAY, - items=POST_api_prefixes_modify_schema, - description="", - ) - }, - ) # TODO: ADD LINK FOR PREFIX DOCUMENTATION - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "The prefix was successfully modified.", - 400: "Bad request because owner_user and/or owner_group do not exist.", - 404: "The prefix provided could not be found.", - }, - tags=["Prefix Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_prefixes_modify) - - -class ApiPrefixesPermissionsSet(APIView): - """ - Set Prefix Permissions - - -------------------- - - # Set prefix permissions by user, group, or both. - - The requestor *must* be the owner_user of the prefix. - - At least one of the usernames or groups must actually exist for a permission to be assigned. 
- - ```JSON - { - "POST_api_prefixes_permissions_set": [ - { - "group": [ - "bco_drafter" - ], - "mode": "add", - "permissions": [ - "change", - "delete", - "view" - ], - "prefixes": [ - "testR", - "BCO" - ], - "username": [ - "some_user" - ] - } - ] - } - ``` - - """ - - # Permissions - prefix admins only - permission_classes = [RequestorInPrefixAdminsGroup] - - # TODO: Need to get the schema that is being sent here from FE - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Prefix Permissions Schema", - description="Set the permissions for a prefix.", - required=["permissions", "prefix"], - properties={ - "group": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which group the permission is being assigned to.", - ), - "mode": openapi.Schema( - type=openapi.TYPE_STRING, - description="Whether to 'add' (append), 'remove' (subtract), or define the 'full_set' of permissions.", - ), - "permissions": openapi.Schema( - type=openapi.TYPE_STRING, description="Which permissions to assign." - ), - "prefix": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which prefix to assign the permissions to.", - ), - "username": openapi.Schema( - type=openapi.TYPE_STRING, - description="Which user the permission is being assigned to.", - ), - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 201: "The prefix permissions were updated succesfully.", - 400: "Bad request because 1) the requestor isn't the owner of the prefix, or 2) the provided username and/or group could not be found.", - 404: "The prefix provided was not found.", - }, - tags=["Prefix Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_api_prefixes_permissions_set) - - -class ApiPrefixesToken(APIView): - """ - Get list of prefixes - - -------------------- - - Get all available prefixes and their associated permissions for a given token. - The word 'Token' must be included in the header. - - For example: 'Token 627626823549f787c3ec763ff687169206626149'. - """ - - auth = [ - openapi.Parameter( - "Authorization", - openapi.IN_HEADER, - description="Authorization Token", - type=openapi.TYPE_STRING, - ) - ] - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 200: "The Authorization header was provided and available prefixes were returned.", - 400: "The Authorization header was not provided.", - }, - tags=["Prefix Management"], - ) - def post(self, request) -> Response: - if "Authorization" in request.headers: - # Pass the request to the handling function - # Source: https://stackoverflow.com/a/31813810 - return post_api_prefixes_token_flat(request=request) - else: - return Response(status=status.HTTP_400_BAD_REQUEST) - - -class ApiPrefixesTokenFlat(APIView): - """ - Get a flat list of prefixes - - -------------------- - - Get all available prefixes and their associated permissions for a given - token in flat format. The word 'Token' must be included in the header. - - For example: 'Token 627626823549f787c3ec763ff687169206626149'. 
- """ - - auth = [ - openapi.Parameter( - "Authorization", - openapi.IN_HEADER, - description="Authorization Token", - type=openapi.TYPE_STRING, - ) - ] - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 200: "The Authorization header was provided and available prefixes were returned.", - 401: "The Authorization header was not provided.", - }, - tags=["Prefix Management"], - ) - def post(self, request) -> Response: - if "Authorization" in request.headers: - # Pass the request to the handling function - # Source: https://stackoverflow.com/a/31813810 - return post_api_prefixes_token_flat(request=request) - else: - return Response(status=status.HTTP_400_BAD_REQUEST) - - -class ApiPublicDescribe(APIView): - """ - Describe API - - -------------------- - - Returns information about the API. - - """ - - authentication_classes = [] - permission_classes = [] - - # For the success and error messages - # renderer_classes = [ - # TemplateHTMLRenderer - # ] - # template_name = 'api/account_activation_message.html' - - auth = [] - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 201: "Account has been authorized.", - 208: "Account has already been authorized.", - 403: "Requestor's credentials were rejected.", - 424: "Account has not been registered.", - }, - tags=["API Information"], - ) - def get(self, request): - # Pass the request to the handling function - return Response(UserUtils.UserUtils().get_user_info(username="anon")) - - -# Source: https://www.django-rest-framework.org/api-guide/permissions/#setting-the-permission-policy -class DraftObjectId(APIView): - """ - Read Object by URI - - -------------------- - - Reads and returns a single object from a given object_id. - - """ - - auth = [] - auth.append( - openapi.Parameter( - "object_id", - openapi.IN_PATH, - description="Object ID to be viewed.", - type=openapi.TYPE_STRING, - ) - ) - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 200: "Success. Object contents returned", - 401: "The contents of the draft could not be sent back because" - " the requestor does not have appropriate permissions.", - 403: "Forbidden. Authentication credentials were not provided, or" - " the token was invalid.", - 404: "Not found. That draft could not be found on the server." - }, - tags=["BCO Management"], - ) - def get(self, request, object_id): - # No need to check the request (unnecessary for GET as it's checked - # by the url parser?). - - # Pass straight to the handler. - # TODO: This is not dealing with the draft_object_id parameter being passed in? - # return GET_draft_object_by_id(do_id=request.build_absolute_uri(), rqst=request) - - # return GET_draft_object_by_id(do_id=draft_object_id, rqst=request) - return get_draft_object_by_id(do_id=object_id, request=request) - - -# Allow anyone to view published objects. -# Source: https://www.django-rest-framework.org/api-guide/permissions/#setting-the-permission-policy -class ObjectIdRootObjectId(APIView): - """ - View Published BCO by ID - -------------------- - Reads and returns a published BCO based on an object ID. This will return the highest versioned object. - """ - - auth = [] - auth.append( - openapi.Parameter( - "object_id_root", - openapi.IN_PATH, - description="Object ID to be viewed.", - type=openapi.TYPE_STRING, - ) - ) - - authentication_classes = [] - permission_classes = [] - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 200: "Object returned.", - 404: "Object not found." 
- }, - tags=["BCO Management"], - ) - def get(self, request, object_id_root): - return GET_published_object_by_id(object_id_root) - -class ObjectIdRootObjectIdVersion(APIView): - """ - View Published BCO by ID and Version - - -------------------- - - Reads and returns a published BCO based on an object ID and a version. - - """ - - # For the success and error messages - # renderer_classes = [ - # TemplateHTMLRenderer - # ] - # template_name = 'api/account_activation_message.html' - - auth = [] - auth.append( - openapi.Parameter( - "object_id_root", - openapi.IN_PATH, - description="Object ID to be viewed.", - type=openapi.TYPE_STRING, - ) - ) - auth.append( - openapi.Parameter( - "object_id_version", - openapi.IN_PATH, - description="Object version to be viewed.", - type=openapi.TYPE_STRING, - ) - ) - - # Anyone can view a published object - authentication_classes = [] - permission_classes = [] - - @swagger_auto_schema( - manual_parameters=auth, - responses={ - 201: "Account has been authorized.", - 208: "Account has already been authorized.", - 403: "Requestor's credentials were rejected.", - 424: "Account has not been registered.", - }, - tags=["BCO Management"], - ) - def get(self, request, object_id_root, object_id_version): - return GET_published_object_by_id_with_version( - object_id_root, object_id_version - ) - - -class ValidateBCO(APIView): - """ - Bulk Validate BCOs - - -------------------- - - Bulk operation to validate BCOs. - - ```JSON - { - "POST_validate_bco": [ - {...BCO CONTENTS...}, - {...BCO CONTENTS...} - ] - } - - """ - - authentication_classes = [] - permission_classes = [] - - request_body = openapi.Schema( - type=openapi.TYPE_OBJECT, - title="Validate BCO", - description="Bulk request for validating a BCO", - required=["BCO"], - properties={ - "POST_validate_bco": openapi.Schema( - type=openapi.TYPE_ARRAY, - description="A BCO to validate", - items=openapi.Items(type=openapi.TYPE_OBJECT), - ) - }, - ) - - @swagger_auto_schema( - request_body=request_body, - responses={ - 200: "All BCO validations are successful.", - 207: "Some or all BCO validations failed. Each object submitted" - " will have it's own response object with it's own status" - " message:\n" - }, - tags=["BCO Management"], - ) - def post(self, request) -> Response: - return check_post_and_process(request, post_validate_bco) diff --git a/config/settings.py b/config/settings.py new file mode 100644 index 00000000..2b860f49 --- /dev/null +++ b/config/settings.py @@ -0,0 +1,261 @@ +""" +Django settings for BioCompute Database project. +""" + +import os +from datetime import timedelta +import configparser +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +# --- SECURITY SETTINGS --- # +# Load the server config file. +server_config = configparser.ConfigParser() +server_config.read(BASE_DIR + "/server.conf") + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ + +# Is this a production server? +PRODUCTION = server_config["PRODUCTION"]["production"] + +# Set the anonymous user's key. +ANON_KEY = server_config["KEYS"]["anon"] + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = "$vz@#@^q(od&$rf&*6^z!m5nh6qw2*cq*j6fha#^h9(r7$xqy4" + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = PRODUCTION + +# The human-readable hostname. 
+HUMAN_READABLE_HOSTNAME = server_config["HRHOSTNAME"]["hrnames"]
+
+if server_config["GROUP_PREFIX"]["allow_all_creation"] == "True":
+    GROUP = True
+    PREFIX = True
+elif server_config["GROUP_PREFIX"]["allow_group_creation"] == "True":
+    GROUP = True
+elif server_config["GROUP_PREFIX"]["allow_prefix_creation"] == "True":
+    PREFIX = True
+
+# The publicly accessible hostname.
+if server_config["PRODUCTION"]["production"] == "True":
+    PUBLIC_HOSTNAME = server_config["PUBLICHOSTNAME"]["prod_name"]
+elif server_config["PRODUCTION"]["production"] == "False":
+    PUBLIC_HOSTNAME = server_config["PUBLICHOSTNAME"]["name"]
+
+# Source: https://dzone.com/articles/how-to-fix-django-cors-error
+
+# Check for open (public) access to the API.
+if server_config["REQUESTS_FROM"]["public"].strip() == "false":
+
+    # Process the requester groups.
+
+    # configparser automatically strips white space off the
+    # ends of arguments.
+    requesters = [
+        server_config["REQUESTS_FROM"][i].strip()
+        for i in server_config["REQUESTS_FROM"]
+    ]
+    requesters.remove("false")
+    requesters = [i.split(",") for i in requesters]
+
+    # Flatten the list.
+    # Source: https://stackabuse.com/python-how-to-flatten-list-of-lists/
+    flattened = [item.strip() for sublist in requesters for item in sublist]
+
+    if server_config["PRODUCTION"]["production"] == "True":
+        ALLOWED_HOSTS = [
+            i.strip() for i in server_config["HOSTNAMES"]["prod_names"].split(",")
+        ]
+    elif server_config["PRODUCTION"]["production"] == "False":
+        ALLOWED_HOSTS = [
+            i.strip() for i in server_config["HOSTNAMES"]["names"].split(",")
+        ]
+
+    CORS_ORIGIN_ALLOW_ALL = False
+    CORS_ORIGIN_WHITELIST = tuple(flattened)
+
+elif server_config["REQUESTS_FROM"]["public"].strip() == "true":
+    if server_config["PRODUCTION"]["production"] == "True":
+        ALLOWED_HOSTS = [server_config["HOSTNAMES"]["prod_names"].split(",")[0], "*"]
+        CORS_ORIGIN_ALLOW_ALL = True
+    elif server_config["PRODUCTION"]["production"] == "False":
+        ALLOWED_HOSTS = [server_config["HOSTNAMES"]["names"].split(",")[0], "*"]
+        CORS_ORIGIN_ALLOW_ALL = True
+
+# Use the REST framework
+REST_FRAMEWORK = {
+    "DEFAULT_AUTHENTICATION_CLASSES": [
+        'authentication.services.CustomJSONWebTokenAuthentication',
+        "rest_framework.authentication.TokenAuthentication",
+        'rest_framework.authentication.SessionAuthentication',
+        'rest_framework.authentication.BasicAuthentication',
+    ],
+    "DEFAULT_PERMISSION_CLASSES": ["rest_framework.permissions.IsAuthenticated"],
+    "DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.coreapi.AutoSchema",
+
+
+}
+
+JWT_AUTH = {
+    "JWT_RESPONSE_PAYLOAD_HANDLER": "authentication.services.CustomJSONWebTokenAuthentication",
+    "JWT_EXPIRATION_DELTA": timedelta(seconds=604800),
+    "JWT_REFRESH_EXPIRATION_DELTA": timedelta(days=14),
+    "JWT_ALLOW_REFRESH": True,
+}
+
+# Password validation
+# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
+
+AUTH_PASSWORD_VALIDATORS = [
+    {
+        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
+    },
+    {
+        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
+    },
+    {
+        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
+    },
+    {
+        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
+    },
+]
+
+# Object-level permissions with django-guardian
+# Source: https://github.com/django-guardian/django-guardian#configuration
+AUTHENTICATION_BACKENDS = [
+    "django.contrib.auth.backends.ModelBackend",
+    "guardian.backends.ObjectPermissionBackend",
+]
+
+# --- APPLICATION --- #
+# Application definition
+
+# Token-based authentication.
+# Source: https://www.django-rest-framework.org/api-guide/authentication/#tokenauthentication
+INSTALLED_APPS = [
+    "django.contrib.admin",
+    "django.contrib.admindocs",
+    "django.contrib.auth",
+    "django.contrib.contenttypes",
+    "django.contrib.sessions",
+    "django.contrib.messages",
+    "django.contrib.staticfiles",
+    "drf_yasg",
+    "rest_framework",
+    "rest_framework.authtoken",
+    'rest_framework_jwt',
+    'rest_framework_jwt.blacklist',
+    "rest_framework_swagger",
+    "reset_migrations",
+    "guardian",
+    # "api",
+    "authentication",
+    "biocompute",
+    "prefix"
+]
+
+# Source: https://dzone.com/articles/how-to-fix-django-cors-error
+MIDDLEWARE = [
+    "django.middleware.security.SecurityMiddleware",
+    "django.contrib.sessions.middleware.SessionMiddleware",
+    "corsheaders.middleware.CorsMiddleware",
+    "django.middleware.common.CommonMiddleware",
+    "django.middleware.csrf.CsrfViewMiddleware",
+    "django.contrib.auth.middleware.AuthenticationMiddleware",
+    "django.contrib.messages.middleware.MessageMiddleware",
+    "django.middleware.clickjacking.XFrameOptionsMiddleware",
+]
+
+ROOT_URLCONF = "config.urls"
+
+TEMPLATES = [
+    {
+        "BACKEND": "django.template.backends.django.DjangoTemplates",
+        "DIRS": [],
+        "APP_DIRS": True,
+        "OPTIONS": {
+            "context_processors": [
+                "django.template.context_processors.debug",
+                "django.template.context_processors.request",
+                "django.contrib.auth.context_processors.auth",
+                "django.contrib.messages.context_processors.messages",
+            ],
+        },
+    },
+]
+
+SWAGGER_SETTINGS = {
+    "SECURITY_DEFINITIONS": {
+        "Bearer": {"type": "apiKey", "name": "Authorization", "in": "header"}
+    },
+    "DEEP_LINKING": True,
+}
+
+REDOC_SETTINGS = {"LAZY_RENDERING": False}
+
+WSGI_APPLICATION = "config.wsgi.application"
+
+# Database
+# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
+
+DATABASES = {
+    "default": {
+        "ENGINE": "django.db.backends.sqlite3",
+        "NAME": server_config["DATABASES"]["path"],
+    }
+}
+
+# Internationalization
+# https://docs.djangoproject.com/en/3.0/topics/i18n/
+
+LANGUAGE_CODE = "en-us"
+
+TIME_ZONE = "UTC"
+
+USE_I18N = True
+
+USE_L10N = True
+
+USE_TZ = True
+
+
+# Static files (CSS, JavaScript, Images)
+# https://docs.djangoproject.com/en/3.0/howto/static-files/
+
+STATIC_URL = "/api/static/"
+# STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
+STATIC_ROOT = "/var/www/bcoeditor/bco_api/bco_api/static/"
+
+# ----- CUSTOM VARIABLES AND METHODS ----- #
+# Load request and validation templates (definitions).
+# Note that we will get TWO loads of settings.py if we start without runserver --noreload
+# There is only set of definitions for requests, but for validations, we may have sub-folders.
+# First, the request definitions.
+
+# Make the object naming accessible as a dictionary.
+OBJECT_NAMING = {}
+
+if server_config["PRODUCTION"]["production"] == "True":
+
+    for i in server_config["OBJECT_NAMING"]:
+        if i.split("_")[0] == "prod":
+
+            # Strip out the production flag.
+            STRIPPED = "_".join(i.split("_")[1:])
+
+            OBJECT_NAMING[STRIPPED] = server_config["OBJECT_NAMING"][i]
+
+elif server_config["PRODUCTION"]["production"] == "False":
+
+    for i in server_config["OBJECT_NAMING"]:
+        if i.split("_")[0] != "prod":
+            OBJECT_NAMING[i] = server_config["OBJECT_NAMING"][i]
+
+# emailing notifications
+EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
+EMAIL_HOST = "localhost"
+EMAIL_PORT = 25
+DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
diff --git a/config/urls.py b/config/urls.py
index 3020dc26..413b5396 100755
--- a/config/urls.py
+++ b/config/urls.py
@@ -10,7 +10,6 @@
     path("api/admin/", admin.site.urls),
     path("api/token/", obtain_jwt_token),
     path("api/verify/", verify_jwt_token),
-    path("", include("api.urls")),
    path("api/", include("authentication.urls")),
    path("api/", include("search.urls")),
 ]
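The new config/settings.py above reads every deployment-specific value from a server.conf file in the project root (BASE_DIR + "/server.conf"). As a reference, here is a minimal, hypothetical sketch of that file: only the section and key names come from the settings code above; every value (and the key names under OBJECT_NAMING and the extra origin key under REQUESTS_FROM) is an illustrative placeholder, not a shipped default.

```ini
; Hypothetical example only -- sections/keys mirror what config/settings.py reads.
[PRODUCTION]
production = False

[KEYS]
; token assigned to the anonymous user
anon = 0123456789abcdef

[HRHOSTNAME]
hrnames = BCO API (local)

[GROUP_PREFIX]
allow_all_creation = True
allow_group_creation = False
allow_prefix_creation = False

[PUBLICHOSTNAME]
name = http://localhost:8000
prod_name = https://example.org

[HOSTNAMES]
names = localhost,127.0.0.1
prod_names = api.example.org

[REQUESTS_FROM]
; public = true opens CORS to all origins; with public = false the remaining
; comma-separated keys in this section become the CORS whitelist.
public = false
portals = http://localhost:3000,https://portal.example.org

[DATABASES]
path = /path/to/db.sqlite3

[OBJECT_NAMING]
; keys prefixed with "prod_" are used when production = True (the prefix is stripped);
; the key names themselves are illustrative.
root_uri = http://localhost:8000
prod_root_uri = https://example.org
```

Note that configparser returns plain strings, which is why the settings module compares flags such as production against the literal strings "True"/"False" (and "true"/"false" for REQUESTS_FROM).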
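With api.urls removed, config/urls.py keeps the rest_framework_jwt endpoints registered above. Assuming a local server and placeholder credentials, obtaining and verifying a token would look roughly like this; the request/response shapes follow the stock obtain_jwt_token and verify_jwt_token views, not output captured from this branch.

```shell
# Hypothetical host and credentials; endpoints are those registered in config/urls.py.
curl -X POST "http://localhost:8000/api/token/" \
  -H "Content-Type: application/json" \
  -d '{"username": "some_user", "password": "some_password"}'
# -> {"token": "<jwt>"}

curl -X POST "http://localhost:8000/api/verify/" \
  -H "Content-Type: application/json" \
  -d '{"token": "<jwt>"}'
```

Per the JWT_AUTH block above, issued tokens expire after 604800 seconds (seven days), and refresh is allowed for up to 14 days.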