
Commit

(migration): Resolve isupdate correctly
realVinayak committed Aug 30, 2024
1 parent 9bc64f1 commit aa11b35
Showing 3 changed files with 106 additions and 35 deletions.
18 changes: 18 additions & 0 deletions specifyweb/workbench/migrations/0008_alter_spdataset_isupdate.py
@@ -0,0 +1,18 @@
# Generated by Django 3.2.15 on 2024-08-30 15:10

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('workbench', '0007_spdataset_parent'),
]

operations = [
migrations.AlterField(
model_name='spdataset',
name='isupdate',
field=models.BooleanField(default=False, null=True),
),
]
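
Applying or previewing this migration follows the usual Django workflow; a usage note (assuming the standard manage.py entry point, not part of the commit itself):

python manage.py sqlmigrate workbench 0008   # preview the ALTER statement for the configured backend
python manage.py migrate workbench           # apply the schema change
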
119 changes: 86 additions & 33 deletions specifyweb/workbench/models.py
@@ -7,9 +7,16 @@
from django.utils import timezone

from specifyweb.specify.func import Func
from specifyweb.specify.models import Collection, Specifyuser, Agent, datamodel, custom_save
from specifyweb.specify.models import (
Collection,
Specifyuser,
Agent,
datamodel,
custom_save,
)
from specifyweb.specify.api import uri_for_model


class Dataset(models.Model):
# All these attributes are meta-data.
name = models.CharField(max_length=256)
@@ -29,88 +36,134 @@ class Dataset(models.Model):
# Misc meta-data.
timestampcreated = models.DateTimeField(default=timezone.now)
timestampmodified = models.DateTimeField(auto_now=True)
createdbyagent = models.ForeignKey(Agent, null=True, on_delete=models.SET_NULL, related_name="+")
modifiedbyagent = models.ForeignKey(Agent, null=True, on_delete=models.SET_NULL, related_name="+")

base_meta_fields = ["name", "uploaderstatus", "timestampcreated", "timestampmodified"]
object_response_fields = [*base_meta_fields, "id", "remarks", "importedfilename", "uploadresult", "uploadplan"]
createdbyagent = models.ForeignKey(
Agent, null=True, on_delete=models.SET_NULL, related_name="+"
)
modifiedbyagent = models.ForeignKey(
Agent, null=True, on_delete=models.SET_NULL, related_name="+"
)

base_meta_fields = [
"name",
"uploaderstatus",
"timestampcreated",
"timestampmodified",
]
object_response_fields = [
*base_meta_fields,
"id",
"remarks",
"importedfilename",
"uploadresult",
"uploadplan",
]

@classmethod
def get_meta_fields(cls, request, extra_meta_fields=None, extra_filters=None):
attrs = [*cls.base_meta_fields, *(extra_meta_fields if extra_meta_fields is not None else [])]
attrs = [
*cls.base_meta_fields,
*(extra_meta_fields if extra_meta_fields is not None else []),
]

dss = cls.objects.filter(
dss = cls.objects.filter(
specifyuser=request.specify_user,
collection=request.specify_collection,
**(extra_filters if extra_filters is not None else {})
).only(*attrs)
return [{'id': ds.id, **{attr: getattr(ds, attr) for attr in attrs}, 'uploadplan': json.loads(ds.uploadplan) if ds.uploadplan else None} for ds in dss]
return [
{
"id": ds.id,
**{attr: getattr(ds, attr) for attr in attrs},
"uploadplan": json.loads(ds.uploadplan) if ds.uploadplan else None,
}
for ds in dss
]

# raise_404: Whether to raise 404 or return http 404.
# lock_object: Whether to run a "select for update" or "select"
@classmethod
def validate_dataset_request(cls, raise_404: bool, lock_object: bool):
def decorator(func):
def inner(request, **kwargs):
ds_id = kwargs.get('ds_id', None)
ds_id = kwargs.get("ds_id", None)
if ds_id is None:
raise Exception('ds_id not a key in the request. '
'Probably because correct group name is not used url regexp')
raise Exception(
"ds_id not a key in the request. "
"Probably because correct group name is not used url regexp"
)
try:
ds = cls.objects.select_for_update().get(id=ds_id) \
if lock_object else cls.objects.get(id=ds_id)
ds = (
cls.objects.select_for_update().get(id=ds_id)
if lock_object
else cls.objects.get(id=ds_id)
)
except ObjectDoesNotExist as e:
if raise_404:
raise Http404(e)
return http.HttpResponseNotFound()

if ds.specifyuser != request.specify_user:
return http.HttpResponseForbidden()
new_args = {key: kwargs[key] for key in kwargs if key != 'ds_id'}
new_args = {key: kwargs[key] for key in kwargs if key != "ds_id"}
return func(request, ds, **new_args)

return inner
return decorator

return decorator

def get_dataset_as_dict(self):
ds_dict = {key: getattr(self, key) for key in self.object_response_fields}
ds_dict.update({
"rows": self.data,
"uploadplan": Func.maybe(self.uploadplan, json.loads),
"createdbyagent": uri_for_model('agent', self.createdbyagent_id) if self.createdbyagent_id is not None else None,
"modifiedbyagent": uri_for_model('agent', self.modifiedbyagent_id) if self.modifiedbyagent_id is not None else None
})
ds_dict.update(
{
"rows": self.data,
"uploadplan": Func.maybe(self.uploadplan, json.loads),
"createdbyagent": (
uri_for_model("agent", self.createdbyagent_id)
if self.createdbyagent_id is not None
else None
),
"modifiedbyagent": (
uri_for_model("agent", self.modifiedbyagent_id)
if self.modifiedbyagent_id is not None
else None
),
}
)
return ds_dict

class Meta:
abstract = True


class Spdataset(Dataset):
specify_model = datamodel.get_table('spdataset')
specify_model = datamodel.get_table("spdataset")

columns = models.JSONField()
visualorder = models.JSONField(null=True)
rowresults = models.TextField(null=True)

isupdate = models.BooleanField(default=False)
isupdate = models.BooleanField(default=False, null=True)

# very complicated. Essentially, each batch-edit dataset gets backed by another dataset (for rollbacks).
# This should be a one-to-one field, imagine the mess otherwise.
parent = models.OneToOneField('Spdataset', related_name='backer', null=True, on_delete=models.CASCADE)
parent = models.OneToOneField(
"Spdataset", related_name="backer", null=True, on_delete=models.CASCADE
)

class Meta:
db_table = 'spdataset'
db_table = "spdataset"

def get_dataset_as_dict(self):
ds_dict = super().get_dataset_as_dict()
ds_dict.update({
"columns": self.columns,
"visualorder": self.visualorder,
"rowresults": self.rowresults and json.loads(self.rowresults),
"isupdate": self.isupdate
})
ds_dict.update(
{
"columns": self.columns,
"visualorder": self.visualorder,
"rowresults": self.rowresults and json.loads(self.rowresults),
"isupdate": self.isupdate == True,
}
)
return ds_dict

def was_uploaded(self) -> bool:
return self.uploadresult and self.uploadresult['success']
return self.uploadresult and self.uploadresult["success"]
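
For context, the comments on validate_dataset_request above describe how views are meant to hook into it; a usage sketch, with a hypothetical view name and URL pattern (neither is part of this commit):

from django import http
from django.urls import re_path

from specifyweb.workbench.models import Spdataset

# The URL pattern must capture the dataset id in a group literally named
# "ds_id", since the decorator reads it from the view's keyword arguments.
@Spdataset.validate_dataset_request(raise_404=True, lock_object=False)
def dataset_detail(request, ds):
    # ds is already fetched and ownership-checked against request.specify_user.
    return http.JsonResponse(ds.get_dataset_as_dict())

urlpatterns = [re_path(r"^dataset/(?P<ds_id>\d+)/$", dataset_detail)]
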
4 changes: 2 additions & 2 deletions specifyweb/workbench/views.py
@@ -660,7 +660,7 @@ def rows(request, ds) -> http.HttpResponse:
)

rows = regularize_rows(
len(ds.columns), json.load(request), skip_empty=(not ds.isupdate)
len(ds.columns), json.load(request), skip_empty=(ds.isupdate != True)
)

ds.data = rows
@@ -993,7 +993,7 @@ def validate_row(request, ds_id: str) -> http.HttpResponse:
bt, upload_plan = uploader.get_ds_upload_plan(collection, ds)
row = json.loads(request.body)
ncols = len(ds.columns)
rows = regularize_rows(ncols, [row], skip_empty=(not ds.isupdate))
rows = regularize_rows(ncols, [row], skip_empty=(ds.isupdate != True))
if not rows:
return http.JsonResponse(None, safe=False)
row = rows[0]
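
The two regularize_rows call sites above, together with the isupdate == True check in models.py, define how the now-nullable flag is interpreted; a small stand-alone sketch of that behavior:

# Stand-alone sketch: how the comparison-style checks treat each value the
# nullable isupdate column can now hold.
for isupdate in (True, False, None):
    as_flag = (isupdate == True)      # returned by Spdataset.get_dataset_as_dict
    skip_empty = (isupdate != True)   # passed to regularize_rows in the views
    print(f"isupdate={isupdate!r}: as_flag={as_flag}, skip_empty={skip_empty}")
# Only an explicit True marks a dataset as an update; NULL from legacy rows
# behaves the same as False.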
