
Commit

Merge branch 'master' into jb/ref/tracetree
JonasBa authored Sep 28, 2024
2 parents 356f41d + 7a98a75 commit 2e39edb
Showing 51 changed files with 1,287 additions and 1,355 deletions.
2 changes: 1 addition & 1 deletion api-docs/paths/events/issue-details.json
@@ -212,7 +212,7 @@
"properties": {
"status": {
"type": "string",
"description": "The new status for the issues. Valid values are `\"resolved\"`, `\"reprocessing\"`, `\"unresolved\"`, and `\"ignored\"`."
"description": "The new status for the issues. Valid values are `\"resolved\"`, `\"resolvedInNextRelease\"`, `\"unresolved\"`, and `\"ignored\"`."
},
"statusDetails": {
"type": "object",
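Note on the change above: the documented status enum for the issue-update endpoint now lists `"resolvedInNextRelease"` in place of `"reprocessing"`. A minimal sketch of exercising the endpoint with the new value, assuming a standard bearer token and a placeholder issue ID (both hypothetical):

import requests

SENTRY_TOKEN = "<auth-token>"  # placeholder for illustration
ISSUE_URL = "https://sentry.io/api/0/issues/12345/"  # placeholder issue ID

# Set one of the documented status values:
# "resolved", "resolvedInNextRelease", "unresolved", or "ignored".
response = requests.put(
    ISSUE_URL,
    headers={"Authorization": f"Bearer {SENTRY_TOKEN}"},
    json={"status": "resolvedInNextRelease"},
)
response.raise_for_status()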
18 changes: 0 additions & 18 deletions fixtures/backup/model_dependencies/detailed.json
@@ -6427,24 +6427,6 @@
]
]
},
"workflow_engine.workflowaction": {
"dangling": false,
"foreign_keys": {
"workflow": {
"kind": "FlexibleForeignKey",
"model": "workflow_engine.workflow",
"nullable": false
}
},
"model": "workflow_engine.workflowaction",
"relocation_dependencies": [],
"relocation_scope": "Organization",
"silos": [
"Region"
],
"table_name": "workflow_engine_workflowaction",
"uniques": []
},
"workflow_engine.workflowdataconditiongroup": {
"dangling": false,
"foreign_keys": {
3 changes: 0 additions & 3 deletions fixtures/backup/model_dependencies/flat.json
@@ -888,9 +888,6 @@
"sentry.organization",
"workflow_engine.dataconditiongroup"
],
"workflow_engine.workflowaction": [
"workflow_engine.workflow"
],
"workflow_engine.workflowdataconditiongroup": [
"workflow_engine.dataconditiongroup",
"workflow_engine.workflow"
1 change: 0 additions & 1 deletion fixtures/backup/model_dependencies/sorted.json
@@ -54,7 +54,6 @@
"workflow_engine.datasource",
"workflow_engine.detector",
"workflow_engine.workflow",
"workflow_engine.workflowaction",
"workflow_engine.workflowdataconditiongroup",
"workflow_engine.detectorworkflow",
"workflow_engine.datasourcedetector",
1 change: 0 additions & 1 deletion fixtures/backup/model_dependencies/truncate.json
@@ -54,7 +54,6 @@
"workflow_engine_datasource",
"workflow_engine_detector",
"workflow_engine_workflow",
"workflow_engine_workflowaction",
"workflow_engine_workflowdataconditiongroup",
"workflow_engine_detectorworkflow",
"workflow_engine_datasourcedetector",
7 changes: 0 additions & 7 deletions jest.config.ts
@@ -15,15 +15,8 @@ const {
GITHUB_PR_REF,
GITHUB_RUN_ID,
GITHUB_RUN_ATTEMPT,
-USING_YARN_TEST,
} = process.env;

-if (USING_YARN_TEST === undefined) {
-// eslint-disable-next-line no-console
-console.error('Do not run `jest` directly, use `yarn test` instead!');
-process.exit();
-}

const IS_MASTER_BRANCH = GITHUB_PR_REF === 'refs/heads/master';

const BALANCE_RESULTS_PATH = path.resolve(
2 changes: 1 addition & 1 deletion migrations_lockfile.txt
@@ -13,4 +13,4 @@ replays: 0004_index_together
sentry: 0768_fix_old_group_first_seen_dates
social_auth: 0002_default_auto_field
uptime: 0013_uptime_subscription_new_unique
-workflow_engine: 0006_data_conditions
+workflow_engine: 0007_loosen_workflow_action_relationship
3 changes: 0 additions & 3 deletions scripts/test.js
@@ -6,9 +6,6 @@ process.env.NODE_ENV = 'test';
process.env.PUBLIC_URL = '';
process.env.TZ = 'America/New_York';

-// Marker to indicate that we've correctly ran with `yarn test`.
-process.env.USING_YARN_TEST = true;

// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will
// terminate the Node.js process with a non-zero exit code.
40 changes: 23 additions & 17 deletions src/sentry/api/endpoints/debug_files.py
@@ -15,7 +15,7 @@
from symbolic.debuginfo import normalize_debug_id
from symbolic.exceptions import SymbolicError

-from sentry import ratelimits, roles
+from sentry import ratelimits
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -39,6 +39,7 @@
from sentry.models.project import Project
from sentry.models.release import Release, get_artifact_counts
from sentry.models.releasefile import ReleaseFile
+from sentry.roles import organization_roles
from sentry.tasks.assemble import (
AssembleTask,
ChunkFileState,
@@ -53,15 +54,15 @@
_release_suffix = re.compile(r"^(.*)\s+\(([^)]+)\)\s*$")


-def upload_from_request(request, project):
+def upload_from_request(request: Request, project: Project):
if "file" not in request.data:
return Response({"detail": "Missing uploaded file"}, status=400)
fileobj = request.data["file"]
files = create_files_from_dif_zip(fileobj, project=project)
return Response(serialize(files, request.user), status=201)


-def has_download_permission(request, project):
+def has_download_permission(request: Request, project: Project):
if is_system_auth(request.auth) or is_active_superuser(request):
return True

@@ -72,7 +73,7 @@ def has_download_permission(request, project):
required_role = organization.get_option("sentry:debug_files_role") or DEBUG_FILES_ROLE_DEFAULT

if request.user.is_sentry_app:
-if roles.get(required_role).priority > roles.get("member").priority:
+if organization_roles.can_manage("member", required_role):
return request.access.has_scope("project:write")
else:
return request.access.has_scope("project:read")
@@ -86,7 +87,12 @@
except OrganizationMember.DoesNotExist:
return False

-return roles.get(current_role).priority >= roles.get(required_role).priority
+if organization_roles.can_manage(current_role, required_role):
+return True
+
+# There's an edge case where a team admin is an org member but the required
+# role is org admin. In that case, the team admin should be able to download.
+return required_role == "admin" and request.access.has_project_scope(project, "project:write")


def _has_delete_permission(access: Access, project: Project) -> bool:
@@ -104,7 +110,7 @@ class ProguardArtifactReleasesEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)

-def post(self, request: Request, project) -> Response:
+def post(self, request: Request, project: Project) -> Response:
release_name = request.data.get("release_name")
proguard_uuid = request.data.get("proguard_uuid")

@@ -153,7 +159,7 @@ def post(self, request: Request, project) -> Response:
status=status.HTTP_409_CONFLICT,
)

-def get(self, request: Request, project) -> Response:
+def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Proguard Associated Releases
````````````````````````````````````````
@@ -189,7 +195,7 @@ class DebugFilesEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)

-def download(self, debug_file_id, project):
+def download(self, debug_file_id, project: Project):
rate_limited = ratelimits.backend.is_limited(
project=project,
key=f"rl:DSymFilesEndpoint:download:{debug_file_id}:{project.id}",
@@ -223,7 +229,7 @@ def download(self, debug_file_id, project):
except OSError:
raise Http404

-def get(self, request: Request, project) -> Response:
+def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Debug Information Files
````````````````````````````````````````
@@ -240,7 +246,7 @@ def get(self, request: Request, project) -> Response:
:auth: required
"""
download_requested = request.GET.get("id") is not None
-if download_requested and (has_download_permission(request, project)):
+if download_requested and has_download_permission(request, project):
return self.download(request.GET.get("id"), project)
elif download_requested:
return Response(status=403)
@@ -335,7 +341,7 @@ def delete(self, request: Request, project: Project) -> Response:

return Response(status=404)

-def post(self, request: Request, project) -> Response:
+def post(self, request: Request, project: Project) -> Response:
"""
Upload a New File
`````````````````
@@ -367,7 +373,7 @@ class UnknownDebugFilesEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)

-def get(self, request: Request, project) -> Response:
+def get(self, request: Request, project: Project) -> Response:
checksums = request.GET.getlist("checksums")
missing = ProjectDebugFile.objects.find_missing(checksums, project=project)
return Response({"missing": missing})
@@ -382,7 +388,7 @@ class AssociateDSymFilesEndpoint(ProjectEndpoint):
permission_classes = (ProjectReleasePermission,)

# Legacy endpoint, kept for backwards compatibility
-def post(self, request: Request, project) -> Response:
+def post(self, request: Request, project: Project) -> Response:
return Response({"associatedDsymFiles": []})


@@ -394,7 +400,7 @@ class DifAssembleEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)

-def post(self, request: Request, project) -> Response:
+def post(self, request: Request, project: Project) -> Response:
"""
Assemble one or multiple chunks (FileBlob) into debug files
````````````````````````````````````````````````````````````
@@ -517,7 +523,7 @@ class SourceMapsEndpoint(ProjectEndpoint):
}
permission_classes = (ProjectReleasePermission,)

-def get(self, request: Request, project) -> Response:
+def get(self, request: Request, project: Project) -> Response:
"""
List a Project's Source Map Archives
````````````````````````````````````
@@ -549,7 +555,7 @@ def get(self, request: Request, project) -> Response:

queryset = queryset.filter(query_q)

-def expose_release(release, count):
+def expose_release(release, count: int):
return {
"type": "release",
"id": release["id"],
@@ -581,7 +587,7 @@ def serialize_results(results):
on_results=serialize_results,
)

-def delete(self, request: Request, project) -> Response:
+def delete(self, request: Request, project: Project) -> Response:
"""
Delete an Archive
```````````````````````````````````````````````````
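Note on the role checks above: raw `roles.get(...).priority` comparisons are replaced with `organization_roles.can_manage(...)`, plus an explicit fallback so a team admin can download debug files when the required role is org admin. A rough sketch of the priority semantics such a helper plausibly encodes — an illustration under assumed role priorities, not Sentry's actual implementation:

# Hypothetical priorities; the real mapping lives in sentry.roles.
ROLE_PRIORITIES = {"member": 1, "admin": 2, "manager": 3, "owner": 4}

def can_manage(role: str, other: str) -> bool:
    # Assumed semantics: a role can manage another role when its
    # priority is at least the other role's priority.
    return ROLE_PRIORITIES.get(role, 0) >= ROLE_PRIORITIES.get(other, 0)

# Under these assumptions, an admin can manage members but not owners.
assert can_manage("admin", "member")
assert not can_manage("admin", "owner")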
1 change: 0 additions & 1 deletion src/sentry/backup/comparators.py
@@ -882,7 +882,6 @@ def get_default_comparators() -> dict[str, list[JSONScrubbingComparator]]:
DateUpdatedComparator("date_updated", "date_added")
],
"workflow_engine.workflow": [DateUpdatedComparator("date_updated", "date_added")],
"workflow_engine.workflowaction": [DateUpdatedComparator("date_updated", "date_added")],
"workflow_engine.workflowdataconditiongroup": [
DateUpdatedComparator("date_updated", "date_added")
],
27 changes: 6 additions & 21 deletions src/sentry/event_manager.py
@@ -53,11 +53,7 @@
GroupingConfig,
get_grouping_config_dict_for_project,
)
-from sentry.grouping.ingest.config import (
-is_in_transition,
-project_uses_optimized_grouping,
-update_grouping_config_if_needed,
-)
+from sentry.grouping.ingest.config import is_in_transition, update_grouping_config_if_needed
from sentry.grouping.ingest.hashing import (
find_existing_grouphash,
get_hash_values,
@@ -517,12 +513,10 @@ def save(
return jobs[0]["event"]
else:
project = job["event"].project
job["optimized_grouping"] = project_uses_optimized_grouping(project)
job["in_grouping_transition"] = is_in_transition(project)
metric_tags = {
"platform": job["event"].platform or "unknown",
"sdk": normalized_sdk_tag_from_event(job["event"].data),
"using_transition_optimization": job["optimized_grouping"],
"in_transition": job["in_grouping_transition"],
}
# This metric allows differentiating from all calls to the `event_manager.save` metric
@@ -1323,20 +1317,11 @@ def get_culprit(data: Mapping[str, Any]) -> str:

@sentry_sdk.tracing.trace
def assign_event_to_group(event: Event, job: Job, metric_tags: MutableTags) -> GroupInfo | None:
if job["optimized_grouping"]:
group_info = _save_aggregate_new(
event=event,
job=job,
metric_tags=metric_tags,
)
else:
group_info = _save_aggregate(
event=event,
job=job,
release=job["release"],
received_timestamp=job["received_timestamp"],
metric_tags=metric_tags,
)
group_info = _save_aggregate_new(
event=event,
job=job,
metric_tags=metric_tags,
)

if group_info:
event.group = group_info.group
3 changes: 1 addition & 2 deletions src/sentry/grouping/ingest/metrics.py
@@ -6,7 +6,7 @@

from sentry import options
from sentry.grouping.api import GroupingConfig
-from sentry.grouping.ingest.config import is_in_transition, project_uses_optimized_grouping
+from sentry.grouping.ingest.config import is_in_transition
from sentry.models.project import Project
from sentry.utils import metrics
from sentry.utils.tag_normalization import normalized_sdk_tag_from_event
@@ -65,7 +65,6 @@ def record_calculation_metric_with_result(
# count to get an average number of calculations per event
tags = {
"in_transition": str(is_in_transition(project)),
"using_transition_optimization": str(project_uses_optimized_grouping(project)),
"result": result,
}
metrics.incr(
2 changes: 1 addition & 1 deletion src/sentry/roles/__init__.py
@@ -17,5 +17,5 @@
get_choices = default_manager.get_choices
get_default = default_manager.get_default
get_top_dog = default_manager.get_top_dog
-with_scope = default_manager.with_scope
with_any_scope = default_manager.with_any_scope
+with_scope = default_manager.with_scope
2 changes: 1 addition & 1 deletion src/sentry/seer/anomaly_detection/utils.py
@@ -190,7 +190,7 @@ def fetch_historical_data(
dataset_label = "errors"
elif dataset_label in ["generic_metrics", "transactions"]:
# XXX: performance alerts dataset differs locally vs in prod
dataset_label = "discover"
dataset_label = "metricsEnhanced"
dataset = get_dataset(dataset_label)

if not project or not dataset or not alert_rule.organization:
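Note on the one-line change above: the fallback dataset label for performance alerts becomes `metricsEnhanced` instead of `discover` before being resolved via `get_dataset`. A sketch of the surrounding label normalization; the branch structure outside the visible hunk is assumed for illustration:

def resolve_dataset_label(dataset_label: str) -> str:
    # First branch assumed; only the elif below is visible in the hunk.
    if dataset_label == "events":
        return "errors"
    elif dataset_label in ["generic_metrics", "transactions"]:
        # The performance alerts dataset differs locally vs in prod,
        # so resolve through the metrics-enhanced dataset instead.
        return "metricsEnhanced"
    return dataset_label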
1 change: 1 addition & 0 deletions src/sentry/seer/similarity/grouping_records.py
@@ -24,6 +24,7 @@ class CreateGroupingRecordData(TypedDict):
group_id: int
hash: str
project_id: int
+message: str
exception_type: str | None


1 change: 1 addition & 0 deletions src/sentry/tasks/embeddings_grouping/utils.py
@@ -374,6 +374,7 @@ def get_events_from_nodestore(
CreateGroupingRecordData(
group_id=group_id,
project_id=project.id,
+message=filter_null_from_string(event.title),
exception_type=(
filter_null_from_string(exception_type) if exception_type else None
),
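Note on the two hunks above: `CreateGroupingRecordData` gains a `message` field, populated from the event title when records are built from nodestore. A self-contained sketch of the resulting record shape (Python 3.10+), with `filter_null_from_string` assumed to strip NUL characters:

from typing import TypedDict

class CreateGroupingRecordData(TypedDict):
    group_id: int
    hash: str
    project_id: int
    message: str
    exception_type: str | None

def filter_null_from_string(value: str) -> str:
    # Assumed behavior: drop NUL bytes that downstream stores reject.
    return value.replace("\x00", "")

record = CreateGroupingRecordData(
    group_id=1,
    hash="c0ffee",  # placeholder grouping hash
    project_id=42,
    message=filter_null_from_string("IndexError: list index out of range"),
    exception_type="IndexError",
)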
8 changes: 0 additions & 8 deletions src/sentry/testutils/factories.py
@@ -180,7 +180,6 @@
Detector,
DetectorWorkflow,
Workflow,
-WorkflowAction,
WorkflowDataConditionGroup,
)
from social_auth.models import UserSocialAuth
@@ -2075,13 +2074,6 @@ def create_workflow(
name = petname.generate(2, " ", letters=10).title()
return Workflow.objects.create(organization=organization, name=name)

-@staticmethod
-@assume_test_silo_mode(SiloMode.REGION)
-def create_workflow_action(
-**kwargs,
-) -> WorkflowAction:
-return WorkflowAction.objects.create(**kwargs)

@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
def create_data_condition_group(