Commit d1936e1

🔀 merge master
2 parents 3bc8ccf + 472902e commit d1936e1

360 files changed: +4413 -2661 lines changed

migrations_lockfile.txt  (+1 -1)

@@ -23,4 +23,4 @@ tempest: 0001_create_tempest_credentials_model
 
 uptime: 0025_uptime_migrate_constraint
 
-workflow_engine: 0029_ds_query_id_to_pending
+workflow_engine: 0030_allow_blank_workflow_owner_fks

pyproject.toml  (-2)

@@ -135,7 +135,6 @@ module = [
     "sentry.api.invite_helper",
     "sentry.api.paginator",
     "sentry.api.permissions",
-    "sentry.api.serializers.models.event",
     "sentry.auth.helper",
     "sentry.auth.provider",
     "sentry.db.mixin",
@@ -216,7 +215,6 @@ module = [
     "sentry.shared_integrations.client.proxy",
     "sentry.snuba.errors",
     "sentry.snuba.issue_platform",
-    "sentry.snuba.metrics.datasource",
     "sentry.snuba.metrics.query_builder",
     "sentry.snuba.spans_metrics",
     "sentry.tasks.auth",

src/sentry/api/bases/organization_events.py  (+45 -12)

@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import itertools
 from collections.abc import Callable, Sequence
 from datetime import timedelta
 from typing import Any
@@ -352,7 +353,6 @@ def handle_results_with_meta(
             if "confidence" in results:
                 meta["accuracy"] = {
                     "confidence": results["confidence"],
-                    # TODO: add sampleCount and rampleRate here
                 }
                 # Confidence being a top level key is going to be deprecated in favour of confidence being in the meta
                 return {"data": data, "meta": meta, "confidence": results["confidence"]}
@@ -505,22 +505,25 @@ def get_event_stats_data(
                             results, key, query_columns
                         )
                     else:
+                        column = resolve_axis_column(
+                            query_columns[0], 0, transform_alias_to_input_format
+                        )
                         results[key] = serializer.serialize(
                             event_result,
-                            column=resolve_axis_column(
-                                query_columns[0], 0, transform_alias_to_input_format
-                            ),
+                            column=column,
                             allow_partial_buckets=allow_partial_buckets,
                             zerofill_results=zerofill_results,
                         )
-                        results[key]["meta"] = self.handle_results_with_meta(
+                        meta = self.handle_results_with_meta(
                             request,
                             organization,
                             snuba_params.project_ids,
                             event_result.data,
                             True,
                             dataset=dataset,
                         )["meta"]
+                        self.update_meta_with_accuracy(meta, event_result, column)
+                        results[key]["meta"] = meta
 
                 serialized_result = results
             elif is_multiple_axis:
@@ -543,23 +546,25 @@ def get_event_stats_data(
             extra_columns = None
             if comparison_delta:
                 extra_columns = ["comparisonCount"]
+            column = resolve_axis_column(query_columns[0], 0, transform_alias_to_input_format)
             serialized_result = serializer.serialize(
                 result,
-                column=resolve_axis_column(
-                    query_columns[0], 0, transform_alias_to_input_format
-                ),
+                column=column,
                 allow_partial_buckets=allow_partial_buckets,
                 zerofill_results=zerofill_results,
                 extra_columns=extra_columns,
+                confidence_column=column,
             )
-            serialized_result["meta"] = self.handle_results_with_meta(
+            meta = self.handle_results_with_meta(
                 request,
                 organization,
                 snuba_params.project_ids,
                 result.data,
                 True,
                 dataset=dataset,
             )["meta"]
+            self.update_meta_with_accuracy(meta, result, column)
+            serialized_result["meta"] = meta
 
         return serialized_result
 
@@ -613,16 +618,44 @@ def serialize_multiple_axis(
             )
             if is_equation(query_column):
                 equations += 1
-            # TODO: confidence is being split up in the serializer right now, need to move that here once its deprecated
-            if "confidence" in result[columns[index]]:
-                meta["accuracy"] = {"confidence": result[columns[index]]["confidence"]}
+            self.update_meta_with_accuracy(meta, event_result, query_column)
             result[columns[index]]["meta"] = meta
         # Set order if multi-axis + top events
         if "order" in event_result.data:
             result["order"] = event_result.data["order"]
 
         return result
 
+    def update_meta_with_accuracy(self, meta, event_result, query_column) -> None:
+        if "processed_timeseries" in event_result.data:
+            processed_timeseries = event_result.data["processed_timeseries"]
+            meta["accuracy"] = {
+                "confidence": self.serialize_accuracy_data(
+                    processed_timeseries.confidence, query_column
+                ),
+                "sampleCount": self.serialize_accuracy_data(
+                    processed_timeseries.sample_count, query_column
+                ),
+                "samplingRate": self.serialize_accuracy_data(
+                    processed_timeseries.sampling_rate, query_column, null_zero=True
+                ),
+            }
+
+    def serialize_accuracy_data(
+        self,
+        data: Any,
+        column: str,
+        null_zero: bool = False,
+    ):
+        serialized_values = []
+        for timestamp, group in itertools.groupby(data, key=lambda r: r["time"]):
+            for row in group:
+                row_value = row.get(column, None)
+                if row_value == 0 and null_zero:
+                    row_value = None
+                serialized_values.append({"timestamp": timestamp, "value": row_value})
+        return serialized_values
+
 
 class KeyTransactionBase(OrganizationEventsV2EndpointBase):
     def has_feature(self, organization: Organization, request: Request) -> bool:
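
For orientation, the new update_meta_with_accuracy helper attaches an "accuracy" block to the response meta with three per-bucket series ("confidence", "sampleCount", "samplingRate"), replacing the old top-level confidence handling. Below is a minimal, self-contained sketch of what serialize_accuracy_data does to one such series; the input rows are invented for illustration and only assume each row carries a "time" bucket plus the queried column.

import itertools
from typing import Any


def serialize_accuracy_data(data: Any, column: str, null_zero: bool = False):
    # Mirrors the new helper: group rows by their "time" bucket and emit
    # {"timestamp": ..., "value": ...} pairs; a zero value becomes None when
    # null_zero is set (the diff passes null_zero=True only for samplingRate).
    serialized_values = []
    for timestamp, group in itertools.groupby(data, key=lambda r: r["time"]):
        for row in group:
            row_value = row.get(column, None)
            if row_value == 0 and null_zero:
                row_value = None
            serialized_values.append({"timestamp": timestamp, "value": row_value})
    return serialized_values


# Invented confidence rows for two time buckets, keyed by the queried column.
confidence_rows = [
    {"time": 1700000000, "count()": "high"},
    {"time": 1700003600, "count()": "low"},
]
print(serialize_accuracy_data(confidence_rows, "count()"))
# [{'timestamp': 1700000000, 'value': 'high'}, {'timestamp': 1700003600, 'value': 'low'}]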

src/sentry/api/endpoints/auth_index.py  (+9)

@@ -22,6 +22,7 @@
 from sentry.auth.services.auth.impl import promote_request_rpc_user
 from sentry.auth.superuser import SUPERUSER_ORG_ID
 from sentry.organizations.services.organization import organization_service
+from sentry.types.ratelimit import RateLimit, RateLimitCategory
 from sentry.users.api.serializers.user import DetailedSelfUserSerializer
 from sentry.users.models.authenticator import Authenticator
 from sentry.utils import auth, json, metrics
@@ -126,6 +127,14 @@ class AuthIndexEndpoint(BaseAuthIndexEndpoint):
     authentication methods from JS endpoints by relying on internal sessions
     and simple HTTP authentication.
     """
+    enforce_rate_limit = True
+    rate_limits = {
+        "PUT": {
+            RateLimitCategory.USER: RateLimit(
+                limit=5, window=60 * 60
+            ),  # 5 PUT requests per hour per user
+        }
+    }
 
     def _validate_superuser(
         self, validator: AuthVerifyValidator, request: Request, verify_authenticator: bool
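
These class attributes opt the endpoint into Sentry's per-category endpoint rate limiting: a given user gets five PUT requests per hour window, after which further PUTs should be rejected (typically a 429 response). A rough client-side sketch of the expected behaviour; the URL, token, and request body below are placeholders, not taken from this diff.

import requests

AUTH_URL = "https://sentry.example.com/api/0/auth/"  # placeholder host/path
HEADERS = {"Authorization": "Bearer <user-token>"}   # placeholder credentials

# With limit=5 and window=60 * 60, the sixth PUT by the same user inside an
# hour is expected to be rate limited.
for attempt in range(1, 7):
    resp = requests.put(AUTH_URL, headers=HEADERS, json={"password": "..."})  # placeholder body
    print(attempt, resp.status_code)
    if resp.status_code == 429:
        print("rate limited; Retry-After:", resp.headers.get("Retry-After"))
        break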

src/sentry/api/endpoints/group_external_issues.py  (-25)

This file was deleted.

src/sentry/api/endpoints/organization_events_trace.py  (+2 -2)

@@ -20,7 +20,7 @@
 from sentry.api.api_publish_status import ApiPublishStatus
 from sentry.api.base import region_silo_endpoint
 from sentry.api.bases import NoProjects, OrganizationEventsV2EndpointBase
-from sentry.api.serializers.models.event import get_tags_with_meta
+from sentry.api.serializers.models.event import EventTag, get_tags_with_meta
 from sentry.api.utils import handle_query_errors, update_snuba_params_with_timestamp
 from sentry.eventstore.models import Event, GroupEvent
 from sentry.issues.issue_occurrence import IssueOccurrence
@@ -169,7 +169,7 @@ class TracePerformanceIssue(TypedDict):
         "sdk_name": Optional[str],
         "span_id": str,
         "start_timestamp": str | int,
-        "tags": list[tuple[str, str]],
+        "tags": list[EventTag],
         "timestamp": str | int,
         "transaction": str,
         "transaction.duration": int,

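The annotation change swaps bare (key, value) tuples for the EventTag shape that get_tags_with_meta already returns. A hypothetical before/after illustration; EventTag is defined in sentry.api.serializers.models.event and its exact fields are not shown in this diff, so the dict shape below is an assumption.

# Old annotation: plain tuples.
old_tags: list[tuple[str, str]] = [("browser", "Chrome"), ("environment", "prod")]

# New annotation: EventTag entries as serialized by get_tags_with_meta.
# Assumed key/value shape; any additional EventTag fields are omitted here.
new_tags = [
    {"key": "browser", "value": "Chrome"},
    {"key": "environment", "value": "prod"},
]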