From 25c7b3e1addb77f824ff7950d9d47df345322fd9 Mon Sep 17 00:00:00 2001
From: Sourcery AI <>
Date: Fri, 10 Jun 2022 22:58:41 +0000
Subject: [PATCH] 'Refactored by Sourcery'
---
bin/dump-command-help | 58 ++++++++++-------
bin/find-good-catalogs | 7 +--
bin/load-mocks | 2 +-
bin/merge-catalogs | 2 +-
bin/mock-traces | 14 ++---
config/hooks/pre-commit | 2 +-
docker/sentry.conf.py | 46 +++++---------
examples/oauth2_consumer_webserver/app.py | 5 +-
fixtures/apidocs_test_case.py | 2 +-
fixtures/integrations/jira/mock.py | 3 +-
fixtures/integrations/stub_service.py | 3 +-
fixtures/vsts.py | 12 ++--
scripts/appconnect_cli.py | 3 +-
src/bitfield/models.py | 3 +-
src/bitfield/types.py | 18 ++----
src/sentry/__init__.py | 4 +-
src/sentry/admin.py | 18 +++---
src/sentry/analytics/base.py | 2 -
src/sentry/analytics/map.py | 8 ++-
src/sentry/analytics/utils.py | 2 +-
src/sentry/api/base.py | 12 +---
src/sentry/api/bases/organization.py | 7 +--
src/sentry/api/bases/organization_events.py | 55 ++++++++--------
src/sentry/api/bases/organizationmember.py | 8 +--
src/sentry/api/bases/sentryapps.py | 13 ++--
src/sentry/api/client.py | 6 +-
.../endpoints/accept_organization_invite.py | 9 +--
.../api/endpoints/api_application_details.py | 5 +-
src/sentry/api/endpoints/assistant.py | 2 +-
src/sentry/api/endpoints/auth_config.py | 2 +-
src/sentry/api/endpoints/auth_index.py | 15 ++---
src/sentry/api/endpoints/auth_login.py | 13 ++--
src/sentry/api/endpoints/broadcast_index.py | 12 +---
src/sentry/api/endpoints/chunk.py | 4 +-
src/sentry/api/endpoints/client_state.py | 6 +-
.../api/endpoints/codeowners/__init__.py | 2 +-
src/sentry/api/endpoints/debug_files.py | 36 +++++------
.../api/endpoints/event_apple_crash_report.py | 2 +-
src/sentry/api/endpoints/event_attachments.py | 3 +-
src/sentry/api/endpoints/filechange.py | 4 +-
src/sentry/api/endpoints/group_attachments.py | 3 +-
src/sentry/api/endpoints/group_details.py | 15 ++---
src/sentry/api/endpoints/group_events.py | 14 ++---
.../api/endpoints/group_similar_issues.py | 4 +-
src/sentry/api/endpoints/group_tags.py | 6 +-
src/sentry/api/endpoints/index.py | 6 +-
.../api/endpoints/integrations/index.py | 2 +-
.../endpoints/integrations/install_request.py | 3 +-
.../organization_integrations/details.py | 6 +-
.../integrations/plugins/configs_index.py | 4 +-
.../endpoints/integrations/plugins/index.py | 23 ++++---
.../integrations/sentry_apps/details.py | 11 ++--
.../integrations/sentry_apps/index.py | 25 ++++----
.../installation/external_requests.py | 2 +-
.../sentry_apps/publish_request.py | 5 +-
src/sentry/api/endpoints/internal/mail.py | 3 +-
.../endpoints/organization_auth_providers.py | 7 ++-
.../organization_codeowners_associations.py | 3 +-
.../organization_dashboard_details.py | 22 +++----
.../api/endpoints/organization_dashboards.py | 8 +--
.../api/endpoints/organization_details.py | 31 +++++-----
.../organization_events_facets_performance.py | 23 ++++---
.../organization_events_histogram.py | 35 +++++------
.../organization_events_spans_histogram.py | 33 +++++-----
.../organization_events_spans_performance.py | 14 +++--
.../endpoints/organization_events_stats.py | 3 +-
.../endpoints/organization_events_trace.py | 8 +--
.../endpoints/organization_events_vitals.py | 2 +-
.../api/endpoints/organization_group_index.py | 39 ++++++------
.../organization_group_index_stats.py | 18 +++---
.../api/endpoints/organization_index.py | 16 ++---
.../endpoints/organization_issues_count.py | 7 +--
.../endpoints/organization_member/details.py | 10 ++-
.../endpoints/organization_member/index.py | 20 +++---
.../organization_member/requests/join.py | 6 +-
.../organization_member_unreleased_commits.py | 4 +-
.../api/endpoints/organization_metrics.py | 9 ++-
.../api/endpoints/organization_monitors.py | 3 +-
.../endpoints/organization_pinned_searches.py | 6 +-
.../organization_profiling_profiles.py | 11 ++--
.../endpoints/organization_recent_searches.py | 12 +++-
.../endpoints/organization_release_details.py | 62 +++++++++----------
.../api/endpoints/organization_releases.py | 47 +++++---------
.../endpoints/organization_repositories.py | 3 +-
.../organization_repository_details.py | 8 +--
.../api/endpoints/organization_searches.py | 19 +++---
.../api/endpoints/organization_sessions.py | 22 +++----
.../api/endpoints/organization_slugs.py | 5 +-
.../api/endpoints/organization_stats.py | 9 ++-
.../api/endpoints/organization_stats_v2.py | 15 +++--
.../api/endpoints/organization_teams.py | 4 +-
.../api/endpoints/organization_user_issues.py | 6 +-
.../organization_user_issues_search.py | 8 ++-
.../project_create_sample_transaction.py | 18 +++---
src/sentry/api/endpoints/project_details.py | 9 ++-
src/sentry/api/endpoints/project_filters.py | 24 ++++---
.../api/endpoints/project_group_index.py | 3 +-
src/sentry/api/endpoints/project_index.py | 3 +-
.../api/endpoints/project_key_details.py | 9 +--
src/sentry/api/endpoints/project_key_stats.py | 21 +++----
src/sentry/api/endpoints/project_ownership.py | 4 +-
src/sentry/api/endpoints/project_plugins.py | 3 +-
.../api/endpoints/project_processingissues.py | 9 +--
.../endpoints/project_profiling_profile.py | 11 ++--
.../api/endpoints/project_release_commits.py | 4 +-
.../api/endpoints/project_release_details.py | 3 +-
.../endpoints/project_release_file_details.py | 15 +++--
.../api/endpoints/project_release_files.py | 7 +--
.../api/endpoints/project_release_setup.py | 10 +--
.../api/endpoints/project_release_stats.py | 34 +++++-----
src/sentry/api/endpoints/project_releases.py | 13 +---
.../endpoints/project_repo_path_parsing.py | 2 +-
.../api/endpoints/project_rule_details.py | 5 +-
src/sentry/api/endpoints/project_rules.py | 5 +-
.../endpoints/project_rules_configuration.py | 3 +-
.../api/endpoints/project_stacktrace_link.py | 18 +++---
src/sentry/api/endpoints/project_stats.py | 2 +-
src/sentry/api/endpoints/project_tags.py | 19 +++---
src/sentry/api/endpoints/project_transfer.py | 3 +-
src/sentry/api/endpoints/prompts_activity.py | 11 ++--
.../api/endpoints/relay/project_configs.py | 19 +++---
.../api/endpoints/relay/register_challenge.py | 13 ++--
src/sentry/api/endpoints/setup_wizard.py | 8 +--
src/sentry/api/endpoints/system_options.py | 6 +-
src/sentry/api/endpoints/team_details.py | 9 ++-
.../endpoints/user_authenticator_details.py | 19 +++---
.../endpoints/user_authenticator_enroll.py | 4 +-
src/sentry/api/endpoints/user_details.py | 27 ++++----
src/sentry/api/endpoints/user_identity.py | 3 +-
.../api/endpoints/user_identity_config.py | 19 +++---
src/sentry/api/endpoints/user_index.py | 3 +-
.../endpoints/user_notification_details.py | 6 +-
.../user_notification_fine_tuning.py | 6 +-
.../user_organizationintegrations.py | 3 +-
.../api/endpoints/user_organizations.py | 3 +-
.../api/endpoints/user_permissions_config.py | 2 +-
.../endpoints/user_social_identity_details.py | 2 -
src/sentry/api/event_search.py | 36 +++++++----
src/sentry/api/fields/avatar.py | 4 +-
src/sentry/api/fields/empty_decimal.py | 8 +--
src/sentry/api/fields/empty_integer.py | 8 +--
src/sentry/api/helpers/events.py | 3 +-
src/sentry/api/helpers/group_index/delete.py | 3 +-
src/sentry/api/helpers/group_index/index.py | 13 +---
src/sentry/api/helpers/teams.py | 4 +-
src/sentry/api/helpers/user_reports.py | 13 ++--
src/sentry/api/invite_helper.py | 29 +++++----
src/sentry/api/paginator.py | 37 ++++-------
src/sentry/api/serializers/base.py | 4 +-
src/sentry/api/serializers/snuba.py | 6 +-
src/sentry/constants.py | 3 +-
src/sentry/culprit.py | 16 ++---
src/sentry/datascrubbing.py | 38 ++++++------
src/sentry/http.py | 7 +--
src/sentry/reprocessing.py | 8 +--
src/sentry/reprocessing2.py | 6 +-
src/sentry/sdk_updates.py | 13 ++--
157 files changed, 825 insertions(+), 974 deletions(-)
diff --git a/bin/dump-command-help b/bin/dump-command-help
index 2d7453ee2ebde9..b4831cb8460074 100755
--- a/bin/dump-command-help
+++ b/bin/dump-command-help
@@ -16,7 +16,7 @@ def get_opts(param):
if any_slashes:
any_prefix_is_slash[:] = [True]
if not param.is_flag and not param.count:
- rv += " " + param.make_metavar()
+ rv += f" {param.make_metavar()}"
return rv
rv = [_write(param.opts)]
@@ -27,7 +27,7 @@ def get_opts(param):
def write_page(out, data):
path = data["path"]
- filename = os.path.join(out, *path[1:]) + "/index.rst"
+ filename = f"{os.path.join(out, *path[1:])}/index.rst"
if len(path) == 1:
filename += ".inc"
@@ -38,29 +38,44 @@ def write_page(out, data):
pass
args = [x["metavar"] for x in data["arguments"]]
- title = "`%s`" % " ".join(data["path"] + args)
- body = [title, "-" * len(title), "", data["help"] or ""]
+ title = f'`{" ".join(data["path"] + args)}`'
+ body = [
+ title,
+ "-" * len(title),
+ "",
+ data["help"] or "",
+ "",
+ "Options",
+ "```````",
+ "",
+ ]
+
- body.append("")
- body.append("Options")
- body.append("```````")
- body.append("")
for opt in data["options"]:
- prefix = "- ``%s``: " % opt["opt_string"]
- for line in click.wrap_text(opt["help"], 74, prefix, " ").splitlines() or [""]:
- body.append(line)
+ prefix = f'- ``{opt["opt_string"]}``: '
+ body.extend(
+ iter(
+ click.wrap_text(opt["help"], 74, prefix, " ").splitlines()
+ or [""]
+ )
+ )
+
body.append("- ``--help``: print this help page.")
if data["subcommands"]:
- body.append("")
- body.append("Subcommands")
- body.append("```````````")
- body.append("")
- body.append(".. toctree::")
- body.append(" :maxdepth: 1")
- body.append("")
- for subcmd in data["subcommands"]:
- body.append(f" {subcmd} <{subcmd}/index>")
+ body.extend(
+ (
+ "",
+ "Subcommands",
+ "```````````",
+ "",
+ ".. toctree::",
+ " :maxdepth: 1",
+ "",
+ )
+ )
+
+ body.extend(f" {subcmd} <{subcmd}/index>" for subcmd in data["subcommands"])
body.append("")
with open(filename, "w") as f:
@@ -88,10 +103,11 @@ def dump_command(out, cmd, path):
help_text = param.help or ""
if param.show_default:
help_text += " [default: %s]" % (
- ", ".join("%s" % d for d in param.default)
+ ", ".join(f"{d}" for d in param.default)
if isinstance(param.default, (list, tuple))
else (param.default,)
)
+
data["options"].append({"opt_string": get_opts(param), "help": help_text})
else:
data["arguments"].append({"metavar": param.make_metavar()})
diff --git a/bin/find-good-catalogs b/bin/find-good-catalogs
index 677e66ab2cc8dc..73a88b3710ec2b 100755
--- a/bin/find-good-catalogs
+++ b/bin/find-good-catalogs
@@ -11,12 +11,7 @@ MINIMUM = 80
def is_translated(msg):
- if isinstance(msg.string, bytes):
- return bool(msg.string)
- for item in msg.string:
- if not item:
- return False
- return True
+ return bool(msg.string) if isinstance(msg.string, bytes) else all(msg.string)
@click.command()
diff --git a/bin/load-mocks b/bin/load-mocks
index 8306297d1bffdc..50ff227771d963 100755
--- a/bin/load-mocks
+++ b/bin/load-mocks
@@ -120,7 +120,7 @@ def generate_commits(user):
if i == 1:
filename = "raven/base.py"
else:
- filename = random.choice(loremipsum.words) + ".js"
+ filename = f"{random.choice(loremipsum.words)}.js"
if random.randint(0, 5) == 1:
author = (user.name, user.email)
else:
diff --git a/bin/merge-catalogs b/bin/merge-catalogs
index b8348267c29015..3889fc8cc980d6 100755
--- a/bin/merge-catalogs
+++ b/bin/merge-catalogs
@@ -21,7 +21,7 @@ def merge_message(msg, frontend_msg):
@click.command()
@click.argument("locale")
def cli(locale):
- catalog_file = "src/sentry/locale/%s/LC_MESSAGES/django.po" % locale
+ catalog_file = f"src/sentry/locale/{locale}/LC_MESSAGES/django.po"
frontend_file = "build/javascript.po"
if not os.path.isfile(frontend_file):
return
diff --git a/bin/mock-traces b/bin/mock-traces
index e8ffb756d6c5c2..8f5567726ee0a4 100755
--- a/bin/mock-traces
+++ b/bin/mock-traces
@@ -23,7 +23,7 @@ def main(slow=False):
org = Organization.get_default()
print(f"Mocking org {org.name}") # NOQA
else:
- print("Mocking org {}".format("Default")) # NOQA
+ print('Mocking org Default')
org, _ = Organization.objects.get_or_create(slug="default")
for project_name in project_names:
@@ -54,7 +54,7 @@ def main(slow=False):
timestamp = timezone.now()
- print(f" > Loading normal trace") # NOQA
+ print(" > Loading normal trace")
# Normal trace
create_trace(
slow,
@@ -94,7 +94,7 @@ def main(slow=False):
},
)
- print(f" > Loading orphan data") # NOQA
+ print(" > Loading orphan data")
# Trace only with orphans
create_trace(
slow,
@@ -136,7 +136,7 @@ def main(slow=False):
},
)
- print(f" > Loading trace with many siblings") # NOQA
+ print(" > Loading trace with many siblings")
create_trace(
slow,
timestamp - timedelta(milliseconds=random_normal(4000, 250, 1000)),
@@ -158,7 +158,7 @@ def main(slow=False):
],
},
)
- print(f" > Loading trace with many roots") # NOQA
+ print(" > Loading trace with many roots")
trace_id = uuid4().hex
for _ in range(15):
create_trace(
@@ -182,7 +182,7 @@ def main(slow=False):
},
)
- print(f" > Loading chained trace with orphans") # NOQA
+ print(" > Loading chained trace with orphans")
trace_id = uuid4().hex
create_trace(
slow,
@@ -247,7 +247,7 @@ def main(slow=False):
},
)
- print(f" > Loading traces missing instrumentation") # NOQA
+ print(" > Loading traces missing instrumentation")
create_trace(
slow,
timestamp - timedelta(milliseconds=random_normal(4000, 250, 1000)),
diff --git a/config/hooks/pre-commit b/config/hooks/pre-commit
index 6bf9e1bdcebaea..d2fbd53e94f8ae 100755
--- a/config/hooks/pre-commit
+++ b/config/hooks/pre-commit
@@ -29,7 +29,7 @@ text_type = str
if "VIRTUAL_ENV" in os.environ:
# If pre-commit is not installed outside of the virtualenv, glob will return []
try:
- site_packages = glob("%s/lib/*/site-packages" % os.environ["VIRTUAL_ENV"])[0]
+ site_packages = glob(f'{os.environ["VIRTUAL_ENV"]}/lib/*/site-packages')[0]
sys.path.insert(0, site_packages)
except IndexError:
pass
diff --git a/docker/sentry.conf.py b/docker/sentry.conf.py
index 5d2d918114ef4b..9a126907ac383d 100644
--- a/docker/sentry.conf.py
+++ b/docker/sentry.conf.py
@@ -41,8 +41,9 @@
CONF_ROOT = os.path.dirname(__file__)
env = os.environ.get
-postgres = env("SENTRY_POSTGRES_HOST") or (env("POSTGRES_PORT_5432_TCP_ADDR") and "postgres")
-if postgres:
+if postgres := env("SENTRY_POSTGRES_HOST") or (
+ env("POSTGRES_PORT_5432_TCP_ADDR") and "postgres"
+):
DATABASES = {
"default": {
"ENGINE": "sentry.db.postgres",
@@ -103,38 +104,25 @@
}
)
-#########
-# Cache #
-#########
-
-# Sentry currently utilizes two separate mechanisms. While CACHES is not a
-# requirement, it will optimize several high throughput patterns.
-
-memcached = env("SENTRY_MEMCACHED_HOST") or (env("MEMCACHED_PORT_11211_TCP_ADDR") and "memcached")
-if memcached:
+if memcached := env("SENTRY_MEMCACHED_HOST") or (
+ env("MEMCACHED_PORT_11211_TCP_ADDR") and "memcached"
+):
memcached_port = env("SENTRY_MEMCACHED_PORT") or "11211"
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.memcached.MemcachedCache",
- "LOCATION": [memcached + ":" + memcached_port],
+ "LOCATION": [f"{memcached}:{memcached_port}"],
"TIMEOUT": 3600,
}
}
+
# A primary cache is required for things such as processing events
SENTRY_CACHE = "sentry.cache.redis.RedisCache"
-#########
-# Queue #
-#########
-
-# See https://develop.sentry.dev/services/queue/ for more information on
-# configuring your queue broker and workers. Sentry relies on a Python
-# framework called Celery to manage queues.
-
-rabbitmq = env("SENTRY_RABBITMQ_HOST") or (env("RABBITMQ_PORT_5672_TCP_ADDR") and "rabbitmq")
-
-if rabbitmq:
+if rabbitmq := env("SENTRY_RABBITMQ_HOST") or (
+ env("RABBITMQ_PORT_5672_TCP_ADDR") and "rabbitmq"
+):
BROKER_URL = (
"amqp://"
+ (env("SENTRY_RABBITMQ_USERNAME") or env("RABBITMQ_ENV_RABBITMQ_DEFAULT_USER") or "guest")
@@ -146,7 +134,7 @@
+ (env("SENTRY_RABBITMQ_VHOST") or env("RABBITMQ_ENV_RABBITMQ_DEFAULT_VHOST") or "/")
)
else:
- BROKER_URL = "redis://:" + redis_password + "@" + redis + ":" + redis_port + "/" + redis_db
+ BROKER_URL = f"redis://:{redis_password}@{redis}:{redis_port}/{redis_db}"
###############
@@ -211,13 +199,9 @@
# 'workers': 1, # the number of web workers
}
-###############
-# Mail Server #
-###############
-
-
-email = env("SENTRY_EMAIL_HOST") or (env("SMTP_PORT_25_TCP_ADDR") and "smtp")
-if email:
+if email := env("SENTRY_EMAIL_HOST") or (
+ env("SMTP_PORT_25_TCP_ADDR") and "smtp"
+):
SENTRY_OPTIONS["mail.backend"] = "smtp"
SENTRY_OPTIONS["mail.host"] = email
SENTRY_OPTIONS["mail.password"] = env("SENTRY_EMAIL_PASSWORD") or ""
diff --git a/examples/oauth2_consumer_webserver/app.py b/examples/oauth2_consumer_webserver/app.py
index 0dd40988284188..10b70f6113700c 100644
--- a/examples/oauth2_consumer_webserver/app.py
+++ b/examples/oauth2_consumer_webserver/app.py
@@ -39,9 +39,8 @@
def index():
access_token = session.get("access_token")
if access_token is None:
- return ("
Who are you?
" 'Login with Sentry
').format(
- url_for("login")
- )
+ return f'Who are you?
Login with Sentry
'
+
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
diff --git a/fixtures/apidocs_test_case.py b/fixtures/apidocs_test_case.py
index b847c4eb87c360..f815a98ccf3c61 100644
--- a/fixtures/apidocs_test_case.py
+++ b/fixtures/apidocs_test_case.py
@@ -48,6 +48,6 @@ def create_event(self, name, **kwargs):
"user": {"id": self.user.id, "email": self.user.email},
"release": name,
}
- data.update(kwargs)
+ data |= kwargs
return self.store_event(data=data, project_id=self.project.id)
diff --git a/fixtures/integrations/jira/mock.py b/fixtures/integrations/jira/mock.py
index 0e5e391f12c0c8..950f17be3b475d 100644
--- a/fixtures/integrations/jira/mock.py
+++ b/fixtures/integrations/jira/mock.py
@@ -48,8 +48,7 @@ def get_create_meta_for_project(self, project):
"""
self._throw_if_broken()
- createmeta = self._get_data(project, "createmeta")
- if createmeta:
+ if createmeta := self._get_data(project, "createmeta"):
return createmeta
# Fallback to stub data
diff --git a/fixtures/integrations/stub_service.py b/fixtures/integrations/stub_service.py
index ddeaa997a52775..445c6c76b72d9f 100644
--- a/fixtures/integrations/stub_service.py
+++ b/fixtures/integrations/stub_service.py
@@ -41,8 +41,7 @@ def get_stub_data(service_name, name):
:return: object
"""
cache_key = f"{service_name}.{name}"
- cached = StubService.stub_data_cache.get(cache_key)
- if cached:
+ if cached := StubService.stub_data_cache.get(cache_key):
data = cached
else:
data = json.loads(StubService.get_stub_json(service_name, name))
diff --git a/fixtures/vsts.py b/fixtures/vsts.py
index cc8788e6115a0a..35f702dda31d95 100644
--- a/fixtures/vsts.py
+++ b/fixtures/vsts.py
@@ -53,8 +53,7 @@ def _stub_vsts(self):
responses.add(
responses.GET,
- "https://app.vssps.visualstudio.com/_apis/accounts?memberId=%s&api-version=4.1"
- % self.vsts_user_id,
+ f"https://app.vssps.visualstudio.com/_apis/accounts?memberId={self.vsts_user_id}&api-version=4.1",
json={
"count": 1,
"value": [
@@ -68,12 +67,13 @@ def _stub_vsts(self):
},
)
+
responses.add(
responses.GET,
- "https://app.vssps.visualstudio.com/_apis/resourceareas/79134C72-4A58-4B42-976C-04E7115F32BF?hostId=%s&api-preview=5.0-preview.1"
- % self.vsts_account_id,
+ f"https://app.vssps.visualstudio.com/_apis/resourceareas/79134C72-4A58-4B42-976C-04E7115F32BF?hostId={self.vsts_account_id}&api-preview=5.0-preview.1",
json={"locationUrl": self.vsts_base_url},
)
+
responses.add(
responses.GET,
"https://app.vssps.visualstudio.com/_apis/profile/profiles/me?api-version=1.0",
@@ -130,9 +130,7 @@ def _stub_vsts(self):
responses.add(
responses.GET,
- "https://{}.visualstudio.com/{}/_apis/wit/workitemtypes/{}/states".format(
- self.vsts_account_name.lower(), self.project_a["name"], "Bug"
- ),
+ f'https://{self.vsts_account_name.lower()}.visualstudio.com/{self.project_a["name"]}/_apis/wit/workitemtypes/Bug/states',
json={
"value": [
{"name": "resolve_status"},
diff --git a/scripts/appconnect_cli.py b/scripts/appconnect_cli.py
index d4be9efe86b8f8..b984e7c393bfd9 100755
--- a/scripts/appconnect_cli.py
+++ b/scripts/appconnect_cli.py
@@ -68,8 +68,7 @@ def appconnect_config():
for config in symbol_sources:
if config["type"] == "appStoreConnect":
return config
- else:
- raise KeyError("appStoreConnect config not found")
+ raise KeyError("appStoreConnect config not found")
if __name__ == "__main__":
diff --git a/src/bitfield/models.py b/src/bitfield/models.py
index 727326c8a000b0..c1f90fa72036b7 100644
--- a/src/bitfield/models.py
+++ b/src/bitfield/models.py
@@ -113,8 +113,7 @@ def __init__(self, flags, default=None, *args, **kwargs):
self.labels = labels
def pre_save(self, instance, add):
- value = getattr(instance, self.attname)
- return value
+ return getattr(instance, self.attname)
def get_prep_value(self, value):
if value is None:
diff --git a/src/bitfield/types.py b/src/bitfield/types.py
index d05686acd94ba9..d099a3da6773cf 100644
--- a/src/bitfield/types.py
+++ b/src/bitfield/types.py
@@ -107,17 +107,12 @@ class BitHandler:
def __init__(self, value, keys, labels=None):
# TODO: change to bitarray?
- if value:
- self._value = int(value)
- else:
- self._value = 0
+ self._value = int(value) if value else 0
self._keys = keys
self._labels = labels is not None and labels or keys
def __eq__(self, other):
- if not isinstance(other, BitHandler):
- return False
- return self._value == other._value
+ return self._value == other._value if isinstance(other, BitHandler) else False
def __lt__(self, other):
return int(self._value) < other
@@ -135,10 +130,7 @@ def __cmp__(self, other):
return cmp(self._value, other)
def __repr__(self):
- return "<{}: {}>".format(
- self.__class__.__name__,
- ", ".join(f"{k}={self.get_bit(n).is_set}" for n, k in enumerate(self._keys)),
- )
+ return f'<{self.__class__.__name__}: {", ".join((f"{k}={self.get_bit(n).is_set}" for n, k in enumerate(self._keys)))}>'
def __str__(self):
return str(self._value)
@@ -180,7 +172,7 @@ def __getattr__(self, key):
if key.startswith("_"):
return object.__getattribute__(self, key)
if key not in self._keys:
- raise AttributeError("%s is not a valid flag" % key)
+ raise AttributeError(f"{key} is not a valid flag")
return self.get_bit(self._keys.index(key))
__getitem__ = __getattr__
@@ -189,7 +181,7 @@ def __setattr__(self, key, value):
if key.startswith("_"):
return object.__setattr__(self, key, value)
if key not in self._keys:
- raise AttributeError("%s is not a valid flag" % key)
+ raise AttributeError(f"{key} is not a valid flag")
self.set_bit(self._keys.index(key), value)
__setitem__ = __setattr__
diff --git a/src/sentry/__init__.py b/src/sentry/__init__.py
index 4aa18fec9b4cf4..823c56d8e4a183 100644
--- a/src/sentry/__init__.py
+++ b/src/sentry/__init__.py
@@ -36,9 +36,7 @@ def get_revision():
def get_version():
- if __build__:
- return f"{__version__}.{__build__}"
- return __version__
+ return f"{__version__}.{__build__}" if __build__ else __version__
def is_docker():
diff --git a/src/sentry/admin.py b/src/sentry/admin.py
index 9fef19aaa17e96..2e77d4961e082d 100644
--- a/src/sentry/admin.py
+++ b/src/sentry/admin.py
@@ -41,9 +41,7 @@ class OptionAdmin(admin.ModelAdmin):
search_fields = ("key",)
def value_repr(self, instance):
- return '{}'.format(
- escape(saferepr(instance.value))
- )
+ return f'{escape(saferepr(instance.value))}'
value_repr.short_description = "Value"
value_repr.allow_tags = True
@@ -209,9 +207,7 @@ class UserAdmin(admin.ModelAdmin):
inlines = (OrganizationUserInline, AuthIdentityInline)
def get_fieldsets(self, request, obj=None):
- if not obj:
- return self.add_fieldsets
- return super().get_fieldsets(request, obj)
+ return super().get_fieldsets(request, obj) if obj else self.add_fieldsets
def get_form(self, request, obj=None, **kwargs):
"""
@@ -219,10 +215,12 @@ def get_form(self, request, obj=None, **kwargs):
"""
defaults = {}
if obj is None:
- defaults.update(
- {"form": self.add_form, "fields": admin.util.flatten_fieldsets(self.add_fieldsets)}
- )
- defaults.update(kwargs)
+ defaults |= {
+ "form": self.add_form,
+ "fields": admin.util.flatten_fieldsets(self.add_fieldsets),
+ }
+
+ defaults |= kwargs
return super().get_form(request, obj, **defaults)
def get_urls(self):
diff --git a/src/sentry/analytics/base.py b/src/sentry/analytics/base.py
index 9c5babf0cbed4c..64a95c43726fc6 100644
--- a/src/sentry/analytics/base.py
+++ b/src/sentry/analytics/base.py
@@ -33,5 +33,3 @@ def record_event(self, event: Event) -> None:
def setup(self) -> None:
# Load default event types
import sentry.analytics.events # NOQA
-
- pass
diff --git a/src/sentry/analytics/map.py b/src/sentry/analytics/map.py
index 2208e1773a9aa1..54fdc710156374 100644
--- a/src/sentry/analytics/map.py
+++ b/src/sentry/analytics/map.py
@@ -31,9 +31,11 @@ def extract(self, value: dict[str, Any] | Any | None) -> Mapping[str, Any] | Non
# object in it, that object will not be copied.
items = value.copy()
else:
- new_value = {}
- for attr in self.attributes:
- new_value[attr.name] = attr.extract(getattr(value, attr.name, None))
+ new_value = {
+ attr.name: attr.extract(getattr(value, attr.name, None))
+ for attr in self.attributes
+ }
+
items = new_value
return get_data(self.attributes, items)
diff --git a/src/sentry/analytics/utils.py b/src/sentry/analytics/utils.py
index c1a1dbc163c4fa..415c632937239b 100644
--- a/src/sentry/analytics/utils.py
+++ b/src/sentry/analytics/utils.py
@@ -25,6 +25,6 @@ def get_data(attributes: Sequence[Attribute], items: dict[str, Any]) -> Mapping[
data[attr.name] = attr.extract(nv)
if items:
- raise ValueError("Unknown attributes: {}".format(", ".join(items.keys())))
+ raise ValueError(f'Unknown attributes: {", ".join(items.keys())}')
return data
diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py
index ea1d1d42273b42..227fbd8adedf8f 100644
--- a/src/sentry/api/base.py
+++ b/src/sentry/api/base.py
@@ -123,7 +123,7 @@ def build_cursor_link(self, request: Request, name, cursor):
if querystring is not None:
base_url = f"{base_url}?{querystring}"
else:
- base_url = base_url + "?"
+ base_url = f"{base_url}?"
return LINK_HEADER.format(
uri=base_url,
@@ -223,10 +223,7 @@ def dispatch(self, request: Request, *args, **kwargs) -> Response:
try:
with sentry_sdk.start_span(op="base.dispatch.request", description=type(self).__name__):
if origin:
- if request.auth:
- allowed_origins = request.auth.get_allowed_origins()
- else:
- allowed_origins = None
+ allowed_origins = request.auth.get_allowed_origins() if request.auth else None
if not is_valid_origin(origin, allowed=allowed_origins):
response = Response(f"Invalid origin: {origin}", status=400)
self.response = self.finalize_response(request, response, *args, **kwargs)
@@ -417,10 +414,7 @@ def _parse_args(self, request: Request, environment_id=None):
try:
end = request.GET.get("until")
- if end:
- end = to_datetime(float(end))
- else:
- end = datetime.utcnow().replace(tzinfo=utc)
+ end = to_datetime(float(end)) if end else datetime.utcnow().replace(tzinfo=utc)
except ValueError:
raise ParseError(detail="until must be a numeric timestamp.")
diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py
index 6cea2b337f7e2f..dc774328973b2d 100644
--- a/src/sentry/api/bases/organization.py
+++ b/src/sentry/api/bases/organization.py
@@ -334,8 +334,7 @@ def get_filter_params(
"organization_id": organization.id,
}
- environments = self.get_environments(request, organization)
- if environments:
+ if environments := self.get_environments(request, organization):
params["environment"] = [env.name for env in environments]
params["environment_objects"] = environments
@@ -409,9 +408,9 @@ def has_release_permission(self, request: Request, organization, release):
has_perms = None
key = None
if getattr(request, "user", None) and request.user.id:
- actor_id = "user:%s" % request.user.id
+ actor_id = f"user:{request.user.id}"
if getattr(request, "auth", None) and request.auth.id:
- actor_id = "apikey:%s" % request.auth.id
+ actor_id = f"apikey:{request.auth.id}"
if actor_id is not None:
project_ids = sorted(self.get_requested_project_ids_unchecked(request))
key = "release_perms:1:%s" % hash_values(
diff --git a/src/sentry/api/bases/organization_events.py b/src/sentry/api/bases/organization_events.py
index d20c67ba7f70b8..a7ca31e190562c 100644
--- a/src/sentry/api/bases/organization_events.py
+++ b/src/sentry/api/bases/organization_events.py
@@ -38,7 +38,10 @@
def resolve_axis_column(column: str, index: int = 0) -> str:
return cast(
- str, get_function_alias(column) if not is_equation(column) else f"equation[{index}]"
+ str,
+ f"equation[{index}]"
+ if is_equation(column)
+ else get_function_alias(column),
)
@@ -202,11 +205,7 @@ def build_cursor_link(self, request: Request, name: str, cursor: Optional[Cursor
)
base_url = absolute_uri(urlquote(request.path))
- if querystring:
- base_url = f"{base_url}?{querystring}"
- else:
- base_url = base_url + "?"
-
+ base_url = f"{base_url}?{querystring}" if querystring else f"{base_url}?"
return cast(str, LINK_HEADER).format(
uri=base_url,
cursor=str(cursor),
@@ -263,7 +262,7 @@ def handle_data(
if "issue" in fields: # Look up the short ID and return that in the results
self.handle_issues(results, project_ids, organization)
- if not ("project.id" in first_row or "projectid" in first_row):
+ if "project.id" not in first_row and "projectid" not in first_row:
return results
for result in results:
@@ -362,25 +361,25 @@ def get_event_stats_data(
# that acts as a placeholder.
is_multiple_axis = len(query_columns) > 1
if top_events > 0 and isinstance(result, dict):
- results = {}
- for key, event_result in result.items():
- if is_multiple_axis:
- results[key] = self.serialize_multiple_axis(
- serializer,
- event_result,
- columns,
- query_columns,
- allow_partial_buckets,
- zerofill_results=zerofill_results,
- )
- else:
- # Need to get function alias if count is a field, but not the axis
- results[key] = serializer.serialize(
- event_result,
- column=resolve_axis_column(query_columns[0]),
- allow_partial_buckets=allow_partial_buckets,
- zerofill_results=zerofill_results,
- )
+ results = {
+ key: self.serialize_multiple_axis(
+ serializer,
+ event_result,
+ columns,
+ query_columns,
+ allow_partial_buckets,
+ zerofill_results=zerofill_results,
+ )
+ if is_multiple_axis
+ else serializer.serialize(
+ event_result,
+ column=resolve_axis_column(query_columns[0]),
+ allow_partial_buckets=allow_partial_buckets,
+ zerofill_results=zerofill_results,
+ )
+ for key, event_result in result.items()
+ }
+
serialized_result = results
elif is_multiple_axis:
serialized_result = self.serialize_multiple_axis(
@@ -394,9 +393,7 @@ def get_event_stats_data(
if top_events > 0 and isinstance(result, SnubaTSResult):
serialized_result = {"": serialized_result}
else:
- extra_columns = None
- if comparison_delta:
- extra_columns = ["comparisonCount"]
+ extra_columns = ["comparisonCount"] if comparison_delta else None
serialized_result = serializer.serialize(
result,
resolve_axis_column(query_columns[0]),
diff --git a/src/sentry/api/bases/organizationmember.py b/src/sentry/api/bases/organizationmember.py
index 4889df72165172..2422a502c54583 100644
--- a/src/sentry/api/bases/organizationmember.py
+++ b/src/sentry/api/bases/organizationmember.py
@@ -19,14 +19,10 @@ class MemberIdField(serializers.IntegerField):
"""
def to_internal_value(self, data):
- if data == "me":
- return data
- return super().to_internal_value(data)
+ return data if data == "me" else super().to_internal_value(data)
def run_validation(self, data):
- if data == "me":
- return data
- return super().run_validation(data)
+ return data if data == "me" else super().run_validation(data)
class MemberSerializer(serializers.Serializer):
diff --git a/src/sentry/api/bases/sentryapps.py b/src/sentry/api/bases/sentryapps.py
index 1b46df80a17307..599876d674d8df 100644
--- a/src/sentry/api/bases/sentryapps.py
+++ b/src/sentry/api/bases/sentryapps.py
@@ -119,9 +119,8 @@ def _get_organization(self, request: Request):
organization_slug = self._get_organization_slug(request)
if is_active_superuser(request):
return self._get_organization_for_superuser(organization_slug)
- else:
- user = request.user
- return self._get_organization_for_user(user, organization_slug)
+ user = request.user
+ return self._get_organization_for_user(user, organization_slug)
def convert_args(self, request: Request, *args, **kwargs):
"""
@@ -182,9 +181,11 @@ def has_object_permission(self, request: Request, view, sentry_app):
return True
# if app is unpublished, user must be in the Org who owns the app.
- if not sentry_app.is_published:
- if sentry_app.owner not in request.user.get_orgs():
- raise Http404
+ if (
+ not sentry_app.is_published
+ and sentry_app.owner not in request.user.get_orgs()
+ ):
+ raise Http404
# TODO(meredith): make a better way to allow for public
# endpoints. we can't use ensure_scoped_permission now
diff --git a/src/sentry/api/client.py b/src/sentry/api/client.py
index ca6c9a12da0f19..d42985eb31caae 100644
--- a/src/sentry/api/client.py
+++ b/src/sentry/api/client.py
@@ -36,11 +36,7 @@ def request(
is_superuser=None,
request=None,
):
- if self.prefix not in path:
- full_path = self.prefix + path
- else:
- full_path = path
-
+ full_path = self.prefix + path if self.prefix not in path else path
# we explicitly do not allow you to override the request *and* the user
# as then other checks like is_superuser would need overwritten
assert not (request and (user or auth)), "use either request or auth"
diff --git a/src/sentry/api/endpoints/accept_organization_invite.py b/src/sentry/api/endpoints/accept_organization_invite.py
index 773554b9892949..2ebc159fe270ff 100644
--- a/src/sentry/api/endpoints/accept_organization_invite.py
+++ b/src/sentry/api/endpoints/accept_organization_invite.py
@@ -61,10 +61,11 @@ def get(self, request: Request, member_id, token) -> Response:
# When SSO is required do *not* set a next_url to return to accept
# invite. The invite will be accepted after SSO is completed.
url = (
- reverse("sentry-accept-invite", args=[member_id, token])
- if not auth_provider
- else "/"
+ "/"
+ if auth_provider
+ else reverse("sentry-accept-invite", args=[member_id, token])
)
+
auth.initiate_login(self.request, next_url=url)
# If the org has SSO setup, we'll store the invite cookie to later
@@ -77,7 +78,7 @@ def get(self, request: Request, member_id, token) -> Response:
data["ssoProvider"] = provider.name
onboarding_steps = helper.get_onboarding_steps()
- data.update(onboarding_steps)
+ data |= onboarding_steps
if any(onboarding_steps.values()):
add_invite_cookie(request, response, member_id, token)
diff --git a/src/sentry/api/endpoints/api_application_details.py b/src/sentry/api/endpoints/api_application_details.py
index 08f506e213e983..95b3ae20b4314e 100644
--- a/src/sentry/api/endpoints/api_application_details.py
+++ b/src/sentry/api/endpoints/api_application_details.py
@@ -83,9 +83,8 @@ def delete(self, request: Request, app_id) -> Response:
raise ResourceDoesNotExist
with transaction.atomic():
- updated = ApiApplication.objects.filter(id=instance.id).update(
+ if updated := ApiApplication.objects.filter(id=instance.id).update(
status=ApiApplicationStatus.pending_deletion
- )
- if updated:
+ ):
ScheduledDeletion.schedule(instance, days=0, actor=request.user)
return Response(status=204)
diff --git a/src/sentry/api/endpoints/assistant.py b/src/sentry/api/endpoints/assistant.py
index 33f72d49fd25d8..3c23cadacd853d 100644
--- a/src/sentry/api/endpoints/assistant.py
+++ b/src/sentry/api/endpoints/assistant.py
@@ -35,7 +35,7 @@ def validate(self, attrs):
if guide_id:
return attrs
- if not guide and not guide_id:
+ if not guide:
raise serializers.ValidationError("Either assistant guide or guide_id is required")
guide_id = manager.get_guide_id(guide)
diff --git a/src/sentry/api/endpoints/auth_config.py b/src/sentry/api/endpoints/auth_config.py
index fc06ecf7cb95ff..ad9f3e97aed55f 100644
--- a/src/sentry/api/endpoints/auth_config.py
+++ b/src/sentry/api/endpoints/auth_config.py
@@ -79,6 +79,6 @@ def prepare_login_context(self, request: Request, *args, **kwargs):
if "session_expired" in request.COOKIES:
context["warning"] = WARN_SESSION_EXPIRED
- context.update(additional_context.run_callbacks(request))
+ context |= additional_context.run_callbacks(request)
return context
diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py
index 9f77bf2fff84cf..287219153538c7 100644
--- a/src/sentry/api/endpoints/auth_index.py
+++ b/src/sentry/api/endpoints/auth_index.py
@@ -71,14 +71,11 @@ def _verify_user_via_inputs(validator, request):
"u2f_authentication.value_error",
extra={"user": request.user.id, "error_message": err},
)
- pass
except LookupError:
logger.warning(
"u2f_authentication.interface_not_enrolled",
extra={"validated_data": validator.validated_data, "user": request.user.id},
)
- pass
- # attempt password authentication
elif "password" in validator.validated_data:
authenticated = request.user.check_password(validator.validated_data["password"])
return authenticated
@@ -205,14 +202,14 @@ def put(self, request: Request):
return Response(status=status.HTTP_401_UNAUTHORIZED)
validator = AuthVerifyValidator(data=request.data)
- if not request.user.is_superuser:
- if not validator.is_valid():
- return self.respond(validator.errors, status=status.HTTP_400_BAD_REQUEST)
-
- authenticated = self._verify_user_via_inputs(validator, request)
- else:
+ if request.user.is_superuser:
authenticated = self._validate_superuser(validator, request)
+ elif not validator.is_valid():
+ return self.respond(validator.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ else:
+ authenticated = self._verify_user_via_inputs(validator, request)
if not authenticated:
return Response({"detail": {"code": "ignore"}}, status=status.HTTP_403_FORBIDDEN)
diff --git a/src/sentry/api/endpoints/auth_login.py b/src/sentry/api/endpoints/auth_login.py
index e9f16d394bc4ec..cfb62fb9d02dab 100644
--- a/src/sentry/api/endpoints/auth_login.py
+++ b/src/sentry/api/endpoints/auth_login.py
@@ -22,16 +22,11 @@ def post(self, request: Request, organization=None, *args, **kwargs) -> Response
"""
login_form = AuthenticationForm(request, request.data)
- # Rate limit logins
- is_limited = ratelimiter.is_limited(
- "auth:login:username:{}".format(
- md5_text(login_form.clean_username(request.data.get("username"))).hexdigest()
- ),
+ if is_limited := ratelimiter.is_limited(
+ f'auth:login:username:{md5_text(login_form.clean_username(request.data.get("username"))).hexdigest()}',
limit=10,
- window=60, # 10 per minute should be enough for anyone
- )
-
- if is_limited:
+ window=60,
+ ):
errors = {"__all__": [login_form.error_messages["rate_limited"]]}
metrics.incr(
"login.attempt", instance="rate_limited", skip_internal=True, sample_rate=1.0
diff --git a/src/sentry/api/endpoints/broadcast_index.py b/src/sentry/api/endpoints/broadcast_index.py
index 8487f235431430..85e2ee17127b87 100644
--- a/src/sentry/api/endpoints/broadcast_index.py
+++ b/src/sentry/api/endpoints/broadcast_index.py
@@ -53,8 +53,7 @@ def get(self, request: Request, organization=None) -> Response:
Q(date_expires__isnull=True) | Q(date_expires__gt=timezone.now()), is_active=True
).order_by("-date_added")
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "query":
@@ -91,13 +90,8 @@ def get(self, request: Request, organization=None) -> Response:
return self.respond(self._serialize_objects(data, request))
sort_by = request.GET.get("sortBy")
- if sort_by == "expires":
- order_by = "-date_expires"
- paginator_cls = DateTimePaginator
- else:
- order_by = "-date_added"
- paginator_cls = DateTimePaginator
-
+ order_by = "-date_expires" if sort_by == "expires" else "-date_added"
+ paginator_cls = DateTimePaginator
return self.paginate(
request=request,
queryset=queryset,
diff --git a/src/sentry/api/endpoints/chunk.py b/src/sentry/api/endpoints/chunk.py
index 6d78668b28f4bd..741cd6fae4452a 100644
--- a/src/sentry/api/endpoints/chunk.py
+++ b/src/sentry/api/endpoints/chunk.py
@@ -18,7 +18,7 @@
MAX_CHUNKS_PER_REQUEST = 64
MAX_REQUEST_SIZE = 32 * 1024 * 1024
-MAX_CONCURRENCY = settings.DEBUG and 1 or 8
+MAX_CONCURRENCY = 1 if settings.DEBUG else 8
HASH_ALGORITHM = "sha1"
 SENTRYCLI_SEMVER_RE = re.compile(r"^sentry-cli\/(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+).*$")
API_PREFIX = "/api/0"
@@ -110,7 +110,7 @@ def post(self, request: Request, organization) -> Response:
files = request.data.getlist("file")
files += [GzipChunk(chunk) for chunk in request.data.getlist("file_gzip")]
- if len(files) == 0:
+ if not files:
# No files uploaded is ok
logger.info("chunkupload.end", extra={"status": status.HTTP_200_OK})
return Response(status=status.HTTP_200_OK)
diff --git a/src/sentry/api/endpoints/client_state.py b/src/sentry/api/endpoints/client_state.py
index b0f3eee5cba02b..33cccdfbd3fb83 100644
--- a/src/sentry/api/endpoints/client_state.py
+++ b/src/sentry/api/endpoints/client_state.py
@@ -23,8 +23,7 @@ def get(self, request: Request, organization) -> Response:
result = {}
for category in STATE_CATEGORIES:
key = get_client_state_key(organization.slug, category, request.user)
- value = self.client.get(key)
- if value:
+ if value := self.client.get(key):
result[category] = json.loads(value)
return Response(result)
@@ -48,8 +47,7 @@ def convert_args(self, request: Request, organization_slug, *args, **kwargs):
return (args, kwargs)
def get(self, request: Request, organization, category, key) -> Response:
- value = self.client.get(key)
- if value:
+ if value := self.client.get(key):
response = HttpResponse(value)
response["Content-Type"] = "application/json"
return response
diff --git a/src/sentry/api/endpoints/codeowners/__init__.py b/src/sentry/api/endpoints/codeowners/__init__.py
index 33240b19fb5a9f..6b9225c2078d7a 100644
--- a/src/sentry/api/endpoints/codeowners/__init__.py
+++ b/src/sentry/api/endpoints/codeowners/__init__.py
@@ -114,7 +114,7 @@ def has_feature(self, request: Request, project: Project) -> bool:
)
def track_response_code(self, type: str, status: int | str) -> None:
- if type in ["create", "update"]:
+ if type in {"create", "update"}:
metrics.incr(
f"codeowners.{type}.http_response",
sample_rate=1.0,
diff --git a/src/sentry/api/endpoints/debug_files.py b/src/sentry/api/endpoints/debug_files.py
index 189ed4186b7f40..b2cba6f3ac9bb5 100644
--- a/src/sentry/api/endpoints/debug_files.py
+++ b/src/sentry/api/endpoints/debug_files.py
@@ -83,12 +83,11 @@ class DebugFilesEndpoint(ProjectEndpoint):
permission_classes = (ProjectReleasePermission,)
def download(self, debug_file_id, project):
- rate_limited = ratelimits.is_limited(
+ if rate_limited := ratelimits.is_limited(
project=project,
key=f"rl:DSymFilesEndpoint:download:{debug_file_id}:{project.id}",
limit=10,
- )
- if rate_limited:
+ ):
logger.info(
"notification.rate_limited",
extra={"project_id": project.id, "project_debug_file_id": debug_file_id},
@@ -106,10 +105,10 @@ def download(self, debug_file_id, project):
iter(lambda: fp.read(4096), b""), content_type="application/octet-stream"
)
response["Content-Length"] = debug_file.file.size
- response["Content-Disposition"] = 'attachment; filename="{}{}"'.format(
- posixpath.basename(debug_file.debug_id),
- debug_file.file_extension,
- )
+ response[
+ "Content-Disposition"
+ ] = f'attachment; filename="{posixpath.basename(debug_file.debug_id)}{debug_file.file_extension}"'
+
return response
except OSError:
raise Http404
@@ -131,10 +130,11 @@ def get(self, request: Request, project) -> Response:
:auth: required
"""
download_requested = request.GET.get("id") is not None
- if download_requested and (has_download_permission(request, project)):
- return self.download(request.GET.get("id"), project)
- elif download_requested:
- return Response(status=403)
+ if download_requested:
+ if has_download_permission(request, project):
+ return self.download(request.GET.get("id"), project)
+ else:
+ return Response(status=403)
code_id = request.GET.get("code_id")
debug_id = request.GET.get("debug_id")
@@ -166,16 +166,14 @@ def get(self, request: Request, project) -> Response:
| Q(file__headers__icontains=query)
)
- known_file_format = DIF_MIMETYPES.get(query)
- if known_file_format:
+ if known_file_format := DIF_MIMETYPES.get(query):
q |= Q(file__headers__icontains=known_file_format)
else:
q = Q()
file_format_q = Q()
for file_format in file_formats:
- known_file_format = DIF_MIMETYPES.get(file_format)
- if known_file_format:
+ if known_file_format := DIF_MIMETYPES.get(file_format):
file_format_q |= Q(file__headers__icontains=known_file_format)
q &= file_format_q
@@ -357,9 +355,7 @@ def post(self, request: Request, project) -> Response:
file_response[checksum] = {"state": ChunkFileState.NOT_FOUND, "missingChunks": []}
continue
- # Check if all requested chunks have been uploaded.
- missing_chunks = find_missing_chunks(project.organization, chunks)
- if missing_chunks:
+ if missing_chunks := find_missing_chunks(project.organization, chunks):
file_response[checksum] = {
"state": ChunkFileState.NOT_FOUND,
"missingChunks": missing_chunks,
@@ -461,9 +457,7 @@ def delete(self, request: Request, project) -> Response:
:auth: required
"""
- archive_name = request.GET.get("name")
-
- if archive_name:
+ if archive_name := request.GET.get("name"):
with atomic_transaction(using=router.db_for_write(ReleaseFile)):
release = Release.objects.get(
organization_id=project.organization_id, projects=project, version=archive_name
diff --git a/src/sentry/api/endpoints/event_apple_crash_report.py b/src/sentry/api/endpoints/event_apple_crash_report.py
index 64e433e3182663..2587c6794d78ce 100644
--- a/src/sentry/api/endpoints/event_apple_crash_report.py
+++ b/src/sentry/api/endpoints/event_apple_crash_report.py
@@ -42,7 +42,7 @@ def get(self, request: Request, project, event_id) -> Response:
response = HttpResponse(apple_crash_report_string, content_type="text/plain")
if request.GET.get("download") is not None:
- filename = "{}{}.crash".format(event.event_id, symbolicated and "-symbolicated" or "")
+ filename = f'{event.event_id}{symbolicated and "-symbolicated" or ""}.crash'
response = StreamingHttpResponse(apple_crash_report_string, content_type="text/plain")
response["Content-Length"] = len(apple_crash_report_string)
response["Content-Disposition"] = 'attachment; filename="%s"' % filename
diff --git a/src/sentry/api/endpoints/event_attachments.py b/src/sentry/api/endpoints/event_attachments.py
index 51eed0bd61b9db..0c6fb5cb7f02c0 100644
--- a/src/sentry/api/endpoints/event_attachments.py
+++ b/src/sentry/api/endpoints/event_attachments.py
@@ -33,8 +33,7 @@ def get(self, request: Request, project, event_id) -> Response:
queryset = EventAttachment.objects.filter(project_id=project.id, event_id=event.event_id)
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "query":
diff --git a/src/sentry/api/endpoints/filechange.py b/src/sentry/api/endpoints/filechange.py
index 821fb9b859d117..6a98c5d13175bd 100644
--- a/src/sentry/api/endpoints/filechange.py
+++ b/src/sentry/api/endpoints/filechange.py
@@ -41,9 +41,7 @@ def get(self, request: Request, organization, version) -> Response:
)
)
- repo_name = request.query_params.get("repo_name")
-
- if repo_name:
+ if repo_name := request.query_params.get("repo_name"):
try:
repo = Repository.objects.get(organization_id=organization.id, name=repo_name)
queryset = queryset.filter(commit__repository_id=repo.id)
diff --git a/src/sentry/api/endpoints/group_attachments.py b/src/sentry/api/endpoints/group_attachments.py
index 939c2a8ec87a68..8b8c184ee6c409 100644
--- a/src/sentry/api/endpoints/group_attachments.py
+++ b/src/sentry/api/endpoints/group_attachments.py
@@ -41,8 +41,7 @@ def get(self, request: Request, group) -> Response:
attachments = EventAttachment.objects.filter(group_id=group.id)
- types = request.GET.getlist("types") or ()
- if types:
+ if types := request.GET.getlist("types") or ():
attachments = attachments.filter(type__in=types)
return self.paginate(
diff --git a/src/sentry/api/endpoints/group_details.py b/src/sentry/api/endpoints/group_details.py
index c505199c283ccf..b2eb1f8ae6d18e 100644
--- a/src/sentry/api/endpoints/group_details.py
+++ b/src/sentry/api/endpoints/group_details.py
@@ -91,9 +91,9 @@ def _get_available_issue_plugins(self, request: Request, group):
plugin_issues = []
for plugin in plugins.for_project(project, version=1):
- if isinstance(plugin, IssueTrackingPlugin2):
- if is_plugin_deprecated(plugin, project):
- continue
+ if isinstance(
+ plugin, IssueTrackingPlugin2
+ ) and not is_plugin_deprecated(plugin, project):
plugin_issues = safe_execute(
plugin.plugin_issues, request, group, plugin_issues, _with_transaction=False
)
@@ -160,12 +160,13 @@ def get(self, request: Request, group) -> Response:
tags = tagstore.get_group_tag_keys(
group.project_id, group.id, environment_ids, limit=100
)
- if not environment_ids:
- user_reports = UserReport.objects.filter(group_id=group.id)
- else:
- user_reports = UserReport.objects.filter(
+ user_reports = (
+ UserReport.objects.filter(
group_id=group.id, environment_id__in=environment_ids
)
+ if environment_ids
+ else UserReport.objects.filter(group_id=group.id)
+ )
now = timezone.now()
hourly_stats = tsdb.rollup(
diff --git a/src/sentry/api/endpoints/group_events.py b/src/sentry/api/endpoints/group_events.py
index e682a2ec796b10..e69f227250a4a8 100644
--- a/src/sentry/api/endpoints/group_events.py
+++ b/src/sentry/api/endpoints/group_events.py
@@ -69,11 +69,13 @@ def _get_events_snuba(self, request: Request, group, environments, query, tags,
"group_ids": [group.id],
"project_id": [group.project_id],
"organization_id": group.project.organization_id,
- "start": start if start else default_start,
- "end": end if end else default_end,
+ "start": start or default_start,
+ "end": end or default_end,
}
- direct_hit_resp = get_direct_hit_response(request, query, params, "api.group-events")
- if direct_hit_resp:
+
+ if direct_hit_resp := get_direct_hit_response(
+ request, query, params, "api.group-events"
+ ):
return direct_hit_resp
if environments:
@@ -96,9 +98,7 @@ def _get_events_snuba(self, request: Request, group, environments, query, tags,
)
def _get_search_query_and_tags(self, request: Request, group, environments=None):
- raw_query = request.GET.get("query")
-
- if raw_query:
+ if raw_query := request.GET.get("query"):
query_kwargs = parse_query([group.project], raw_query, request.user, environments)
query = query_kwargs.pop("query", None)
tags = query_kwargs.pop("tags", {})
diff --git a/src/sentry/api/endpoints/group_similar_issues.py b/src/sentry/api/endpoints/group_similar_issues.py
index 678d8860d0c103..27067fadf824c4 100644
--- a/src/sentry/api/endpoints/group_similar_issues.py
+++ b/src/sentry/api/endpoints/group_similar_issues.py
@@ -13,9 +13,7 @@
def _fix_label(label):
- if isinstance(label, tuple):
- return ":".join(label)
- return label
+ return ":".join(label) if isinstance(label, tuple) else label
class GroupSimilarIssuesEndpoint(GroupEndpoint):
diff --git a/src/sentry/api/endpoints/group_tags.py b/src/sentry/api/endpoints/group_tags.py
index 0f1b4dc9821475..ff141bb61802e4 100644
--- a/src/sentry/api/endpoints/group_tags.py
+++ b/src/sentry/api/endpoints/group_tags.py
@@ -17,11 +17,7 @@ def get(self, request: Request, group) -> Response:
# There are 2 use-cases for this method. For the 'Tags' tab we
# get the top 10 values, for the tag distribution bars we get 9
# This should ideally just be specified by the client
- if keys:
- value_limit = 9
- else:
- value_limit = 10
-
+ value_limit = 9 if keys else 10
environment_ids = [e.id for e in get_environments(request, group.project.organization)]
tag_keys = tagstore.get_group_tag_keys_and_top_values(
diff --git a/src/sentry/api/endpoints/index.py b/src/sentry/api/endpoints/index.py
index 41bd3c1ccc59ff..a99980cd37102a 100644
--- a/src/sentry/api/endpoints/index.py
+++ b/src/sentry/api/endpoints/index.py
@@ -15,10 +15,6 @@ def get(self, request: Request) -> Response:
else:
user = None
- if request.auth:
- auth = {"scopes": request.auth.get_scopes()}
- else:
- auth = None
-
+ auth = {"scopes": request.auth.get_scopes()} if request.auth else None
context = {"version": "0", "auth": auth, "user": user}
return Response(context, status=200)
diff --git a/src/sentry/api/endpoints/integrations/index.py b/src/sentry/api/endpoints/integrations/index.py
index b4e18614fa82f4..9fd4c0273ebecd 100644
--- a/src/sentry/api/endpoints/integrations/index.py
+++ b/src/sentry/api/endpoints/integrations/index.py
@@ -14,7 +14,7 @@ def is_provider_enabled(provider):
if not provider.requires_feature_flag:
return True
provider_key = provider.key.replace("_", "-")
- feature_flag_name = "organizations:integrations-%s" % provider_key
+ feature_flag_name = f"organizations:integrations-{provider_key}"
return features.has(feature_flag_name, organization, actor=request.user)
providers = filter(is_provider_enabled, list(integrations.all()))
diff --git a/src/sentry/api/endpoints/integrations/install_request.py b/src/sentry/api/endpoints/integrations/install_request.py
index 1098599e0dafae..cd1af190e53d8e 100644
--- a/src/sentry/api/endpoints/integrations/install_request.py
+++ b/src/sentry/api/endpoints/integrations/install_request.py
@@ -32,8 +32,7 @@ def get_provider_name(provider_type: str, provider_slug: str) -> str | None:
if plugins.exists(provider_slug):
return plugins.get(provider_slug).title
elif provider_type == "sentry_app":
- sentry_app = SentryApp.objects.filter(slug=provider_slug).first()
- if sentry_app:
+ if sentry_app := SentryApp.objects.filter(slug=provider_slug).first():
return sentry_app.name
return None
diff --git a/src/sentry/api/endpoints/integrations/organization_integrations/details.py b/src/sentry/api/endpoints/integrations/organization_integrations/details.py
index a1ef7eb2256130..77f6e2e1e1a7db 100644
--- a/src/sentry/api/endpoints/integrations/organization_integrations/details.py
+++ b/src/sentry/api/endpoints/integrations/organization_integrations/details.py
@@ -75,11 +75,9 @@ def delete(self, request: Request, organization, integration_id) -> Response:
integration.get_installation(organization.id).uninstall()
with transaction.atomic():
- updated = OrganizationIntegration.objects.filter(
+ if updated := OrganizationIntegration.objects.filter(
id=org_integration.id, status=ObjectStatus.VISIBLE
- ).update(status=ObjectStatus.PENDING_DELETION)
-
- if updated:
+ ).update(status=ObjectStatus.PENDING_DELETION):
ScheduledDeletion.schedule(org_integration, days=0, actor=request.user)
create_audit_entry(
request=request,
diff --git a/src/sentry/api/endpoints/integrations/plugins/configs_index.py b/src/sentry/api/endpoints/integrations/plugins/configs_index.py
index 790a6cd1df0fc8..3a440c5543004a 100644
--- a/src/sentry/api/endpoints/integrations/plugins/configs_index.py
+++ b/src/sentry/api/endpoints/integrations/plugins/configs_index.py
@@ -29,7 +29,7 @@ def get(self, request: Request, organization) -> Response:
try:
desired_plugins.append(plugins.get(slug))
except KeyError:
- return Response({"detail": "Plugin %s not found" % slug}, status=404)
+ return Response({"detail": f"Plugin {slug} not found"}, status=404)
# if no plugins were specified, grab all plugins but limit by those that have the ability to be configured
if not desired_plugins:
@@ -40,7 +40,7 @@ def get(self, request: Request, organization) -> Response:
# plugin to work (ex:`opsgenie:api_key`)
keys_to_check = []
for plugin in desired_plugins:
- keys_to_check.append("%s:enabled" % plugin.slug)
+ keys_to_check.append(f"{plugin.slug}:enabled")
if plugin.required_field:
keys_to_check.append(f"{plugin.slug}:{plugin.required_field}")
diff --git a/src/sentry/api/endpoints/integrations/plugins/index.py b/src/sentry/api/endpoints/integrations/plugins/index.py
index 56327702846472..2bd90cd0b01788 100644
--- a/src/sentry/api/endpoints/integrations/plugins/index.py
+++ b/src/sentry/api/endpoints/integrations/plugins/index.py
@@ -16,9 +16,12 @@ def get(self, request: Request, organization) -> Response:
if "plugins" in request.GET:
if request.GET.get("plugins") == "_all":
return Response(
- serialize([p for p in plugins.all()], request.user, PluginSerializer())
+ serialize(
+ list(plugins.all()), request.user, PluginSerializer()
+ )
)
+
desired_plugins = set(request.GET.getlist("plugins"))
else:
desired_plugins = set(all_plugins.keys())
@@ -29,19 +32,19 @@ def get(self, request: Request, organization) -> Response:
# Each tuple represents an enabled Plugin (of only the ones we care
# about) and its corresponding Project.
enabled_plugins = ProjectOption.objects.filter(
- key__in=["%s:enabled" % slug for slug in desired_plugins],
+ key__in=[f"{slug}:enabled" for slug in desired_plugins],
project__organization=organization,
).select_related("project")
- resources = []
- for project_option in enabled_plugins:
- resources.append(
- serialize(
- all_plugins[project_option.key.split(":")[0]],
- request.user,
- OrganizationPluginSerializer(project_option.project),
- )
+ resources = [
+ serialize(
+ all_plugins[project_option.key.split(":")[0]],
+ request.user,
+ OrganizationPluginSerializer(project_option.project),
)
+ for project_option in enabled_plugins
+ ]
+
return Response(resources)
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/details.py b/src/sentry/api/endpoints/integrations/sentry_apps/details.py
index c3ccebe1affa97..b5ac939bab175e 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/details.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/details.py
@@ -63,8 +63,8 @@ def put(self, request: Request, sentry_app) -> Response:
# log any errors with schema
if "schema" in serializer.errors:
+ name = "sentry_app.schema_validation_error"
for error_message in serializer.errors["schema"]:
- name = "sentry_app.schema_validation_error"
log_info = {
"schema": json.dumps(request.data["schema"]),
"user_id": request.user.id,
@@ -86,7 +86,8 @@ def delete(self, request: Request, sentry_app) -> Response:
return Response({"detail": ["Published apps cannot be removed."]}, status=403)
def _has_hook_events(self, request: Request):
- if not request.json_body.get("events"):
- return False
-
- return "error" in request.json_body["events"]
+ return (
+ "error" in request.json_body["events"]
+ if request.json_body.get("events")
+ else False
+ )
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/index.py b/src/sentry/api/endpoints/integrations/sentry_apps/index.py
index 1eadaab88304b8..b3d1a105dde437 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/index.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/index.py
@@ -22,7 +22,13 @@ class SentryAppsEndpoint(SentryAppsBaseEndpoint):
def get(self, request: Request) -> Response:
status = request.GET.get("status")
- if status == "published":
+ if (
+ status == "published"
+ or status != "published"
+ and status != "unpublished"
+ and status != "internal"
+ and not is_active_superuser(request)
+ ):
queryset = SentryApp.objects.filter(status=SentryAppStatus.PUBLISHED)
elif status == "unpublished":
@@ -34,11 +40,7 @@ def get(self, request: Request) -> Response:
if not is_active_superuser(request):
queryset = queryset.filter(owner__in=request.user.get_orgs())
else:
- if is_active_superuser(request):
- queryset = SentryApp.objects.all()
- else:
- queryset = SentryApp.objects.filter(status=SentryAppStatus.PUBLISHED)
-
+ queryset = SentryApp.objects.all()
return self.paginate(
request=request,
queryset=queryset,
@@ -102,8 +104,8 @@ def post(self, request: Request, organization) -> Response:
# log any errors with schema
if "schema" in serializer.errors:
+ name = "sentry_app.schema_validation_error"
for error_message in serializer.errors["schema"]:
- name = "sentry_app.schema_validation_error"
log_info = {
"schema": json.dumps(data["schema"]),
"user_id": request.user.id,
@@ -116,7 +118,8 @@ def post(self, request: Request, organization) -> Response:
return Response(serializer.errors, status=400)
def _has_hook_events(self, request: Request):
- if not request.json_body.get("events"):
- return False
-
- return "error" in request.json_body["events"]
+ return (
+ "error" in request.json_body["events"]
+ if request.json_body.get("events")
+ else False
+ )
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_requests.py b/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_requests.py
index 7e52f64a5479a9..9803b459c6701b 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_requests.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_requests.py
@@ -23,7 +23,7 @@ def get(self, request: Request, installation) -> Response:
}
if project:
- kwargs.update({"project": project})
+ kwargs["project"] = project
try:
choices = external_requests.SelectRequester.run(**kwargs)
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/publish_request.py b/src/sentry/api/endpoints/integrations/sentry_apps/publish_request.py
index 0978dc0be059a0..6b3e8b700bd903 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/publish_request.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/publish_request.py
@@ -56,7 +56,10 @@ def post(self, request: Request, sentry_app) -> Response:
for question_pair in request.data.get("questionnaire"):
message += "\n\n>{}\n{}".format(question_pair["question"], question_pair["answer"])
- subject = "Sentry Integration Publication Request from %s" % sentry_app.owner.slug
+ subject = (
+ f"Sentry Integration Publication Request from {sentry_app.owner.slug}"
+ )
+
email.send_mail(
subject,
diff --git a/src/sentry/api/endpoints/internal/mail.py b/src/sentry/api/endpoints/internal/mail.py
index f9bdb1d1d17d05..29e5c942a4b11d 100644
--- a/src/sentry/api/endpoints/internal/mail.py
+++ b/src/sentry/api/endpoints/internal/mail.py
@@ -33,12 +33,13 @@ def post(self, request: Request) -> Response:
)
try:
send_mail(
- "{} Test Email".format(options.get("mail.subject-prefix")),
+ f'{options.get("mail.subject-prefix")} Test Email',
body,
options.get("mail.from"),
[request.user.email],
fail_silently=False,
)
+
except Exception as e:
error = str(e)
diff --git a/src/sentry/api/endpoints/organization_auth_providers.py b/src/sentry/api/endpoints/organization_auth_providers.py
index aa75ddb02d3283..4509c273d4074f 100644
--- a/src/sentry/api/endpoints/organization_auth_providers.py
+++ b/src/sentry/api/endpoints/organization_auth_providers.py
@@ -17,8 +17,9 @@ def get(self, request: Request, organization) -> Response:
:pparam string organization_slug: the organization short name
:auth: required
"""
- provider_list = []
- for k, v in manager:
- provider_list.append({"key": k, "name": v.name, "requiredFeature": v.required_feature})
+ provider_list = [
+ {"key": k, "name": v.name, "requiredFeature": v.required_feature}
+ for k, v in manager
+ ]
return Response(serialize(provider_list, request.user))
diff --git a/src/sentry/api/endpoints/organization_codeowners_associations.py b/src/sentry/api/endpoints/organization_codeowners_associations.py
index e8c26ea15eb6b8..0c98f786db716e 100644
--- a/src/sentry/api/endpoints/organization_codeowners_associations.py
+++ b/src/sentry/api/endpoints/organization_codeowners_associations.py
@@ -24,8 +24,7 @@ def get(self, request: Request, organization: Organization):
status=ObjectStatus.VISIBLE,
)
project_code_owners = ProjectCodeOwners.objects.filter(project__in=projects)
- provider = request.GET.get("provider")
- if provider:
+ if provider := request.GET.get("provider"):
project_code_owners = project_code_owners.filter(
repository_project_path_config__organization_integration__integration__provider=provider
)
diff --git a/src/sentry/api/endpoints/organization_dashboard_details.py b/src/sentry/api/endpoints/organization_dashboard_details.py
index eeb5569c45851f..d9172352ba57cb 100644
--- a/src/sentry/api/endpoints/organization_dashboard_details.py
+++ b/src/sentry/api/endpoints/organization_dashboard_details.py
@@ -76,18 +76,18 @@ def delete(self, request: Request, organization, dashboard) -> Response:
num_dashboards = Dashboard.objects.filter(organization=organization).count()
num_tombstones = DashboardTombstone.objects.filter(organization=organization).count()
- if isinstance(dashboard, dict):
- if num_dashboards > 0:
- DashboardTombstone.objects.get_or_create(
- organization=organization, slug=dashboard["id"]
- )
- else:
- return self.respond({"Cannot delete last Dashboard."}, status=409)
- elif (num_dashboards > 1) or (num_tombstones == 0):
- dashboard.delete()
- else:
+ if isinstance(dashboard, dict) and num_dashboards > 0:
+ DashboardTombstone.objects.get_or_create(
+ organization=organization, slug=dashboard["id"]
+ )
+ elif (
+ isinstance(dashboard, dict)
+ or num_dashboards <= 1
+ and num_tombstones != 0
+ ):
return self.respond({"Cannot delete last Dashboard."}, status=409)
-
+ else:
+ dashboard.delete()
return self.respond(status=204)
def put(self, request: Request, organization, dashboard) -> Response:
diff --git a/src/sentry/api/endpoints/organization_dashboards.py b/src/sentry/api/endpoints/organization_dashboards.py
index 883f133e26c3df..e232efd0ead53c 100644
--- a/src/sentry/api/endpoints/organization_dashboards.py
+++ b/src/sentry/api/endpoints/organization_dashboards.py
@@ -161,11 +161,9 @@ def post(self, request: Request, organization, retry=0) -> Response:
return Response("Dashboard title already taken", status=409)
title = request.data["title"]
- match = re.match(DUPLICATE_TITLE_PATTERN, title)
- if match:
- partial_title = match.group(1)
- copy_counter = match.group(2)
- if copy_counter:
+ if match := re.match(DUPLICATE_TITLE_PATTERN, title):
+ partial_title = match[1]
+ if copy_counter := match[2]:
request.data["title"] = f"{partial_title} copy {int(copy_counter) + 1}"
else:
request.data["title"] = f"{partial_title} copy 1"
diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py
index 453e67ebb0cf5f..7606a8c64656a7 100644
--- a/src/sentry/api/endpoints/organization_details.py
+++ b/src/sentry/api/endpoints/organization_details.py
@@ -250,9 +250,9 @@ def validate_trustedRelays(self, value):
"Organization does not have the relay feature enabled"
)
- # make sure we don't have multiple instances of one public key
- public_keys = set()
if value is not None:
+ # make sure we don't have multiple instances of one public key
+ public_keys = set()
for key_info in value:
key = key_info.get("public_key")
if key in public_keys:
@@ -323,19 +323,18 @@ def save_trusted_relays(self, incoming, changed_data, organization):
modified = True
if modified:
- # we have some modifications create a log message
- if existing is not None:
- # generate an update log message
- changed_data["trustedRelays"] = f"from {existing} to {incoming}"
- existing.value = incoming
- existing.save()
- else:
+ if existing is None:
# first time we set trusted relays, generate a create log message
changed_data["trustedRelays"] = f"to {incoming}"
OrganizationOption.objects.set_value(
organization=organization, key=option_key, value=incoming
)
+ else:
+ # generate an update log message
+ changed_data["trustedRelays"] = f"from {existing} to {incoming}"
+ existing.value = incoming
+ existing.save()
return incoming
def save(self):
@@ -408,16 +407,15 @@ def save(self):
# check if fields changed
for f, v in org_tracked_field.items():
- if f != "flag_field":
- if has_changed(org, f):
- old_val = old_value(org, f)
- changed_data[f] = f"from {old_val} to {v}"
- else:
+ if f == "flag_field":
# check if flag fields changed
for f, v in org_tracked_field["flag_field"].items():
if flag_has_changed(org, f):
changed_data[f] = f"to {v}"
+ elif has_changed(org, f):
+ old_val = old_value(org, f)
+ changed_data[f] = f"from {old_val} to {v}"
org.save()
if "avatar" in data or "avatarType" in data:
@@ -549,10 +547,9 @@ def handle_delete(self, request: Request, organization):
return self.respond({"detail": ERR_DEFAULT_ORG}, status=400)
with transaction.atomic():
- updated = Organization.objects.filter(
+ if updated := Organization.objects.filter(
id=organization.id, status=OrganizationStatus.VISIBLE
- ).update(status=OrganizationStatus.PENDING_DELETION)
- if updated:
+ ).update(status=OrganizationStatus.PENDING_DELETION):
organization.status = OrganizationStatus.PENDING_DELETION
schedule = ScheduledDeletion.schedule(organization, days=1, actor=request.user)
entry = self.create_audit_entry(
diff --git a/src/sentry/api/endpoints/organization_events_facets_performance.py b/src/sentry/api/endpoints/organization_events_facets_performance.py
index 32f6fd58874628..b44c7b57215886 100644
--- a/src/sentry/api/endpoints/organization_events_facets_performance.py
+++ b/src/sentry/api/endpoints/organization_events_facets_performance.py
@@ -174,7 +174,7 @@ def data_fn(offset, limit, raw_limit):
return {"tags": [], "histogram": {"data": []}}
# Only pass exactly the number of tags so histogram fetches correct number of rows
- histogram_top_tags = top_tags[0:raw_limit]
+ histogram_top_tags = top_tags[:raw_limit]
histogram = query_facet_performance_key_histogram(
top_tags=histogram_top_tags,
@@ -223,13 +223,12 @@ def get_result(self, limit, cursor=None):
# Use raw_limit for the histogram itself so bucket calculations are correct
data = self.data_fn(offset=offset, limit=limit + 1, raw_limit=limit)
- if isinstance(data["tags"], list):
- has_more = len(data["tags"]) == limit + 1
- if has_more:
- data["tags"].pop()
- else:
+ if not isinstance(data["tags"], list):
raise NotImplementedError
+ has_more = len(data["tags"]) == limit + 1
+ if has_more:
+ data["tags"].pop()
return CursorResult(
data,
prev=Cursor(0, max(0, offset - limit), True, offset > 0),
@@ -373,13 +372,13 @@ def query_facet_performance(
dynamic_sample_rate = 0 if transaction_count <= 0 else (target_sample / transaction_count)
sample_rate = min(max(dynamic_sample_rate, 0), 1) if sampling_enabled else None
- frequency_sample_rate = sample_rate if sample_rate else 1
+ frequency_sample_rate = sample_rate or 1
tag_key_limit = limit if tag_key else 1
with sentry_sdk.start_span(
- op="discover.discover", description="facets.filter_transform"
- ) as span:
+ op="discover.discover", description="facets.filter_transform"
+ ) as span:
span.set_data("query", filter_query)
tag_query = QueryBuilder(
dataset=Dataset.Discover,
@@ -389,8 +388,9 @@ def query_facet_performance(
sample_rate=sample_rate,
turbo=sample_rate is not None,
limit=limit,
- limitby=["tags_key", tag_key_limit] if not tag_key else None,
+ limitby=None if tag_key else ["tags_key", tag_key_limit],
)
+
translated_aggregate_column = tag_query.resolve_column(aggregate_column)
# Aggregate (avg) and count of all transactions for this query
@@ -472,7 +472,7 @@ def query_facet_performance_key_histogram(
tag_values = [x["tags_value"] for x in top_tags]
- results = discover.histogram_query(
+ return discover.histogram_query(
fields=[
aggregate_column,
],
@@ -489,4 +489,3 @@ def query_facet_performance_key_histogram(
referrer="api.organization-events-facets-performance-histogram",
normalize_results=False,
)
- return results
diff --git a/src/sentry/api/endpoints/organization_events_histogram.py b/src/sentry/api/endpoints/organization_events_histogram.py
index 55ead4215845d2..f770a76f8b3028 100644
--- a/src/sentry/api/endpoints/organization_events_histogram.py
+++ b/src/sentry/api/endpoints/organization_events_histogram.py
@@ -61,22 +61,21 @@ def get(self, request: Request, organization) -> Response:
with sentry_sdk.start_span(op="discover.endpoint", description="histogram"):
serializer = HistogramSerializer(data=request.GET)
- if serializer.is_valid():
- data = serializer.validated_data
-
- with self.handle_query_errors():
- results = dataset.histogram_query(
- data["field"],
- data.get("query"),
- params,
- data["numBuckets"],
- data["precision"],
- min_value=data.get("min"),
- max_value=data.get("max"),
- data_filter=data.get("dataFilter"),
- referrer="api.organization-events-histogram",
- )
-
- return Response(results)
- else:
+ if not serializer.is_valid():
return Response(serializer.errors, status=400)
+ data = serializer.validated_data
+
+ with self.handle_query_errors():
+ results = dataset.histogram_query(
+ data["field"],
+ data.get("query"),
+ params,
+ data["numBuckets"],
+ data["precision"],
+ min_value=data.get("min"),
+ max_value=data.get("max"),
+ data_filter=data.get("dataFilter"),
+ referrer="api.organization-events-histogram",
+ )
+
+ return Response(results)
diff --git a/src/sentry/api/endpoints/organization_events_spans_histogram.py b/src/sentry/api/endpoints/organization_events_spans_histogram.py
index 5ed83d3bb3f982..63e9ab6b7e4c44 100644
--- a/src/sentry/api/endpoints/organization_events_spans_histogram.py
+++ b/src/sentry/api/endpoints/organization_events_spans_histogram.py
@@ -51,22 +51,21 @@ def get(self, request: Request, organization) -> Response:
with sentry_sdk.start_span(op="discover.endpoint", description="spans_histogram"):
serializer = SpansHistogramSerializer(data=request.GET)
- if serializer.is_valid():
- data = serializer.validated_data
+ if not serializer.is_valid():
+ return Response(serializer.errors, status=400)
+ data = serializer.validated_data
- with self.handle_query_errors():
- results = discover.spans_histogram_query(
- data["span"],
- data.get("query"),
- params,
- data["numBuckets"],
- data["precision"],
- min_value=data.get("min"),
- max_value=data.get("max"),
- data_filter=data.get("dataFilter"),
- referrer="api.organization-events-spans-histogram",
- )
+ with self.handle_query_errors():
+ results = discover.spans_histogram_query(
+ data["span"],
+ data.get("query"),
+ params,
+ data["numBuckets"],
+ data["precision"],
+ min_value=data.get("min"),
+ max_value=data.get("max"),
+ data_filter=data.get("dataFilter"),
+ referrer="api.organization-events-spans-histogram",
+ )
- return Response(results)
- else:
- return Response(serializer.errors, status=400)
+ return Response(results)
diff --git a/src/sentry/api/endpoints/organization_events_spans_performance.py b/src/sentry/api/endpoints/organization_events_spans_performance.py
index fe4f6c4295253d..7223ae72a1fc8d 100644
--- a/src/sentry/api/endpoints/organization_events_spans_performance.py
+++ b/src/sentry/api/endpoints/organization_events_spans_performance.py
@@ -720,11 +720,15 @@ def get_span_description(
if trace_context["op"] == span_op and int(trace_context["hash"], 16) == int(span_group, 16):
return data["transaction"]
- for span in data.get("spans", []):
- if span["op"] == span_op and int(span["hash"], 16) == int(span_group, 16):
- return span.get("description")
-
- return None
+ return next(
+ (
+ span.get("description")
+ for span in data.get("spans", [])
+ if span["op"] == span_op
+ and int(span["hash"], 16) == int(span_group, 16)
+ ),
+ None,
+ )
def get_example_transaction(
diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py
index 21e9f88cca4081..3d5a6f05617b5c 100644
--- a/src/sentry/api/endpoints/organization_events_stats.py
+++ b/src/sentry/api/endpoints/organization_events_stats.py
@@ -179,11 +179,12 @@ def get_event_stats(
rollup=rollup,
limit=top_events,
organization=organization,
- referrer=referrer + ".find-topn",
+ referrer=f"{referrer}.find-topn",
allow_empty=False,
zerofill_results=zerofill_results,
include_other=include_other,
)
+
query_details = {
"selected_columns": query_columns,
"query": query,
diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py
index 884718e4cf1d69..9c71db27b50d77 100644
--- a/src/sentry/api/endpoints/organization_events_trace.py
+++ b/src/sentry/api/endpoints/organization_events_trace.py
@@ -318,9 +318,9 @@ def record_analytics(
sentry_sdk.set_tag(
"trace_view.transactions.grouped", format_grouped_length(len_transactions)
)
- projects: Set[int] = set()
- for transaction in transactions:
- projects.add(transaction["project.id"])
+ projects: Set[int] = {
+ transaction["project.id"] for transaction in transactions
+ }
len_projects = len(projects)
sentry_sdk.set_tag("trace_view.projects", len_projects)
@@ -664,7 +664,7 @@ def serialize(
root_traces.sort(key=child_sort_key)
orphans.sort(key=child_sort_key)
- if len(orphans) > 0:
+ if orphans:
sentry_sdk.set_tag("discover.trace-view.contains-orphans", "yes")
logger.warning("discover.trace-view.contains-orphans", extra=warning_extra)
diff --git a/src/sentry/api/endpoints/organization_events_vitals.py b/src/sentry/api/endpoints/organization_events_vitals.py
index 82edededf6c59e..3ab0e82124e816 100644
--- a/src/sentry/api/endpoints/organization_events_vitals.py
+++ b/src/sentry/api/endpoints/organization_events_vitals.py
@@ -31,7 +31,7 @@ def get(self, request: Request, organization) -> Response:
return Response([])
vitals = [vital.lower() for vital in request.GET.getlist("vital", [])]
- if len(vitals) == 0:
+ if not vitals:
raise ParseError(detail="Need to pass at least one vital")
performance_use_metrics = features.has(
diff --git a/src/sentry/api/endpoints/organization_group_index.py b/src/sentry/api/endpoints/organization_group_index.py
index 488bf6fb9d3f8b..3504bdb6ced0b3 100644
--- a/src/sentry/api/endpoints/organization_group_index.py
+++ b/src/sentry/api/endpoints/organization_group_index.py
@@ -65,7 +65,7 @@ def inbox_search(
if end_params:
end = min(end_params)
- end = end if end else now + ALLOWED_FUTURE_DELTA
+ end = end or now + ALLOWED_FUTURE_DELTA
# We only want to search back a week at most, since that's the oldest inbox rows
# can be.
@@ -77,11 +77,11 @@ def inbox_search(
if start >= end:
return Paginator(Group.objects.none()).get_result()
- # Make sure search terms are valid
- invalid_search_terms = [
- str(sf) for sf in search_filters if sf.key.name not in allowed_inbox_search_terms
- ]
- if invalid_search_terms:
+ if invalid_search_terms := [
+ str(sf)
+ for sf in search_filters
+ if sf.key.name not in allowed_inbox_search_terms
+ ]:
raise InvalidSearchQuery(f"Invalid search terms for 'inbox' search: {invalid_search_terms}")
# Make sure this is an inbox search
@@ -112,8 +112,9 @@ def inbox_search(
.distinct()
)
- owner_search = get_search_filter(search_filters, "assigned_or_suggested", "IN")
- if owner_search:
+ if owner_search := get_search_filter(
+ search_filters, "assigned_or_suggested", "IN"
+ ):
qs = qs.filter(
assigned_or_suggested_filter(owner_search, projects, field_filter="group_id")
)
@@ -166,7 +167,7 @@ def _search(
assert "environment" not in extra_query_kwargs
query_kwargs.update(extra_query_kwargs)
- query_kwargs["environments"] = environments if environments else None
+ query_kwargs["environments"] = environments or None
if query_kwargs["sort_by"] == "inbox":
query_kwargs.pop("sort_by")
result = inbox_search(**query_kwargs)
@@ -257,12 +258,8 @@ def get(self, request: Request, organization) -> Response:
{"detail": "You do not have the multi project stream feature enabled"}, status=400
)
- # we ignore date range for both short id and event ids
- query = request.GET.get("query", "").strip()
- if query:
- # check to see if we've got an event ID
- event_id = normalize_event_id(query)
- if event_id:
+ if query := request.GET.get("query", "").strip():
+ if event_id := normalize_event_id(query):
# For a direct hit lookup we want to use any passed project ids
# (we've already checked permissions on these) plus any other
# projects that the user is a member of. This gives us a better
@@ -283,12 +280,12 @@ def get(self, request: Request, organization) -> Response:
return Response(serialize(groups, request.user, serializer()))
group = get_by_short_id(organization.id, request.GET.get("shortIdLookup"), query)
- if group is not None:
- # check all projects user has access to
- if request.access.has_project_access(group.project):
- response = Response(serialize([group], request.user, serializer()))
- response["X-Sentry-Direct-Hit"] = "1"
- return response
+ if group is not None and request.access.has_project_access(
+ group.project
+ ):
+ response = Response(serialize([group], request.user, serializer()))
+ response["X-Sentry-Direct-Hit"] = "1"
+ return response
# If group ids specified, just ignore any query components
try:
diff --git a/src/sentry/api/endpoints/organization_group_index_stats.py b/src/sentry/api/endpoints/organization_group_index_stats.py
index 96753b0c3c3264..755c183bf6ecb6 100644
--- a/src/sentry/api/endpoints/organization_group_index_stats.py
+++ b/src/sentry/api/endpoints/organization_group_index_stats.py
@@ -74,14 +74,13 @@ def get(self, request: Request, organization) -> Response:
detail="You should include `groups` with your request. (i.e. groups=1,2,3)"
)
- else:
- groups = list(Group.objects.filter(id__in=group_ids, project_id__in=project_ids))
- if not groups:
- raise ParseError(detail="No matching groups found")
- elif len(groups) > 25:
- raise ParseError(detail="Too many groups requested.")
- elif any(g for g in groups if not request.access.has_project_access(g.project)):
- raise PermissionDenied
+ groups = list(Group.objects.filter(id__in=group_ids, project_id__in=project_ids))
+ if not groups:
+ raise ParseError(detail="No matching groups found")
+ elif len(groups) > 25:
+ raise ParseError(detail="Too many groups requested.")
+ elif any(g for g in groups if not request.access.has_project_access(g.project)):
+ raise PermissionDenied
if stats_period not in (None, "", "24h", "14d", "auto"):
raise ParseError(detail=ERR_INVALID_STATS_PERIOD)
@@ -112,5 +111,4 @@ def get(self, request: Request, organization) -> Response:
),
)
- response = Response(context)
- return response
+ return Response(context)
diff --git a/src/sentry/api/endpoints/organization_index.py b/src/sentry/api/endpoints/organization_index.py
index 5d07fdb97d4ecb..a0287d5a6fbd9a 100644
--- a/src/sentry/api/endpoints/organization_index.py
+++ b/src/sentry/api/endpoints/organization_index.py
@@ -76,12 +76,13 @@ def get(self, request: Request) -> Response:
member_set__user=request.user,
status=OrganizationStatus.VISIBLE,
)
- org_results = []
- for org in sorted(queryset, key=lambda x: x.name):
- # O(N) query
- org_results.append(
- {"organization": serialize(org), "singleOwner": org.has_single_owner()}
- )
+ org_results = [
+ {
+ "organization": serialize(org),
+ "singleOwner": org.has_single_owner(),
+ }
+ for org in sorted(queryset, key=lambda x: x.name)
+ ]
return Response(org_results)
@@ -90,8 +91,7 @@ def get(self, request: Request) -> Response:
id__in=OrganizationMember.objects.filter(user=request.user).values("organization")
)
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "query":
diff --git a/src/sentry/api/endpoints/organization_issues_count.py b/src/sentry/api/endpoints/organization_issues_count.py
index 2bdae71f482d41..b5e5f838275a21 100644
--- a/src/sentry/api/endpoints/organization_issues_count.py
+++ b/src/sentry/api/endpoints/organization_issues_count.py
@@ -31,8 +31,7 @@ def _count(
):
query_kwargs = {"projects": projects}
- query = query.strip()
- if query:
+ if query := query.strip():
search_filters = convert_query_values(
parse_search_query(query), projects, request.user, environments
)
@@ -41,9 +40,9 @@ def _count(
if extra_query_kwargs is not None:
assert "environment" not in extra_query_kwargs
- query_kwargs.update(extra_query_kwargs)
+ query_kwargs |= extra_query_kwargs
- query_kwargs["environments"] = environments if environments else None
+ query_kwargs["environments"] = environments or None
query_kwargs["max_hits"] = ISSUES_COUNT_MAX_HITS_LIMIT
diff --git a/src/sentry/api/endpoints/organization_member/details.py b/src/sentry/api/endpoints/organization_member/details.py
index 698f35d6287dad..e0aabee035f34e 100644
--- a/src/sentry/api/endpoints/organization_member/details.py
+++ b/src/sentry/api/endpoints/organization_member/details.py
@@ -182,11 +182,10 @@ def put(
return Response({"detail": ERR_RATE_LIMITED}, status=429)
if result.get("regenerate"):
- if request.access.has_scope("member:admin"):
- member.regenerate_token()
- member.save()
- else:
+ if not request.access.has_scope("member:admin"):
return Response({"detail": ERR_INSUFFICIENT_SCOPE}, status=400)
+ member.regenerate_token()
+ member.save()
if member.token_expired:
return Response({"detail": ERR_EXPIRED}, status=400)
member.send_invite_email()
@@ -215,8 +214,7 @@ def put(
[OrganizationMemberTeam(team=team, organizationmember=member) for team in teams]
)
- assigned_role = result.get("role")
- if assigned_role:
+ if assigned_role := result.get("role"):
allowed_roles = get_allowed_roles(request, organization)
allowed_role_ids = {r.id for r in allowed_roles}
diff --git a/src/sentry/api/endpoints/organization_member/index.py b/src/sentry/api/endpoints/organization_member/index.py
index 106a25f3e3bd01..c8056f4f80c415 100644
--- a/src/sentry/api/endpoints/organization_member/index.py
+++ b/src/sentry/api/endpoints/organization_member/index.py
@@ -51,16 +51,19 @@ def validate_email(self, email):
)
if queryset.filter(invite_status=InviteStatus.APPROVED.value).exists():
- raise MemberConflictValidationError("The user %s is already a member" % email)
+ raise MemberConflictValidationError(f"The user {email} is already a member")
- if not self.context.get("allow_existing_invite_request"):
- if queryset.filter(
+ if (
+ not self.context.get("allow_existing_invite_request")
+ and queryset.filter(
Q(invite_status=InviteStatus.REQUESTED_TO_BE_INVITED.value)
| Q(invite_status=InviteStatus.REQUESTED_TO_JOIN.value)
- ).exists():
- raise MemberConflictValidationError(
- "There is an existing invite request for %s" % email
- )
+ ).exists()
+ ):
+ raise MemberConflictValidationError(
+ f"There is an existing invite request for {email}"
+ )
+
return email
def validate_teams(self, teams):
@@ -96,8 +99,7 @@ def get(self, request: Request, organization) -> Response:
.order_by("email", "user__email")
)
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "email":
diff --git a/src/sentry/api/endpoints/organization_member/requests/join.py b/src/sentry/api/endpoints/organization_member/requests/join.py
index 0c59d0c2b4c7c5..b6cbb4271a8cc7 100644
--- a/src/sentry/api/endpoints/organization_member/requests/join.py
+++ b/src/sentry/api/endpoints/organization_member/requests/join.py
@@ -79,9 +79,9 @@ def post(self, request: Request, organization) -> Response:
result = serializer.validated_data
email = result["email"]
- member = create_organization_join_request(organization, email, ip_address)
-
- if member:
+ if member := create_organization_join_request(
+ organization, email, ip_address
+ ):
async_send_notification(JoinRequestNotification, member, request.user)
# legacy analytics
join_request_created.send_robust(sender=self, member=member)
diff --git a/src/sentry/api/endpoints/organization_member_unreleased_commits.py b/src/sentry/api/endpoints/organization_member_unreleased_commits.py
index 8e046b4cbb5e2c..1a1a49ddaad4b4 100644
--- a/src/sentry/api/endpoints/organization_member_unreleased_commits.py
+++ b/src/sentry/api/endpoints/organization_member_unreleased_commits.py
@@ -50,9 +50,7 @@ def get(self, request: Request, organization, member) -> Response:
)
params = [organization.id, organization.id]
- for e in email_list:
- params.append(e.upper())
-
+ params.extend(e.upper() for e in email_list)
queryset = Commit.objects.raw(query % (", ".join("%s" for _ in email_list),), params)
results = list(queryset)
diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py
index 44388bb77dcb62..3de20a0b9b521e 100644
--- a/src/sentry/api/endpoints/organization_metrics.py
+++ b/src/sentry/api/endpoints/organization_metrics.py
@@ -143,13 +143,12 @@ def get_result(self, limit, cursor=None):
offset = cursor.offset if cursor is not None else 0
data = self.data_fn(offset=offset, limit=limit + 1)
- if isinstance(data.get("groups"), list):
- has_more = len(data["groups"]) == limit + 1
- if has_more:
- data["groups"].pop()
- else:
+ if not isinstance(data.get("groups"), list):
raise NotImplementedError
+ has_more = len(data["groups"]) == limit + 1
+ if has_more:
+ data["groups"].pop()
return CursorResult(
data,
prev=Cursor(0, max(0, offset - limit), True, offset > 0),
diff --git a/src/sentry/api/endpoints/organization_monitors.py b/src/sentry/api/endpoints/organization_monitors.py
index 73e99b3e45039c..40332475e8d94d 100644
--- a/src/sentry/api/endpoints/organization_monitors.py
+++ b/src/sentry/api/endpoints/organization_monitors.py
@@ -45,8 +45,7 @@ def get(self, request: Request, organization) -> Response:
queryset = Monitor.objects.filter(
organization_id=organization.id, project_id__in=filter_params["project_id"]
).exclude(status__in=[MonitorStatus.PENDING_DELETION, MonitorStatus.DELETION_IN_PROGRESS])
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "query":
diff --git a/src/sentry/api/endpoints/organization_pinned_searches.py b/src/sentry/api/endpoints/organization_pinned_searches.py
index 1029f4adcb3439..dadf644a710a5f 100644
--- a/src/sentry/api/endpoints/organization_pinned_searches.py
+++ b/src/sentry/api/endpoints/organization_pinned_searches.py
@@ -66,7 +66,11 @@ def delete(self, request: Request, organization) -> Response:
try:
search_type = SearchType(int(request.data.get("type", 0)))
except ValueError as e:
- return Response({"detail": "Invalid input for `type`. Error: %s" % str(e)}, status=400)
+ return Response(
+ {"detail": f"Invalid input for `type`. Error: {str(e)}"},
+ status=400,
+ )
+
SavedSearch.objects.filter(
organization=organization, owner=request.user, type=search_type.value
).delete()
diff --git a/src/sentry/api/endpoints/organization_profiling_profiles.py b/src/sentry/api/endpoints/organization_profiling_profiles.py
index 231889a5d30fce..87b3a7a2153f79 100644
--- a/src/sentry/api/endpoints/organization_profiling_profiles.py
+++ b/src/sentry/api/endpoints/organization_profiling_profiles.py
@@ -28,12 +28,11 @@ def get_profiling_params(self, request: Request, organization: Organization) ->
except InvalidSearchQuery as err:
raise ParseError(detail=str(err))
- params.update(
- {
- key: value.isoformat() if key in {"start", "end"} else value
- for key, value in self.get_filter_params(request, organization).items()
- }
- )
+ params |= {
+ key: value.isoformat() if key in {"start", "end"} else value
+ for key, value in self.get_filter_params(request, organization).items()
+ }
+
return params
diff --git a/src/sentry/api/endpoints/organization_recent_searches.py b/src/sentry/api/endpoints/organization_recent_searches.py
index df9ea7136dbab7..8cacf159b5d58e 100644
--- a/src/sentry/api/endpoints/organization_recent_searches.py
+++ b/src/sentry/api/endpoints/organization_recent_searches.py
@@ -36,12 +36,20 @@ def get(self, request: Request, organization) -> Response:
try:
search_type = SearchType(int(request.GET.get("type", 0)))
except ValueError as e:
- return Response({"detail": "Invalid input for `type`. Error: %s" % str(e)}, status=400)
+ return Response(
+ {"detail": f"Invalid input for `type`. Error: {str(e)}"},
+ status=400,
+ )
+
try:
limit = int(request.GET.get("limit", 3))
except ValueError as e:
- return Response({"detail": "Invalid input for `limit`. Error: %s" % str(e)}, status=400)
+ return Response(
+ {"detail": f"Invalid input for `limit`. Error: {str(e)}"},
+ status=400,
+ )
+
query_kwargs = {"organization": organization, "user": request.user, "type": search_type}
diff --git a/src/sentry/api/endpoints/organization_release_details.py b/src/sentry/api/endpoints/organization_release_details.py
index c329e70066329b..68ff2090408e2a 100644
--- a/src/sentry/api/endpoints/organization_release_details.py
+++ b/src/sentry/api/endpoints/organization_release_details.py
@@ -317,39 +317,37 @@ def get(self, request: Request, organization, version) -> Response:
if project_id:
# Add sessions time bound to current project meta data
environments = set(request.GET.getlist("environment")) or None
- current_project_meta.update(
- {
- **release_health.get_release_sessions_time_bounds(
- project_id=int(project_id),
- release=release.version,
- org_id=organization.id,
- environments=environments,
- )
- }
- )
+ current_project_meta |= {
+ **release_health.get_release_sessions_time_bounds(
+ project_id=int(project_id),
+ release=release.version,
+ org_id=organization.id,
+ environments=environments,
+ )
+ }
+
# Get prev and next release to current release
try:
filter_params = self.get_filter_params(request, organization)
- current_project_meta.update(
- {
- **self.get_adjacent_releases_to_current_release(
- org=organization,
- release=release,
- filter_params=filter_params,
- stats_period=summary_stats_period,
- sort=sort,
- status_filter=status_filter,
- query=query,
- ),
- **self.get_first_and_last_releases(
- org=organization,
- environment=filter_params.get("environment"),
- project_id=[project_id],
- sort=sort,
- ),
- }
- )
+ current_project_meta |= {
+ **self.get_adjacent_releases_to_current_release(
+ org=organization,
+ release=release,
+ filter_params=filter_params,
+ stats_period=summary_stats_period,
+ sort=sort,
+ status_filter=status_filter,
+ query=query,
+ ),
+ **self.get_first_and_last_releases(
+ org=organization,
+ environment=filter_params.get("environment"),
+ project_id=[project_id],
+ sort=sort,
+ ),
+ }
+
except InvalidSortException:
return Response({"detail": "invalid sort"}, status=400)
@@ -461,10 +459,8 @@ def put(self, request: Request, organization, version) -> Response:
}
for r in result.get("headCommits", [])
]
- # Clear commits in release
- else:
- if result.get("refs") == []:
- release.clear_commits()
+ elif result.get("refs") == []:
+ release.clear_commits()
scope.set_tag("has_refs", bool(refs))
if refs:
diff --git a/src/sentry/api/endpoints/organization_releases.py b/src/sentry/api/endpoints/organization_releases.py
index 1050db0568d4f2..a2bc40ee184674 100644
--- a/src/sentry/api/endpoints/organization_releases.py
+++ b/src/sentry/api/endpoints/organization_releases.py
@@ -145,13 +145,13 @@ def debounce_update_release_health_data(organization, project_ids):
"""This causes a flush of snuba health data to the postgres tables once
per minute for the given projects.
"""
- # Figure out which projects need to get updates from the snuba.
- should_update = {}
cache_keys = ["debounce-health:%d" % id for id in project_ids]
cache_data = cache.get_many(cache_keys)
- for project_id, cache_key in zip(project_ids, cache_keys):
- if cache_data.get(cache_key) is None:
- should_update[project_id] = cache_key
+ should_update = {
+ project_id: cache_key
+ for project_id, cache_key in zip(project_ids, cache_keys)
+ if cache_data.get(cache_key) is None
+ }
if not should_update:
return
@@ -173,12 +173,7 @@ def debounce_update_release_health_data(organization, project_ids):
release__version__in=[x[1] for x in project_releases],
).values_list("project_id", "release__version")
)
- to_upsert = []
- for key in project_releases:
- if key not in existing:
- to_upsert.append(key)
-
- if to_upsert:
+ if to_upsert := [key for key in project_releases if key not in existing]:
dates = release_health.get_oldest_health_data_for_releases(to_upsert)
for project_id, version in to_upsert:
@@ -323,26 +318,26 @@ def qs_load_func(queryset, total_offset, qs_offset, limit):
: total_offset + limit
]
)
- releases_with_session_data = release_health.check_releases_have_health_data(
- organization.id,
- filter_params["project_id"],
- release_versions,
- filter_params["start"]
- if filter_params["start"]
- else datetime.utcnow() - timedelta(days=90),
- filter_params["end"] if filter_params["end"] else datetime.utcnow(),
+ releases_with_session_data = (
+ release_health.check_releases_have_health_data(
+ organization.id,
+ filter_params["project_id"],
+ release_versions,
+ filter_params["start"] or datetime.utcnow() - timedelta(days=90),
+ filter_params["end"] or datetime.utcnow(),
+ )
)
+
valid_versions = [
rv for rv in release_versions if rv not in releases_with_session_data
]
- results = list(
+ return list(
Release.objects.filter(
organization_id=organization.id,
version__in=valid_versions,
).order_by_recent()[qs_offset : qs_offset + limit]
)
- return results
paginator_cls = MergingOffsetPaginator
paginator_kwargs.update(
@@ -528,15 +523,7 @@ def post(self, request: Request, organization) -> Response:
scope.set_tag("failure_reason", "InvalidRepository")
return Response({"refs": [str(e)]}, status=400)
- if not created and not new_projects:
- # This is the closest status code that makes sense, and we want
- # a unique 2xx response code so people can understand when
- # behavior differs.
- # 208 Already Reported (WebDAV; RFC 5842)
- status = 208
- else:
- status = 201
-
+ status = 208 if not created and not new_projects else 201
analytics.record(
"release.created",
user_id=request.user.id if request.user and request.user.id else None,
diff --git a/src/sentry/api/endpoints/organization_repositories.py b/src/sentry/api/endpoints/organization_repositories.py
index d976c307d44386..4fdbf78cb81543 100644
--- a/src/sentry/api/endpoints/organization_repositories.py
+++ b/src/sentry/api/endpoints/organization_repositories.py
@@ -34,8 +34,7 @@ def get(self, request: Request, organization) -> Response:
queryset = Repository.objects.filter(organization_id=organization.id)
status = request.GET.get("status", "active")
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
queryset = queryset.filter(Q(name__icontains=query))
if status == "active":
queryset = queryset.filter(status=ObjectStatus.VISIBLE)
diff --git a/src/sentry/api/endpoints/organization_repository_details.py b/src/sentry/api/endpoints/organization_repository_details.py
index 49232fd0a05de7..f5342f9802fc5c 100644
--- a/src/sentry/api/endpoints/organization_repository_details.py
+++ b/src/sentry/api/endpoints/organization_repository_details.py
@@ -95,10 +95,10 @@ def delete(self, request: Request, organization, repo_id) -> Response:
raise ResourceDoesNotExist
with transaction.atomic():
- updated = Repository.objects.filter(
- id=repo.id, status__in=[ObjectStatus.VISIBLE, ObjectStatus.DISABLED]
- ).update(status=ObjectStatus.PENDING_DELETION)
- if updated:
+ if updated := Repository.objects.filter(
+ id=repo.id,
+ status__in=[ObjectStatus.VISIBLE, ObjectStatus.DISABLED],
+ ).update(status=ObjectStatus.PENDING_DELETION):
repo.status = ObjectStatus.PENDING_DELETION
# if repo doesn't have commits, delete immediately
diff --git a/src/sentry/api/endpoints/organization_searches.py b/src/sentry/api/endpoints/organization_searches.py
index 7daed9ae66d53a..6f966fc90e334e 100644
--- a/src/sentry/api/endpoints/organization_searches.py
+++ b/src/sentry/api/endpoints/organization_searches.py
@@ -37,7 +37,11 @@ def get(self, request: Request, organization) -> Response:
try:
search_type = SearchType(int(request.GET.get("type", 0)))
except ValueError as e:
- return Response({"detail": "Invalid input for `type`. Error: %s" % str(e)}, status=400)
+ return Response(
+ {"detail": f"Invalid input for `type`. Error: {str(e)}"},
+ status=400,
+ )
+
org_searches_q = Q(Q(owner=request.user) | Q(owner__isnull=True), organization=organization)
global_searches_q = Q(is_global=True)
saved_searches = list(
@@ -48,12 +52,10 @@ def get(self, request: Request, organization) -> Response:
)
results = []
if saved_searches:
- pinned_search = None
# If the saved search has an owner then it's the user's pinned
# search. The user can only have one pinned search.
results.append(saved_searches[0])
- if saved_searches[0].is_pinned:
- pinned_search = saved_searches[0]
+ pinned_search = saved_searches[0] if saved_searches[0].is_pinned else None
for saved_search in saved_searches[1:]:
# If a search has the same query and sort as the pinned search we
# want to use that search as the pinned search
@@ -80,9 +82,11 @@ def post(self, request: Request, organization) -> Response:
query=result["query"],
).exists():
return Response(
- {"detail": "Query {} already exists".format(result["query"])}, status=400
+ {"detail": f'Query {result["query"]} already exists'},
+ status=400,
)
+
try:
saved_search = SavedSearch.objects.create(
organization=organization,
@@ -101,11 +105,10 @@ def post(self, request: Request, organization) -> Response:
except IntegrityError:
return Response(
{
- "detail": "The combination ({}, {}, {}) for query {} already exists.".format(
- organization.id, result["name"], result["type"], result["query"]
- )
+ "detail": f'The combination ({organization.id}, {result["name"]}, {result["type"]}) for query {result["query"]} already exists.'
},
status=400,
)
+
return Response(serializer.errors, status=400)
diff --git a/src/sentry/api/endpoints/organization_sessions.py b/src/sentry/api/endpoints/organization_sessions.py
index 5a66deb1b2ec27..b88277029a629d 100644
--- a/src/sentry/api/endpoints/organization_sessions.py
+++ b/src/sentry/api/endpoints/organization_sessions.py
@@ -22,15 +22,10 @@ def get(self, request: Request, organization) -> Response:
def data_fn(offset: int, limit: int):
with self.handle_query_errors():
with sentry_sdk.start_span(
- op="sessions.endpoint", description="build_sessions_query"
- ):
- request_limit = None
- if request.GET.get("per_page") is not None:
- request_limit = limit
- request_offset = None
- if request.GET.get("cursor") is not None:
- request_offset = offset
-
+ op="sessions.endpoint", description="build_sessions_query"
+ ):
+ request_limit = limit if request.GET.get("per_page") is not None else None
+ request_offset = offset if request.GET.get("cursor") is not None else None
query = self.build_sessions_query(
request, organization, offset=request_offset, limit=request_limit
)
@@ -90,13 +85,12 @@ def get_result(self, limit, cursor=None):
offset = cursor.offset if cursor is not None else 0
data = self.data_fn(offset=offset, limit=limit + 1)
- if isinstance(data.get("groups"), list):
- has_more = len(data["groups"]) == limit + 1
- if has_more:
- data["groups"].pop()
- else:
+ if not isinstance(data.get("groups"), list):
raise NotImplementedError
+ has_more = len(data["groups"]) == limit + 1
+ if has_more:
+ data["groups"].pop()
return CursorResult(
data,
prev=Cursor(0, max(0, offset - limit), True, offset > 0),
diff --git a/src/sentry/api/endpoints/organization_slugs.py b/src/sentry/api/endpoints/organization_slugs.py
index 76e0389c5c3f30..a3f8b2c8b01bb7 100644
--- a/src/sentry/api/endpoints/organization_slugs.py
+++ b/src/sentry/api/endpoints/organization_slugs.py
@@ -56,9 +56,8 @@ def put(self, request: Request, organization) -> Response:
.exclude(id=project.id)
.first()
)
- if other is not None:
- if len(slugs) != len(slugs.values()):
- return Response({"detail": "Duplicate slug %s" % slug}, status=400)
+ if other is not None and len(slugs) != len(slugs.values()):
+ return Response({"detail": f"Duplicate slug {slug}"}, status=400)
project.slug = slug
project.update_option("sentry:reviewed-slug", True)
project.save()
diff --git a/src/sentry/api/endpoints/organization_stats.py b/src/sentry/api/endpoints/organization_stats.py
index 64cdbbd26d70d8..bb372a485f2b43 100644
--- a/src/sentry/api/endpoints/organization_stats.py
+++ b/src/sentry/api/endpoints/organization_stats.py
@@ -39,16 +39,15 @@ def get(self, request: Request, organization) -> Response:
elif group == "project":
team_list = Team.objects.get_for_user(organization=organization, user=request.user)
- project_ids = request.GET.getlist("projectID")
- if not project_ids:
+ if project_ids := request.GET.getlist("projectID"):
+ project_list = Project.objects.filter(teams__in=team_list, id__in=project_ids)
+ else:
project_list = []
for team in team_list:
project_list.extend(Project.objects.get_for_user(team=team, user=request.user))
- else:
- project_list = Project.objects.filter(teams__in=team_list, id__in=project_ids)
keys = list({p.id for p in project_list})
else:
- raise ValueError("Invalid group: %s" % group)
+ raise ValueError(f"Invalid group: {group}")
if "id" in request.GET:
id_filter_set = frozenset(map(int, request.GET.getlist("id")))
diff --git a/src/sentry/api/endpoints/organization_stats_v2.py b/src/sentry/api/endpoints/organization_stats_v2.py
index 9551c04213e8e4..416a5160acff7f 100644
--- a/src/sentry/api/endpoints/organization_stats_v2.py
+++ b/src/sentry/api/endpoints/organization_stats_v2.py
@@ -191,9 +191,9 @@ def get(self, request: Request, organization) -> Response:
def build_outcomes_query(self, request: Request, organization):
params = {"organization_id": organization.id}
- project_ids = self._get_projects_for_orgstats_query(request, organization)
-
- if project_ids:
+ if project_ids := self._get_projects_for_orgstats_query(
+ request, organization
+ ):
params["project_id"] = project_ids
return QueryDefinition(request.GET, params)
@@ -205,11 +205,10 @@ def _get_projects_for_orgstats_query(self, request: Request, organization):
req_proj_ids = self.get_requested_project_ids_unchecked(request)
if self._is_org_total_query(request, req_proj_ids):
return None
- else:
- projects = self.get_projects(request, organization, project_ids=req_proj_ids)
- if not projects:
- raise NoProjects("No projects available")
- return [p.id for p in projects]
+ projects = self.get_projects(request, organization, project_ids=req_proj_ids)
+ if not projects:
+ raise NoProjects("No projects available")
+ return [p.id for p in projects]
def _is_org_total_query(self, request: Request, project_ids):
return all(
diff --git a/src/sentry/api/endpoints/organization_teams.py b/src/sentry/api/endpoints/organization_teams.py
index 1dbf1f72742c2d..cf7ab4ff5a49db 100644
--- a/src/sentry/api/endpoints/organization_teams.py
+++ b/src/sentry/api/endpoints/organization_teams.py
@@ -82,9 +82,7 @@ def get(self, request: Request, organization) -> Response:
organization=organization, status=TeamStatus.VISIBLE
).order_by("slug")
- query = request.GET.get("query")
-
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "hasExternalTeams":
diff --git a/src/sentry/api/endpoints/organization_user_issues.py b/src/sentry/api/endpoints/organization_user_issues.py
index 49e147afe849cb..d441fb9835f365 100644
--- a/src/sentry/api/endpoints/organization_user_issues.py
+++ b/src/sentry/api/endpoints/organization_user_issues.py
@@ -23,15 +23,13 @@ def get(self, request: Request, organization, user_id) -> Response:
"team_id", flat=True
),
)
- has_team_access = any([request.access.has_team_access(t) for t in teams])
+ has_team_access = any(request.access.has_team_access(t) for t in teams)
if not has_team_access:
return Response([])
other_eusers = euser.find_similar_users(request.user)
- event_users = [euser] + list(other_eusers)
-
- if event_users:
+ if event_users := [euser] + list(other_eusers):
tags = tagstore.get_group_tag_values_for_users(event_users, limit=limit)
else:
tags = []
diff --git a/src/sentry/api/endpoints/organization_user_issues_search.py b/src/sentry/api/endpoints/organization_user_issues_search.py
index 6c9fe067f860f2..c7809fbe12fe21 100644
--- a/src/sentry/api/endpoints/organization_user_issues_search.py
+++ b/src/sentry/api/endpoints/organization_user_issues_search.py
@@ -29,9 +29,11 @@ def get(self, request: Request, organization) -> Response:
).values_list("id", flat=True)[:1000]
)
- event_users = list(EventUser.objects.filter(email=email, project_id__in=project_ids)[:1000])
-
- if event_users:
+ if event_users := list(
+ EventUser.objects.filter(email=email, project_id__in=project_ids)[
+ :1000
+ ]
+ ):
groups = Group.objects.filter(
id__in=tagstore.get_group_ids_for_users(
project_ids=list({e.project_id for e in event_users}),
diff --git a/src/sentry/api/endpoints/project_create_sample_transaction.py b/src/sentry/api/endpoints/project_create_sample_transaction.py
index d60b7ce64c9570..04cb9f622486ad 100644
--- a/src/sentry/api/endpoints/project_create_sample_transaction.py
+++ b/src/sentry/api/endpoints/project_create_sample_transaction.py
@@ -17,14 +17,16 @@
def get_json_name(project):
- for base_platform in base_platforms_with_transactions:
- if project.platform and project.platform.startswith(base_platform):
- # special case for javascript
- if base_platform == "javascript":
- return "react-transaction.json"
- return f"{base_platform}-transaction.json"
- # default
- return "react-transaction.json"
+ return next(
+ (
+ "react-transaction.json"
+ if base_platform == "javascript"
+ else f"{base_platform}-transaction.json"
+ for base_platform in base_platforms_with_transactions
+ if project.platform and project.platform.startswith(base_platform)
+ ),
+ "react-transaction.json",
+ )
def fix_event_data(data):
diff --git a/src/sentry/api/endpoints/project_details.py b/src/sentry/api/endpoints/project_details.py
index ed9164dda5539c..3b79e9167e450c 100644
--- a/src/sentry/api/endpoints/project_details.py
+++ b/src/sentry/api/endpoints/project_details.py
@@ -54,8 +54,7 @@ def clean_newline_inputs(value, case_insensitive=True):
for v in value.split("\n"):
if case_insensitive:
v = v.lower()
- v = v.strip()
- if v:
+ if v := v.strip():
result.append(v)
return result
@@ -198,8 +197,9 @@ def validate_slug(self, slug):
)
if other is not None:
raise serializers.ValidationError(
- "Another project (%s) is already using that slug" % other.name
+ f"Another project ({other.name}) is already using that slug"
)
+
return slug
def validate_relayPiiConfig(self, value):
@@ -364,8 +364,7 @@ class ProjectDetailsEndpoint(ProjectEndpoint):
def _get_unresolved_count(self, project):
queryset = Group.objects.filter(status=GroupStatus.UNRESOLVED, project=project)
- resolve_age = project.get_option("sentry:resolve_age", None)
- if resolve_age:
+ if resolve_age := project.get_option("sentry:resolve_age", None):
queryset = queryset.filter(
last_seen__gte=timezone.now() - timedelta(hours=int(resolve_age))
)
diff --git a/src/sentry/api/endpoints/project_filters.py b/src/sentry/api/endpoints/project_filters.py
index 48e84947c4ccd7..f39d684cd20a82 100644
--- a/src/sentry/api/endpoints/project_filters.py
+++ b/src/sentry/api/endpoints/project_filters.py
@@ -15,18 +15,16 @@ def get(self, request: Request, project) -> Response:
{method} {path}
"""
- results = []
- for flt in inbound_filters.get_all_filter_specs():
- results.append(
- {
- "id": flt.id,
- # 'active' will be either a boolean or list for the legacy browser filters
- # all other filters will be boolean
- "active": inbound_filters.get_filter_state(flt.id, project),
- "description": flt.description,
- "name": flt.name,
- "hello": flt.id + " - " + flt.name,
- }
- )
+ results = [
+ {
+ "id": flt.id,
+ "active": inbound_filters.get_filter_state(flt.id, project),
+ "description": flt.description,
+ "name": flt.name,
+ "hello": f"{flt.id} - {flt.name}",
+ }
+ for flt in inbound_filters.get_all_filter_specs()
+ ]
+
results.sort(key=lambda x: x["name"])
return Response(results)
diff --git a/src/sentry/api/endpoints/project_group_index.py b/src/sentry/api/endpoints/project_group_index.py
index b2cf886c208c06..d2548235c2ca86 100644
--- a/src/sentry/api/endpoints/project_group_index.py
+++ b/src/sentry/api/endpoints/project_group_index.py
@@ -93,8 +93,7 @@ def get(self, request: Request, project) -> Response:
if query:
matching_group = None
matching_event = None
- event_id = normalize_event_id(query)
- if event_id:
+ if event_id := normalize_event_id(query):
# check to see if we've got an event ID
try:
matching_group = Group.objects.from_event_id(project, event_id)
diff --git a/src/sentry/api/endpoints/project_index.py b/src/sentry/api/endpoints/project_index.py
index ae11d6cf58cc10..13495d37a6b7f0 100644
--- a/src/sentry/api/endpoints/project_index.py
+++ b/src/sentry/api/endpoints/project_index.py
@@ -53,8 +53,7 @@ def get(self, request: Request) -> Response:
else:
queryset = queryset.filter(teams__organizationmember__user=request.user)
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "query":
diff --git a/src/sentry/api/endpoints/project_key_details.py b/src/sentry/api/endpoints/project_key_details.py
index 0185c5f9dff35d..6284fed0a44dd8 100644
--- a/src/sentry/api/endpoints/project_key_details.py
+++ b/src/sentry/api/endpoints/project_key_details.py
@@ -54,10 +54,11 @@ def put(self, request: Request, project, key_id) -> Response:
if result.get("name"):
key.label = result["name"]
- if not result.get("browserSdkVersion"):
- key.data = {"browserSdkVersion": default_version}
- else:
- key.data = {"browserSdkVersion": result["browserSdkVersion"]}
+ key.data = (
+ {"browserSdkVersion": result["browserSdkVersion"]}
+ if result.get("browserSdkVersion")
+ else {"browserSdkVersion": default_version}
+ )
if result.get("isActive") is True:
key.status = ProjectKeyStatus.ACTIVE
diff --git a/src/sentry/api/endpoints/project_key_stats.py b/src/sentry/api/endpoints/project_key_stats.py
index 38f26b006f2c3d..90ccaaab556d4c 100644
--- a/src/sentry/api/endpoints/project_key_stats.py
+++ b/src/sentry/api/endpoints/project_key_stats.py
@@ -73,17 +73,16 @@ def get(self, request: Request, project, key_id) -> Response:
raise ParseError(detail="Invalid request data")
# Initialize the response results.
- response = []
- for time_string in results["intervals"]:
- response.append(
- {
- "ts": int(parse_timestamp(time_string).timestamp()),
- "total": 0,
- "dropped": 0,
- "accepted": 0,
- "filtered": 0,
- }
- )
+ response = [
+ {
+ "ts": int(parse_timestamp(time_string).timestamp()),
+ "total": 0,
+ "dropped": 0,
+ "accepted": 0,
+ "filtered": 0,
+ }
+ for time_string in results["intervals"]
+ ]
# We rely on groups and intervals being index aligned
for group_result in results["groups"]:
diff --git a/src/sentry/api/endpoints/project_ownership.py b/src/sentry/api/endpoints/project_ownership.py
index 7ab18829913362..0efe8633b8c528 100644
--- a/src/sentry/api/endpoints/project_ownership.py
+++ b/src/sentry/api/endpoints/project_ownership.py
@@ -86,9 +86,7 @@ def save(self):
ownership.codeowners_auto_sync = codeowners_auto_sync
changed = True
- changed = self.__modify_auto_assignment(ownership) or changed
-
- if changed:
+ if changed := self.__modify_auto_assignment(ownership) or changed:
now = timezone.now()
if ownership.date_created is None:
ownership.date_created = now
diff --git a/src/sentry/api/endpoints/project_plugins.py b/src/sentry/api/endpoints/project_plugins.py
index 7a55471da0d66b..ea6be546d7404f 100644
--- a/src/sentry/api/endpoints/project_plugins.py
+++ b/src/sentry/api/endpoints/project_plugins.py
@@ -10,8 +10,9 @@
class ProjectPluginsEndpoint(ProjectEndpoint):
def get(self, request: Request, project) -> Response:
context = serialize(
- [plugin for plugin in plugins.configurable_for_project(project, version=None)],
+ list(plugins.configurable_for_project(project, version=None)),
request.user,
PluginSerializer(project),
)
+
return Response(context)
diff --git a/src/sentry/api/endpoints/project_processingissues.py b/src/sentry/api/endpoints/project_processingissues.py
index 0f1a839e566e41..76c58216f2dc2b 100644
--- a/src/sentry/api/endpoints/project_processingissues.py
+++ b/src/sentry/api/endpoints/project_processingissues.py
@@ -29,15 +29,16 @@ def get(self, request: Request, project) -> Response:
for x in ApiToken.objects.filter(user=request.user).all()
if "project:releases" in x.get_scopes()
]
- if not tokens:
- token = ApiToken.objects.create(
+ token = (
+ tokens[0]
+ if tokens
+ else ApiToken.objects.create(
user=request.user,
scope_list=["project:releases"],
refresh_token=None,
expires_at=None,
)
- else:
- token = tokens[0]
+ )
resp = render_to_response(
"sentry/reprocessing-script.sh",
diff --git a/src/sentry/api/endpoints/project_profiling_profile.py b/src/sentry/api/endpoints/project_profiling_profile.py
index 610622bdaec8c1..94c10065df7795 100644
--- a/src/sentry/api/endpoints/project_profiling_profile.py
+++ b/src/sentry/api/endpoints/project_profiling_profile.py
@@ -21,12 +21,11 @@ def get_profiling_params(self, request: Request, project: Project) -> Dict[str,
except InvalidSearchQuery as err:
raise ParseError(detail=str(err))
- params.update(
- {
- key: value.isoformat() if key in {"start", "end"} else value
- for key, value in self.get_filter_params(request, project).items()
- }
- )
+ params |= {
+ key: value.isoformat() if key in {"start", "end"} else value
+ for key, value in self.get_filter_params(request, project).items()
+ }
+
return params
diff --git a/src/sentry/api/endpoints/project_release_commits.py b/src/sentry/api/endpoints/project_release_commits.py
index d0520988d6f901..f30cbe2d0fa40c 100644
--- a/src/sentry/api/endpoints/project_release_commits.py
+++ b/src/sentry/api/endpoints/project_release_commits.py
@@ -41,9 +41,7 @@ def get(self, request: Request, project, version) -> Response:
"commit", "commit__author"
)
- repo_name = request.query_params.get("repo_name")
-
- if repo_name:
+ if repo_name := request.query_params.get("repo_name"):
try:
repo = Repository.objects.get(organization_id=organization_id, name=repo_name)
queryset = queryset.filter(commit__repository_id=repo.id)
diff --git a/src/sentry/api/endpoints/project_release_details.py b/src/sentry/api/endpoints/project_release_details.py
index 5933cb31c3f32f..938df4ee2b0b99 100644
--- a/src/sentry/api/endpoints/project_release_details.py
+++ b/src/sentry/api/endpoints/project_release_details.py
@@ -119,8 +119,7 @@ def put(self, request: Request, project, version) -> Response:
if kwargs:
release.update(**kwargs)
- commit_list = result.get("commits")
- if commit_list:
+ if commit_list := result.get("commits"):
hook = ReleaseHook(project)
# TODO(dcramer): handle errors with release payloads
hook.set_commits(release.version, commit_list)
diff --git a/src/sentry/api/endpoints/project_release_file_details.py b/src/sentry/api/endpoints/project_release_file_details.py
index 1e04efeeb41199..bfdb0583ff2367 100644
--- a/src/sentry/api/endpoints/project_release_file_details.py
+++ b/src/sentry/api/endpoints/project_release_file_details.py
@@ -137,13 +137,16 @@ def get_releasefile(cls, request, release, file_id, check_permission_fn):
getter = _entry_from_index if download_requested else _get_from_index
releasefile = cls._get_releasefile(release, file_id, getter)
- if download_requested and check_permission_fn():
- if isinstance(releasefile, ReleaseFile):
- return cls.download(releasefile)
+ if download_requested:
+ if check_permission_fn():
+ return (
+ cls.download(releasefile)
+ if isinstance(releasefile, ReleaseFile)
+ else cls.download_from_archive(release, releasefile)
+ )
+
else:
- return cls.download_from_archive(release, releasefile)
- elif download_requested:
- return Response(status=403)
+ return Response(status=403)
return Response(serialize(releasefile, request.user))
diff --git a/src/sentry/api/endpoints/project_release_files.py b/src/sentry/api/endpoints/project_release_files.py
index c65cc602ecb86d..ac04d6ed33a292 100644
--- a/src/sentry/api/endpoints/project_release_files.py
+++ b/src/sentry/api/endpoints/project_release_files.py
@@ -114,11 +114,10 @@ def post_releasefile(request, release, logger):
{"detail": "File name must not contain special whitespace characters"}, status=400
)
- dist_name = request.data.get("dist")
- dist = None
- if dist_name:
+ if dist_name := request.data.get("dist"):
dist = release.add_dist(dist_name)
-
+ else:
+ dist = None
# Quickly check for the presence of this file before continuing with
# the costly file upload process.
if ReleaseFile.objects.filter(
diff --git a/src/sentry/api/endpoints/project_release_setup.py b/src/sentry/api/endpoints/project_release_setup.py
index dfe08d0ea00a36..cb319fa8c17084 100644
--- a/src/sentry/api/endpoints/project_release_setup.py
+++ b/src/sentry/api/endpoints/project_release_setup.py
@@ -19,10 +19,12 @@ def get(self, request: Request, project) -> Response:
4. tell sentry about a deploy
"""
- tag_key = "onboard_tag:1:%s" % (project.id)
- repo_key = "onboard_repo:1:%s" % (project.organization_id)
- commit_key = "onboard_commit:1:%s" % hash_values([project.organization_id, project.id])
- deploy_key = "onboard_deploy:1:%s" % hash_values([project.organization_id, project.id])
+ tag_key = f"onboard_tag:1:{project.id}"
+ repo_key = f"onboard_repo:1:{project.organization_id}"
+ commit_key = f"onboard_commit:1:{hash_values([project.organization_id, project.id])}"
+
+ deploy_key = f"onboard_deploy:1:{hash_values([project.organization_id, project.id])}"
+
onboard_cache = cache.get_many([tag_key, repo_key, commit_key, deploy_key])
tag = onboard_cache.get(tag_key)
diff --git a/src/sentry/api/endpoints/project_release_stats.py b/src/sentry/api/endpoints/project_release_stats.py
index 82d6f51feedd04..0f6ef65ff85d6a 100644
--- a/src/sentry/api/endpoints/project_release_stats.py
+++ b/src/sentry/api/endpoints/project_release_stats.py
@@ -14,8 +14,9 @@ def upsert_missing_release(project, version):
try:
return ReleaseProject.objects.get(project=project, release__version=version).release
except ReleaseProject.DoesNotExist:
- rows = release_health.get_oldest_health_data_for_releases([(project.id, version)])
- if rows:
+ if rows := release_health.get_oldest_health_data_for_releases(
+ [(project.id, version)]
+ ):
oldest = next(rows.values())
release = Release.get_or_create(project=project, version=version, date_added=oldest)
release.add_project(project)
@@ -73,22 +74,21 @@ def get(self, request: Request, project, version) -> Response:
environments=params.get("environment"),
)
- users_breakdown = []
- for data in release_health.get_crash_free_breakdown(
- project_id=params["project_id"][0],
- release=version,
- environments=params.get("environment"),
- start=release.date_added,
- ):
- users_breakdown.append(
- {
- "date": data["date"],
- "totalUsers": data["total_users"],
- "crashFreeUsers": data["crash_free_users"],
- "totalSessions": data["total_sessions"],
- "crashFreeSessions": data["crash_free_sessions"],
- }
+ users_breakdown = [
+ {
+ "date": data["date"],
+ "totalUsers": data["total_users"],
+ "crashFreeUsers": data["crash_free_users"],
+ "totalSessions": data["total_sessions"],
+ "crashFreeSessions": data["crash_free_sessions"],
+ }
+ for data in release_health.get_crash_free_breakdown(
+ project_id=params["project_id"][0],
+ release=version,
+ environments=params.get("environment"),
+ start=release.date_added,
)
+ ]
return Response(
serialize({"stats": stats, "statTotals": totals, "usersBreakdown": users_breakdown}),
diff --git a/src/sentry/api/endpoints/project_releases.py b/src/sentry/api/endpoints/project_releases.py
index 7d286336fcee6a..7dbd1b353f450f 100644
--- a/src/sentry/api/endpoints/project_releases.py
+++ b/src/sentry/api/endpoints/project_releases.py
@@ -151,8 +151,7 @@ def post(self, request: Request, project) -> Response:
created = release.add_project(project)
- commit_list = result.get("commits")
- if commit_list:
+ if commit_list := result.get("commits"):
hook = ReleaseHook(project)
# TODO(dcramer): handle errors with release payloads
hook.set_commits(release.version, commit_list)
@@ -166,15 +165,7 @@ def post(self, request: Request, project) -> Response:
datetime=release.date_released,
)
- if not created:
- # This is the closest status code that makes sense, and we want
- # a unique 2xx response code so people can understand when
- # behavior differs.
- # 208 Already Reported (WebDAV; RFC 5842)
- status = 208
- else:
- status = 201
-
+ status = 201 if created else 208
analytics.record(
"release.created",
user_id=request.user.id if request.user and request.user.id else None,
diff --git a/src/sentry/api/endpoints/project_repo_path_parsing.py b/src/sentry/api/endpoints/project_repo_path_parsing.py
index 6e122e18efb187..16d963784a98c4 100644
--- a/src/sentry/api/endpoints/project_repo_path_parsing.py
+++ b/src/sentry/api/endpoints/project_repo_path_parsing.py
@@ -64,7 +64,7 @@ def validate_source_url(self, source_url):
)
def integration_match(integration):
- return source_url.startswith("https://{}".format(integration.metadata["domain_name"]))
+ return source_url.startswith(f'https://{integration.metadata["domain_name"]}')
def repo_match(repo):
return source_url.startswith(repo.url)
diff --git a/src/sentry/api/endpoints/project_rule_details.py b/src/sentry/api/endpoints/project_rule_details.py
index a15bc922708cbb..b47ecc2bdcf681 100644
--- a/src/sentry/api/endpoints/project_rule_details.py
+++ b/src/sentry/api/endpoints/project_rule_details.py
@@ -121,8 +121,7 @@ def put(self, request: Request, project, rule) -> Response:
"actions": data["actions"],
"frequency": data.get("frequency"),
}
- owner = data.get("owner")
- if owner:
+ if owner := data.get("owner"):
try:
kwargs["owner"] = owner.resolve_to_actor().id
except (User.DoesNotExist, Team.DoesNotExist):
@@ -133,7 +132,7 @@ def put(self, request: Request, project, rule) -> Response:
if data.get("pending_save"):
client = RedisRuleStatus()
- kwargs.update({"uuid": client.uuid, "rule_id": rule.id})
+ kwargs |= {"uuid": client.uuid, "rule_id": rule.id}
find_channel_id_for_rule.apply_async(kwargs=kwargs)
context = {"uuid": client.uuid}
diff --git a/src/sentry/api/endpoints/project_rules.py b/src/sentry/api/endpoints/project_rules.py
index 942cbe299c570d..bc45c17e0a4366 100644
--- a/src/sentry/api/endpoints/project_rules.py
+++ b/src/sentry/api/endpoints/project_rules.py
@@ -90,8 +90,7 @@ def post(self, request: Request, project) -> Response:
"frequency": data.get("frequency"),
"user_id": request.user.id,
}
- owner = data.get("owner")
- if owner:
+ if owner := data.get("owner"):
try:
kwargs["owner"] = owner.resolve_to_actor().id
except (User.DoesNotExist, Team.DoesNotExist):
@@ -103,7 +102,7 @@ def post(self, request: Request, project) -> Response:
if data.get("pending_save"):
client = RedisRuleStatus()
uuid_context = {"uuid": client.uuid}
- kwargs.update(uuid_context)
+ kwargs |= uuid_context
find_channel_id_for_rule.apply_async(kwargs=kwargs)
return Response(uuid_context, status=202)
diff --git a/src/sentry/api/endpoints/project_rules_configuration.py b/src/sentry/api/endpoints/project_rules_configuration.py
index d5ab4e6045725d..7c2330fa1e8f2d 100644
--- a/src/sentry/api/endpoints/project_rules_configuration.py
+++ b/src/sentry/api/endpoints/project_rules_configuration.py
@@ -33,8 +33,7 @@ def get(self, request: Request, project) -> Response:
continue
if node.id in SENTRY_APP_ACTIONS:
- custom_actions = node.get_custom_actions(project)
- if custom_actions:
+ if custom_actions := node.get_custom_actions(project):
action_list.extend(custom_actions)
continue
diff --git a/src/sentry/api/endpoints/project_stacktrace_link.py b/src/sentry/api/endpoints/project_stacktrace_link.py
index 52f061a6a0fd7b..38151491b294c6 100644
--- a/src/sentry/api/endpoints/project_stacktrace_link.py
+++ b/src/sentry/api/endpoints/project_stacktrace_link.py
@@ -56,16 +56,16 @@ def get(self, request: Request, project) -> Response:
commit_id = request.GET.get("commitId")
platform = request.GET.get("platform")
- result = {"config": None, "sourceUrl": None}
-
integrations = Integration.objects.filter(organizations=project.organization_id)
- # TODO(meredith): should use get_provider.has_feature() instead once this is
- # no longer feature gated and is added as an IntegrationFeature
- result["integrations"] = [
- serialize(i, request.user)
- for i in integrations
- if i.has_feature(IntegrationFeatures.STACKTRACE_LINK)
- ]
+ result = {
+ "config": None,
+ "sourceUrl": None,
+ "integrations": [
+ serialize(i, request.user)
+ for i in integrations
+ if i.has_feature(IntegrationFeatures.STACKTRACE_LINK)
+ ],
+ }
# xxx(meredith): if there are ever any changes to this query, make
# sure that we are still ordering by `id` because we want to make sure
diff --git a/src/sentry/api/endpoints/project_stats.py b/src/sentry/api/endpoints/project_stats.py
index c8cca493a49d8e..4ba8ee8db52940 100644
--- a/src/sentry/api/endpoints/project_stats.py
+++ b/src/sentry/api/endpoints/project_stats.py
@@ -58,7 +58,7 @@ def get(self, request: Request, project) -> Response:
try:
stat_model = FILTER_STAT_KEYS_TO_VALUES[stat]
except KeyError:
- raise ValueError("Invalid stat: %s" % stat)
+ raise ValueError(f"Invalid stat: {stat}")
data = tsdb.get_range(
model=stat_model, keys=[project.id], **self._parse_args(request, **query_kwargs)
diff --git a/src/sentry/api/endpoints/project_tags.py b/src/sentry/api/endpoints/project_tags.py
index 33ed33500fa542..445d33438b4146 100644
--- a/src/sentry/api/endpoints/project_tags.py
+++ b/src/sentry/api/endpoints/project_tags.py
@@ -29,15 +29,14 @@ def get(self, request: Request, project) -> Response:
key=lambda x: x.key,
)
- data = []
- for tag_key in tag_keys:
- data.append(
- {
- "key": tagstore.get_standardized_key(tag_key.key),
- "name": tagstore.get_tag_key_label(tag_key.key),
- "uniqueValues": tag_key.values_seen,
- "canDelete": tag_key.key not in PROTECTED_TAG_KEYS,
- }
- )
+ data = [
+ {
+ "key": tagstore.get_standardized_key(tag_key.key),
+ "name": tagstore.get_tag_key_label(tag_key.key),
+ "uniqueValues": tag_key.values_seen,
+ "canDelete": tag_key.key not in PROTECTED_TAG_KEYS,
+ }
+ for tag_key in tag_keys
+ ]
return Response(data)
diff --git a/src/sentry/api/endpoints/project_transfer.py b/src/sentry/api/endpoints/project_transfer.py
index 00e6c7d64e3f75..c7100327e3e228 100644
--- a/src/sentry/api/endpoints/project_transfer.py
+++ b/src/sentry/api/endpoints/project_transfer.py
@@ -81,13 +81,14 @@ def post(self, request: Request, project) -> Response:
"requester": request.user,
}
MessageBuilder(
- subject="{}Request for Project Transfer".format(options.get("mail.subject-prefix")),
+ subject=f'{options.get("mail.subject-prefix")}Request for Project Transfer',
template="sentry/emails/transfer_project.txt",
html_template="sentry/emails/transfer_project.html",
type="org.confirm_project_transfer_request",
context=context,
).send_async([email])
+
self.create_audit_entry(
request=request,
organization=project.organization,
diff --git a/src/sentry/api/endpoints/prompts_activity.py b/src/sentry/api/endpoints/prompts_activity.py
index 5169a219d79787..bdd4764ca35813 100644
--- a/src/sentry/api/endpoints/prompts_activity.py
+++ b/src/sentry/api/endpoints/prompts_activity.py
@@ -44,7 +44,7 @@ def get(self, request: Request) -> Response:
conditions = None
for feature in features:
if not prompt_config.has(feature):
- return Response({"detail": "Invalid feature name " + feature}, status=400)
+ return Response({"detail": f"Invalid feature name {feature}"}, status=400)
required_fields = prompt_config.required_fields(feature)
for field in required_fields:
@@ -56,12 +56,11 @@ def get(self, request: Request) -> Response:
result = PromptsActivity.objects.filter(conditions, user=request.user)
featuredata = {k.feature: k.data for k in result}
- if len(features) == 1:
- result = result.first()
- data = None if result is None else result.data
- return Response({"data": data, "features": featuredata})
- else:
+ if len(features) != 1:
return Response({"features": featuredata})
+ result = result.first()
+ data = None if result is None else result.data
+ return Response({"data": data, "features": featuredata})
def put(self, request: Request):
serializer = PromptsActivitySerializer(data=request.data)
diff --git a/src/sentry/api/endpoints/relay/project_configs.py b/src/sentry/api/endpoints/relay/project_configs.py
index 1be86278fc46a4..1ae5f760999bac 100644
--- a/src/sentry/api/endpoints/relay/project_configs.py
+++ b/src/sentry/api/endpoints/relay/project_configs.py
@@ -78,12 +78,11 @@ def _post_or_schedule_by_key(self, request: Request):
proj_configs = {}
pending = []
for key in public_keys:
- computed = self._get_cached_or_schedule(key)
- if not computed:
- pending.append(key)
- else:
+ if computed := self._get_cached_or_schedule(key):
proj_configs[key] = computed
+ else:
+ pending.append(key)
metrics.incr("relay.project_configs.post_v3.pending", amount=len(pending), sample_rate=1)
metrics.incr(
"relay.project_configs.post_v3.fetched", amount=len(proj_configs), sample_rate=1
@@ -99,8 +98,7 @@ def _get_cached_or_schedule(self, public_key) -> Optional[dict]:
Debouncing of the project happens after the task has been scheduled.
"""
- cached_config = projectconfig_cache.get(public_key)
- if cached_config:
+ if cached_config := projectconfig_cache.get(public_key):
return cached_config
schedule_update_config_cache(
@@ -202,10 +200,9 @@ def _post_by_project(self, request: Request, full_config_requested):
projects = {}
with start_span(op="relay_fetch_orgs"):
- # Preload all organizations and their options to prevent repeated
- # database access when computing the project configuration.
- org_ids = {project.organization_id for project in projects.values()}
- if org_ids:
+ if org_ids := {
+ project.organization_id for project in projects.values()
+ }:
with metrics.timer("relay_project_configs.fetching_orgs.duration"):
orgs = Organization.objects.get_many_from_cache(org_ids)
orgs = {o.id: o for o in orgs if request.relay.has_org_access(o)}
@@ -213,7 +210,7 @@ def _post_by_project(self, request: Request, full_config_requested):
orgs = {}
with metrics.timer("relay_project_configs.fetching_org_options.duration"):
- for org_id in orgs.keys():
+ for org_id in orgs:
OrganizationOption.objects.get_all_values(org_id)
with start_span(op="relay_fetch_keys"):
diff --git a/src/sentry/api/endpoints/relay/register_challenge.py b/src/sentry/api/endpoints/relay/register_challenge.py
index 31dc75dff2dee2..c7877c0a8bf41e 100644
--- a/src/sentry/api/endpoints/relay/register_challenge.py
+++ b/src/sentry/api/endpoints/relay/register_challenge.py
@@ -87,12 +87,11 @@ def post(self, request: Request) -> Response:
relay, static = relay_from_id(request, relay_id)
- if relay is not None:
- if relay.public_key != str(public_key):
- # This happens if we have an ID collision or someone copies an existing id
- return Response(
- {"detail": "Attempted to register agent with a different public key"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ if relay is not None and relay.public_key != str(public_key):
+ # This happens if we have an ID collision or someone copies an existing id
+ return Response(
+ {"detail": "Attempted to register agent with a different public key"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
return Response(serialize(challenge))
diff --git a/src/sentry/api/endpoints/setup_wizard.py b/src/sentry/api/endpoints/setup_wizard.py
index 2c65836bc68193..89282bf7764d98 100644
--- a/src/sentry/api/endpoints/setup_wizard.py
+++ b/src/sentry/api/endpoints/setup_wizard.py
@@ -43,11 +43,9 @@ def get(self, request: Request, wizard_hash=None) -> Response:
return Response(serialize(wizard_data))
else:
- # This creates a new available hash url for the project wizard
- rate_limited = ratelimits.is_limited(
- key="rl:setup-wizard:ip:%s" % request.META["REMOTE_ADDR"], limit=10
- )
- if rate_limited:
+ if rate_limited := ratelimits.is_limited(
+ key=f'rl:setup-wizard:ip:{request.META["REMOTE_ADDR"]}', limit=10
+ ):
logger.info("setup-wizard.rate-limit")
return Response({"Too many wizard requests"}, status=403)
wizard_hash = get_random_string(64, allowed_chars="abcdefghijklmnopqrstuvwxyz012345679")
diff --git a/src/sentry/api/endpoints/system_options.py b/src/sentry/api/endpoints/system_options.py
index deb4cd1283c6b9..897849c7ca8e70 100644
--- a/src/sentry/api/endpoints/system_options.py
+++ b/src/sentry/api/endpoints/system_options.py
@@ -64,10 +64,10 @@ def put(self, request: Request):
)
try:
- if not (option.flags & options.FLAG_ALLOW_EMPTY) and not v:
- options.delete(k)
- else:
+ if option.flags & options.FLAG_ALLOW_EMPTY or v:
options.set(k, v)
+ else:
+ options.delete(k)
except (TypeError, AssertionError) as e:
# TODO(chadwhitacre): Use a custom exception for the
# immutability case, especially since asserts disappear with
diff --git a/src/sentry/api/endpoints/team_details.py b/src/sentry/api/endpoints/team_details.py
index 100fdd5d361e3d..67b1fbdc2938e4 100644
--- a/src/sentry/api/endpoints/team_details.py
+++ b/src/sentry/api/endpoints/team_details.py
@@ -102,11 +102,10 @@ def delete(self, request: Request, team) -> Response:
immediate. Teams will have their slug released while waiting for deletion.
"""
suffix = uuid4().hex
- new_slug = f"{team.slug}-{suffix}"[0:50]
- updated = Team.objects.filter(id=team.id, status=TeamStatus.VISIBLE).update(
- slug=new_slug, status=TeamStatus.PENDING_DELETION
- )
- if updated:
+ new_slug = f"{team.slug}-{suffix}"[:50]
+ if updated := Team.objects.filter(
+ id=team.id, status=TeamStatus.VISIBLE
+ ).update(slug=new_slug, status=TeamStatus.PENDING_DELETION):
scheduled = ScheduledDeletion.schedule(team, days=0, actor=request.user)
self.create_audit_entry(
request=request,
diff --git a/src/sentry/api/endpoints/user_authenticator_details.py b/src/sentry/api/endpoints/user_authenticator_details.py
index 47fc31e45ab8e4..bb7235136b2ed0 100644
--- a/src/sentry/api/endpoints/user_authenticator_details.py
+++ b/src/sentry/api/endpoints/user_authenticator_details.py
@@ -18,14 +18,17 @@ class UserAuthenticatorDetailsEndpoint(UserEndpoint):
def _get_device_for_rename(self, authenticator, interface_device_id):
devices = authenticator.config
- for device in devices["devices"]:
- # this is for devices registered with webauthn, since the stored data is not a string, we need to decode it
- if type(device["binding"]) == AuthenticatorData:
- if decode_credential_id(device) == interface_device_id:
- return device
- elif device["binding"]["keyHandle"] == interface_device_id:
- return device
- return None
+ return next(
+ (
+ device
+ for device in devices["devices"]
+ if type(device["binding"]) == AuthenticatorData
+ and decode_credential_id(device) == interface_device_id
+ or type(device["binding"]) != AuthenticatorData
+ and device["binding"]["keyHandle"] == interface_device_id
+ ),
+ None,
+ )
def _rename_device(self, authenticator, interface_device_id, new_name):
device = self._get_device_for_rename(authenticator, interface_device_id)
diff --git a/src/sentry/api/endpoints/user_authenticator_enroll.py b/src/sentry/api/endpoints/user_authenticator_enroll.py
index 999b316d823ecf..93f6eef9a673d0 100644
--- a/src/sentry/api/endpoints/user_authenticator_enroll.py
+++ b/src/sentry/api/endpoints/user_authenticator_enroll.py
@@ -229,7 +229,7 @@ def post(self, request: Request, user, interface_id) -> Response:
serializer.data["deviceName"],
state,
)
- context.update({"device_name": serializer.data["deviceName"]})
+ context["device_name"] = serializer.data["deviceName"]
if interface.status == EnrollmentStatus.ROTATION:
interface.rotate_in_place()
@@ -239,7 +239,7 @@ def post(self, request: Request, user, interface_id) -> Response:
except Authenticator.AlreadyEnrolled:
return Response(ALREADY_ENROLLED_ERR, status=status.HTTP_400_BAD_REQUEST)
- context.update({"authenticator": interface.authenticator})
+ context["authenticator"] = interface.authenticator
capture_security_activity(
account=request.user,
type="mfa-added",
diff --git a/src/sentry/api/endpoints/user_details.py b/src/sentry/api/endpoints/user_details.py
index 8a46ac79fdf815..71eaa4ec4fe581 100644
--- a/src/sentry/api/endpoints/user_details.py
+++ b/src/sentry/api/endpoints/user_details.py
@@ -72,10 +72,12 @@ def validate_username(self, value):
def validate(self, attrs):
attrs = super().validate(attrs)
- if self.instance.email == self.instance.username:
- if attrs.get("username", self.instance.email) != self.instance.email:
- # ... this probably needs to handle newsletters and such?
- attrs.setdefault("email", attrs["username"])
+ if (
+ self.instance.email == self.instance.username
+ and attrs.get("username", self.instance.email) != self.instance.email
+ ):
+ # ... this probably needs to handle newsletters and such?
+ attrs.setdefault("email", attrs["username"])
return attrs
@@ -222,9 +224,10 @@ def delete(self, request: Request, user) -> Response:
status=OrganizationStatus.VISIBLE,
)
- org_results = []
- for org in org_list:
- org_results.append({"organization": org, "single_owner": org.has_single_owner()})
+ org_results = [
+ {"organization": org, "single_owner": org.has_single_owner()}
+ for org in org_list
+ ]
avail_org_slugs = {o["organization"].slug for o in org_results}
orgs_to_remove = set(serializer.validated_data.get("organizations")).intersection(
@@ -238,11 +241,11 @@ def delete(self, request: Request, user) -> Response:
for org_slug in orgs_to_remove:
client.delete(path=f"/organizations/{org_slug}/", request=request, is_sudo=True)
- remaining_org_ids = [
- o.id for o in org_list if o.slug in avail_org_slugs.difference(orgs_to_remove)
- ]
-
- if remaining_org_ids:
+ if remaining_org_ids := [
+ o.id
+ for o in org_list
+ if o.slug in avail_org_slugs.difference(orgs_to_remove)
+ ]:
OrganizationMember.objects.filter(
organization__in=remaining_org_ids, user=user
).delete()
diff --git a/src/sentry/api/endpoints/user_identity.py b/src/sentry/api/endpoints/user_identity.py
index aca8317a76868e..ed35292fb1bfbf 100644
--- a/src/sentry/api/endpoints/user_identity.py
+++ b/src/sentry/api/endpoints/user_identity.py
@@ -18,8 +18,7 @@ def get(self, request: Request, user) -> Response:
"""
queryset = Identity.objects.filter(user=user)
- provider = request.GET.get("provider")
- if provider:
+ if provider := request.GET.get("provider"):
queryset = queryset.filter(idp__type=provider.lower())
return self.paginate(
diff --git a/src/sentry/api/endpoints/user_identity_config.py b/src/sentry/api/endpoints/user_identity_config.py
index 0f0a817817ab8c..ba398fc769eeb1 100644
--- a/src/sentry/api/endpoints/user_identity_config.py
+++ b/src/sentry/api/endpoints/user_identity_config.py
@@ -92,18 +92,17 @@ class UserIdentityConfigDetailsEndpoint(UserEndpoint):
def _get_identity(user, category, identity_id) -> Optional[UserIdentityConfig]:
identity_id = int(identity_id)
- # This fetches and iterates over all the user's identities.
- # If needed, we could optimize to look directly for the one
- # object, but we would still need to examine the full set of
- # Identity objects in order to correctly set the status.
- for identity in get_identities(user):
- if identity.category == category and identity.id == identity_id:
- return identity
- return None
+ return next(
+ (
+ identity
+ for identity in get_identities(user)
+ if identity.category == category and identity.id == identity_id
+ ),
+ None,
+ )
def get(self, request: Request, user, category, identity_id) -> Response:
- identity = self._get_identity(user, category, identity_id)
- if identity:
+ if identity := self._get_identity(user, category, identity_id):
return Response(serialize(identity))
else:
return Response(status=status.HTTP_404_NOT_FOUND)
diff --git a/src/sentry/api/endpoints/user_index.py b/src/sentry/api/endpoints/user_index.py
index 92a0ebbc162a73..c7c3ece367cb36 100644
--- a/src/sentry/api/endpoints/user_index.py
+++ b/src/sentry/api/endpoints/user_index.py
@@ -17,8 +17,7 @@ class UserIndexEndpoint(Endpoint):
def get(self, request: Request) -> Response:
queryset = User.objects.distinct()
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
tokens = tokenize_query(query)
for key, value in tokens.items():
if key == "query":
diff --git a/src/sentry/api/endpoints/user_notification_details.py b/src/sentry/api/endpoints/user_notification_details.py
index aa4bbf5a870bc9..b316091f46e941 100644
--- a/src/sentry/api/endpoints/user_notification_details.py
+++ b/src/sentry/api/endpoints/user_notification_details.py
@@ -88,12 +88,12 @@ def put(self, request: Request, user) -> Response:
key = UserOptionsSettingsKey(key)
except ValueError:
return Response(
- {"detail": "Unknown key: %s." % key},
+ {"detail": f"Unknown key: {key}."},
status=status.HTTP_400_BAD_REQUEST,
)
- type = get_type_from_user_option_settings_key(key)
- if type:
+
+ if type := get_type_from_user_option_settings_key(key):
NotificationSetting.objects.update_settings(
ExternalProviders.EMAIL,
type,
diff --git a/src/sentry/api/endpoints/user_notification_fine_tuning.py b/src/sentry/api/endpoints/user_notification_fine_tuning.py
index 90a2c37028799d..dc17a5785837d4 100644
--- a/src/sentry/api/endpoints/user_notification_fine_tuning.py
+++ b/src/sentry/api/endpoints/user_notification_fine_tuning.py
@@ -28,10 +28,11 @@ def get(self, request: Request, user, notification_type) -> Response:
notification_type = FineTuningAPIKey(notification_type)
except ValueError:
return Response(
- {"detail": "Unknown notification type: %s." % notification_type},
+ {"detail": f"Unknown notification type: {notification_type}."},
status=status.HTTP_404_NOT_FOUND,
)
+
notifications = UserNotificationsSerializer()
return Response(
serialize(
@@ -69,10 +70,11 @@ def put(self, request: Request, user, notification_type) -> Response:
notification_type = FineTuningAPIKey(notification_type)
except ValueError:
return Response(
- {"detail": "Unknown notification type: %s." % notification_type},
+ {"detail": f"Unknown notification type: {notification_type}."},
status=status.HTTP_404_NOT_FOUND,
)
+
if notification_type == FineTuningAPIKey.REPORTS:
return self._handle_put_reports(user, request.data)
diff --git a/src/sentry/api/endpoints/user_organizationintegrations.py b/src/sentry/api/endpoints/user_organizationintegrations.py
index ba267e152c3e82..0734a2a4d36d29 100644
--- a/src/sentry/api/endpoints/user_organizationintegrations.py
+++ b/src/sentry/api/endpoints/user_organizationintegrations.py
@@ -22,8 +22,7 @@ def get(self, request: Request, user) -> Response:
status=ObjectStatus.VISIBLE,
integration__status=ObjectStatus.VISIBLE,
)
- provider = request.GET.get("provider")
- if provider:
+ if provider := request.GET.get("provider"):
queryset = queryset.filter(integration__provider=provider.lower())
return self.paginate(
diff --git a/src/sentry/api/endpoints/user_organizations.py b/src/sentry/api/endpoints/user_organizations.py
index 6171b552759718..89c740c35ffcdc 100644
--- a/src/sentry/api/endpoints/user_organizations.py
+++ b/src/sentry/api/endpoints/user_organizations.py
@@ -11,8 +11,7 @@ class UserOrganizationsEndpoint(UserEndpoint):
def get(self, request: Request, user) -> Response:
queryset = user.get_orgs()
- query = request.GET.get("query")
- if query:
+ if query := request.GET.get("query"):
queryset = queryset.filter(Q(name__icontains=query) | Q(slug__icontains=query))
return self.paginate(
diff --git a/src/sentry/api/endpoints/user_permissions_config.py b/src/sentry/api/endpoints/user_permissions_config.py
index 6ade5d6cc15f00..be793eb7b765bc 100644
--- a/src/sentry/api/endpoints/user_permissions_config.py
+++ b/src/sentry/api/endpoints/user_permissions_config.py
@@ -13,4 +13,4 @@ def get(self, request: Request, user) -> Response:
"""
List all available permissions that can be applied to a user.
"""
- return self.respond([p for p in settings.SENTRY_USER_PERMISSIONS])
+ return self.respond(list(settings.SENTRY_USER_PERMISSIONS))
diff --git a/src/sentry/api/endpoints/user_social_identity_details.py b/src/sentry/api/endpoints/user_social_identity_details.py
index 46e5a70378bf67..b9eed4c6e420e2 100644
--- a/src/sentry/api/endpoints/user_social_identity_details.py
+++ b/src/sentry/api/endpoints/user_social_identity_details.py
@@ -40,8 +40,6 @@ def delete(self, request: Request, user, identity_id) -> Response:
exc_tb = sys.exc_info()[2]
raise exc.with_traceback(exc_tb)
- del exc_tb
-
# XXX(dcramer): we experienced an issue where the identity still existed,
# and given that this is a cheap query, lets error hard in that case
assert not UserSocialAuth.objects.filter(user=user, id=identity_id).exists()
diff --git a/src/sentry/api/event_search.py b/src/sentry/api/event_search.py
index cd7104c21634e9..0aa1990dc38a51 100644
--- a/src/sentry/api/event_search.py
+++ b/src/sentry/api/event_search.py
@@ -189,7 +189,7 @@ def translate_wildcard(pat: str) -> str:
res = ""
while i < n:
c = pat[i]
- i = i + 1
+ i += 1
# fnmatch.translate has no way to handle escaping metacharacters.
# Applied this basic patch to handle it:
# https://bugs.python.org/file27570/issue8402.1.patch
@@ -202,7 +202,7 @@ def translate_wildcard(pat: str) -> str:
res += re.escape(c)
else:
res += c
- return "^" + res + "$"
+ return f"^{res}$"
def translate_escape_sequences(string: str) -> str:
@@ -215,7 +215,7 @@ def translate_escape_sequences(string: str) -> str:
res = ""
while i < n:
c = string[i]
- i = i + 1
+ i += 1
if c == "\\" and i < n:
d = string[i]
if d == "*":
@@ -346,9 +346,11 @@ def value(self):
return self.raw_value
def is_wildcard(self) -> bool:
- if not isinstance(self.raw_value, str):
- return False
- return bool(WILDCARD_CHARS.search(self.raw_value))
+ return (
+ bool(WILDCARD_CHARS.search(self.raw_value))
+ if isinstance(self.raw_value, str)
+ else False
+ )
def is_event_id(self) -> bool:
"""Return whether the current value is a valid event id
@@ -366,9 +368,11 @@ def is_span_id(self) -> bool:
Empty strings are valid, so that it can be used for has:trace.span queries
"""
- if not isinstance(self.raw_value, str):
- return False
- return is_span_id(self.raw_value) or self.raw_value == ""
+ return (
+ is_span_id(self.raw_value) or self.raw_value == ""
+ if isinstance(self.raw_value, str)
+ else False
+ )
class SearchFilter(NamedTuple):
@@ -528,9 +532,13 @@ def visit_free_text_unquoted(self, node, children):
return node.text.strip(" ") or None
def visit_free_text(self, node, children):
- if not children[0]:
- return None
- return SearchFilter(SearchKey(self.config.free_text_key), "=", SearchValue(children[0]))
+ return (
+ SearchFilter(
+ SearchKey(self.config.free_text_key), "=", SearchValue(children[0])
+ )
+ if children[0]
+ else None
+ )
def visit_paren_group(self, node, children):
if not self.config.allow_boolean:
@@ -674,7 +682,9 @@ def visit_boolean_filter(self, node, children):
return SearchFilter(search_key, "=", search_value)
search_value = SearchValue(search_value.text)
- return self._handle_basic_filter(search_key, "=" if not negated else "!=", search_value)
+ return self._handle_basic_filter(
+ search_key, "!=" if negated else "=", search_value
+ )
def visit_numeric_in_filter(self, node, children):
(negation, search_key, _, search_value) = children
diff --git a/src/sentry/api/fields/avatar.py b/src/sentry/api/fields/avatar.py
index 11058e8743eb6a..744a4fa9ac38f1 100644
--- a/src/sentry/api/fields/avatar.py
+++ b/src/sentry/api/fields/avatar.py
@@ -36,9 +36,7 @@ def __init__(
self.is_sentry_app = is_sentry_app
def to_representation(self, value):
- if not value:
- return ""
- return value.getvalue()
+ return value.getvalue() if value else ""
def to_internal_value(self, data):
if not data:
diff --git a/src/sentry/api/fields/empty_decimal.py b/src/sentry/api/fields/empty_decimal.py
index 2ef254476d3025..9877549c448110 100644
--- a/src/sentry/api/fields/empty_decimal.py
+++ b/src/sentry/api/fields/empty_decimal.py
@@ -10,11 +10,7 @@ class EmptyDecimalField(serializers.DecimalField):
"""
def to_internal_value(self, data):
- if data == "":
- return None
- return super().to_internal_value(data)
+ return None if data == "" else super().to_internal_value(data)
def run_validation(self, data=empty):
- if data == "":
- return None
- return super().run_validation(data)
+ return None if data == "" else super().run_validation(data)
diff --git a/src/sentry/api/fields/empty_integer.py b/src/sentry/api/fields/empty_integer.py
index e5e8d6f5825966..170b3d52ea9d23 100644
--- a/src/sentry/api/fields/empty_integer.py
+++ b/src/sentry/api/fields/empty_integer.py
@@ -10,11 +10,7 @@ class EmptyIntegerField(serializers.IntegerField):
"""
def to_internal_value(self, data):
- if data == "":
- return None
- return super().to_internal_value(data)
+ return None if data == "" else super().to_internal_value(data)
def run_validation(self, data=empty):
- if data == "":
- return None
- return super().run_validation(data)
+ return None if data == "" else super().run_validation(data)
diff --git a/src/sentry/api/helpers/events.py b/src/sentry/api/helpers/events.py
index 99613a2bd8f6fa..bddd30eaa7bdfb 100644
--- a/src/sentry/api/helpers/events.py
+++ b/src/sentry/api/helpers/events.py
@@ -11,8 +11,7 @@ def get_direct_hit_response(request, query, snuba_params, referrer):
Checks whether a query is a direct hit for an event, and if so returns
a response. Otherwise returns None
"""
- event_id = normalize_event_id(query)
- if event_id:
+ if event_id := normalize_event_id(query):
snuba_filter = get_filter(query=f"id:{event_id}", params=snuba_params)
snuba_filter.conditions.append(["event.type", "!=", "transaction"])
diff --git a/src/sentry/api/helpers/group_index/delete.py b/src/sentry/api/helpers/group_index/delete.py
index 4ce2f48c3d783c..e10af588d6ed61 100644
--- a/src/sentry/api/helpers/group_index/delete.py
+++ b/src/sentry/api/helpers/group_index/delete.py
@@ -95,8 +95,7 @@ def delete_groups(
`search_fn` refers to the `search.query` method with the appropriate
project, org, environment, and search params already bound
"""
- group_ids = request.GET.getlist("id")
- if group_ids:
+ if group_ids := request.GET.getlist("id"):
group_list = list(
Group.objects.filter(
project__in=projects,
diff --git a/src/sentry/api/helpers/group_index/index.py b/src/sentry/api/helpers/group_index/index.py
index 27c4c6a1d190d3..95bc0af1255059 100644
--- a/src/sentry/api/helpers/group_index/index.py
+++ b/src/sentry/api/helpers/group_index/index.py
@@ -41,8 +41,7 @@ def build_query_params_from_request(
) -> MutableMapping[str, Any]:
query_kwargs = {"projects": projects, "sort_by": request.GET.get("sort", DEFAULT_SORT_OPTION)}
- limit = request.GET.get("limit")
- if limit:
+ if limit := request.GET.get("limit"):
try:
query_kwargs["limit"] = int(limit)
except ValueError:
@@ -211,11 +210,7 @@ def get_first_last_release(
group: "Group",
) -> Tuple[Optional[Mapping[str, Any]], Optional[Mapping[str, Any]]]:
first_release = group.get_first_release()
- if first_release is not None:
- last_release = group.get_last_release()
- else:
- last_release = None
-
+ last_release = group.get_last_release() if first_release is not None else None
if first_release is not None and last_release is not None:
first_release, last_release = get_first_last_release_info(
request, group, [first_release, last_release]
@@ -238,9 +233,7 @@ def get_release_info(request: Request, group: "Group", version: str) -> Mapping[
except Release.DoesNotExist:
release = {"version": version}
- # Explicitly typing to satisfy mypy.
- release_ifo: Mapping[str, Any] = serialize(release, request.user)
- return release_ifo
+ return serialize(release, request.user)
def get_first_last_release_info(
diff --git a/src/sentry/api/helpers/teams.py b/src/sentry/api/helpers/teams.py
index afdd97a1471191..5ebf0d218dd475 100644
--- a/src/sentry/api/helpers/teams.py
+++ b/src/sentry/api/helpers/teams.py
@@ -18,11 +18,9 @@ def get_teams(request, organization, teams=None):
myteams = Team.objects.filter(
organization=organization, status=TeamStatus.VISIBLE
).values_list("id", flat=True)
- verified_ids.update(myteams)
else:
myteams = [t.id for t in request.access.teams]
- verified_ids.update(myteams)
-
+ verified_ids.update(myteams)
for team_id in requested_teams: # Verify each passed Team id is numeric
if type(team_id) is not int and not team_id.isdigit():
raise InvalidParams(f"Invalid Team ID: {team_id}")
diff --git a/src/sentry/api/helpers/user_reports.py b/src/sentry/api/helpers/user_reports.py
index 8d33fe10164352..a5daadf6aa75ce 100644
--- a/src/sentry/api/helpers/user_reports.py
+++ b/src/sentry/api/helpers/user_reports.py
@@ -2,12 +2,13 @@
def user_reports_filter_to_unresolved(user_reports):
- group_ids = {ur.group_id for ur in user_reports if ur.group_id}
- unresolved_group_ids = set()
- if group_ids:
+ if group_ids := {ur.group_id for ur in user_reports if ur.group_id}:
unresolved_group_ids = set(
- Group.objects.filter(id__in=group_ids, status=GroupStatus.UNRESOLVED).values_list(
- "id", flat=True
- )
+ Group.objects.filter(
+ id__in=group_ids, status=GroupStatus.UNRESOLVED
+ ).values_list("id", flat=True)
)
+
+ else:
+ unresolved_group_ids = set()
return [ur for ur in user_reports if ur.group_id is None or ur.group_id in unresolved_group_ids]
diff --git a/src/sentry/api/invite_helper.py b/src/sentry/api/invite_helper.py
index 78da88b8b319ad..5adf1d5fcb549f 100644
--- a/src/sentry/api/invite_helper.py
+++ b/src/sentry/api/invite_helper.py
@@ -106,7 +106,7 @@ def handle_success(self):
member_joined.send_robust(
member=self.om,
organization=self.om.organization,
- sender=self.instance if self.instance else self,
+ sender=self.instance or self,
)
def handle_member_already_exists(self):
@@ -153,12 +153,13 @@ def user_authenticated(self):
@property
def member_already_exists(self):
- if not self.user_authenticated:
- return False
-
- return OrganizationMember.objects.filter(
- organization=self.om.organization, user=self.request.user
- ).exists()
+ return (
+ OrganizationMember.objects.filter(
+ organization=self.om.organization, user=self.request.user
+ ).exists()
+ if self.user_authenticated
+ else False
+ )
@property
def valid_request(self):
@@ -187,11 +188,15 @@ def accept_invite(self, user=None):
provider = None
# If SSO is required, check for valid AuthIdentity
- if provider and not provider.flags.allow_unlinked:
- # AuthIdentity has a unique constraint on provider and user
- if not AuthIdentity.objects.filter(auth_provider=provider, user=user).exists():
- self.handle_member_has_no_sso()
- return
+ if (
+ provider
+ and not provider.flags.allow_unlinked
+ and not AuthIdentity.objects.filter(
+ auth_provider=provider, user=user
+ ).exists()
+ ):
+ self.handle_member_has_no_sso()
+ return
om.set_user(user)
om.save()
diff --git a/src/sentry/api/paginator.py b/src/sentry/api/paginator.py
index fc6662742955a1..ebda2c12d4c373 100644
--- a/src/sentry/api/paginator.py
+++ b/src/sentry/api/paginator.py
@@ -65,17 +65,16 @@ def build_queryset(self, value, is_prev):
new_order_by = list(queryset.query.order_by)
new_order_by[index] = f"-{queryset.query.order_by[index]}"
queryset.query.order_by = tuple(new_order_by)
- elif ("-%s" % self.key) in queryset.query.order_by:
+ elif f"-{self.key}" in queryset.query.order_by:
if asc:
index = queryset.query.order_by.index(f"-{self.key}")
new_order_by = list(queryset.query.order_by)
new_order_by[index] = queryset.query.order_by[index][1:]
queryset.query.order_b = tuple(new_order_by)
+ elif asc:
+ queryset = queryset.order_by(self.key)
else:
- if asc:
- queryset = queryset.order_by(self.key)
- else:
- queryset = queryset.order_by("-%s" % self.key)
+ queryset = queryset.order_by(f"-{self.key}")
if value:
assert self.key
@@ -109,11 +108,7 @@ def get_result(self, limit=100, cursor=None, count_hits=False, known_hits=None,
limit = min(limit, self.max_limit)
- if cursor.value:
- cursor_value = self.value_from_cursor(cursor)
- else:
- cursor_value = 0
-
+ cursor_value = self.value_from_cursor(cursor) if cursor.value else 0
queryset = self.build_queryset(cursor_value, cursor.is_prev)
# TODO(dcramer): this does not yet work correctly for ``is_prev`` when
@@ -603,13 +598,12 @@ def value_from_cursor(self, cursor):
return datetime.fromtimestamp(float(cursor.value) / self.multiplier).replace(
tzinfo=timezone.utc
)
- else:
- value = cursor.value
- if isinstance(value, float):
- return math.floor(value) if self._is_asc(cursor.is_prev) else math.ceil(value)
- if isinstance(value, str):
- return unquote(value)
- return value
+ value = cursor.value
+ if isinstance(value, float):
+ return math.floor(value) if self._is_asc(cursor.is_prev) else math.ceil(value)
+ if isinstance(value, str):
+ return unquote(value)
+ return value
def _is_asc(self, is_prev):
return (self.desc and is_prev) or not (self.desc or is_prev)
@@ -647,8 +641,7 @@ def _build_combined_querysets(self, value, is_prev, limit, extra):
combined_querysets += list(queryset)
def _sort_combined_querysets(item):
- sort_keys = []
- sort_keys.append(self.get_item_key(item, is_prev))
+ sort_keys = [self.get_item_key(item, is_prev)]
if len(self.model_key_map.get(type(item))) > 1:
sort_keys.extend(iter(self.model_key_map.get(type(item))[1:]))
sort_keys.append(type(item).__name__)
@@ -665,11 +658,7 @@ def get_result(self, cursor=None, limit=100):
if cursor is None:
cursor = Cursor(0, 0, 0)
- if cursor.value:
- cursor_value = self.value_from_cursor(cursor)
- else:
- cursor_value = None
-
+ cursor_value = self.value_from_cursor(cursor) if cursor.value else None
limit = min(limit, MAX_LIMIT)
offset = cursor.offset
diff --git a/src/sentry/api/serializers/base.py b/src/sentry/api/serializers/base.py
index a2148202429f2a..1202548674cd9e 100644
--- a/src/sentry/api/serializers/base.py
+++ b/src/sentry/api/serializers/base.py
@@ -92,9 +92,7 @@ def __call__(
self, obj: Any, attrs: Mapping[Any, Any], user: Any, **kwargs: Any
) -> Optional[MutableMapping[str, Any]]:
"""See documentation for `serialize`."""
- if obj is None:
- return None
- return self.serialize(obj, attrs, user, **kwargs)
+ return None if obj is None else self.serialize(obj, attrs, user, **kwargs)
def get_attrs(self, item_list: List[Any], user: Any, **kwargs: Any) -> MutableMapping[Any, Any]:
"""
diff --git a/src/sentry/api/serializers/snuba.py b/src/sentry/api/serializers/snuba.py
index 064a4d236d9591..cdc2553c4364bb 100644
--- a/src/sentry/api/serializers/snuba.py
+++ b/src/sentry/api/serializers/snuba.py
@@ -93,9 +93,7 @@ def serialize_noop(organization, item_list, user, lookup):
def encoder_noop(row):
- if not row:
- return None
- return row[0]
+ return row[0] if row else None
def value_from_row(row, tagkey):
@@ -232,7 +230,7 @@ def get(cls, name):
# Simple tags don't need any special treatment
for _tag in ("transaction", "os", "os.name", "browser", "browser.name", "device", "device.family"):
- SnubaLookup(_tag, "tags[%s]" % _tag)
+ SnubaLookup(_tag, f"tags[{_tag}]")
class BaseSnubaSerializer:
diff --git a/src/sentry/constants.py b/src/sentry/constants.py
index 6d4a72154166c6..02ba4740208b03 100644
--- a/src/sentry/constants.py
+++ b/src/sentry/constants.py
@@ -317,8 +317,7 @@ def _load_platform_data() -> None:
return
for platform in data["platforms"]:
- integrations = platform.pop("integrations")
- if integrations:
+ if integrations := platform.pop("integrations"):
for integration in integrations:
integration_id = integration.pop("id")
if integration["type"] != "language":
diff --git a/src/sentry/culprit.py b/src/sentry/culprit.py
index 0bc567ea3a95af..7a10a1e8bc5d01 100644
--- a/src/sentry/culprit.py
+++ b/src/sentry/culprit.py
@@ -14,8 +14,7 @@
def generate_culprit(data):
platform = data.get("platform")
- exceptions = get_path(data, "exception", "values", filter=True)
- if exceptions:
+ if exceptions := get_path(data, "exception", "values", filter=True):
# Synthetic events no longer get a culprit
last_exception = get_path(exceptions, -1)
if get_path(last_exception, "mechanism", "synthetic"):
@@ -24,11 +23,7 @@ def generate_culprit(data):
stacktraces = [e["stacktrace"] for e in exceptions if get_path(e, "stacktrace", "frames")]
else:
stacktrace = data.get("stacktrace")
- if stacktrace and stacktrace.get("frames"):
- stacktraces = [stacktrace]
- else:
- stacktraces = None
-
+ stacktraces = [stacktrace] if stacktrace and stacktrace.get("frames") else None
culprit = None
if not culprit and stacktraces:
@@ -46,8 +41,7 @@ def get_stacktrace_culprit(stacktrace, platform):
if not frame:
continue
if frame.get("in_app"):
- culprit = get_frame_culprit(frame, platform=platform)
- if culprit:
+ if culprit := get_frame_culprit(frame, platform=platform):
return culprit
elif default is None:
default = get_frame_culprit(frame, platform=platform)
@@ -67,5 +61,5 @@ def get_frame_culprit(frame, platform):
elif platform in ("javascript", "node"):
# function and fileloc might be unicode here, so let it coerce
# to a unicode string if needed.
- return "{}({})".format(frame.get("function") or "?", fileloc)
- return "{} in {}".format(fileloc, frame.get("function") or "?")
+ return f'{frame.get("function") or "?"}({fileloc})'
+ return f'{fileloc} in {frame.get("function") or "?"}'
diff --git a/src/sentry/datascrubbing.py b/src/sentry/datascrubbing.py
index 17930b0c8547fd..c6e66d5b4ba092 100644
--- a/src/sentry/datascrubbing.py
+++ b/src/sentry/datascrubbing.py
@@ -44,12 +44,13 @@ def _decode(value):
def get_datascrubbing_settings(project):
org = project.organization
- rv = {}
-
exclude_fields_key = "sentry:safe_fields"
- rv["excludeFields"] = org.get_option(exclude_fields_key, []) + project.get_option(
- exclude_fields_key, []
- )
+ rv = {
+ "excludeFields": (
+ org.get_option(exclude_fields_key, [])
+ + project.get_option(exclude_fields_key, [])
+ )
+ }
rv["scrubData"] = org.get_option("sentry:require_scrub_data", False) or project.get_option(
"sentry:scrub_data", True
@@ -72,9 +73,7 @@ def get_datascrubbing_settings(project):
def get_all_pii_configs(project):
- # Note: This logic is duplicated in Relay store.
- pii_config = get_pii_config(project)
- if pii_config:
+ if pii_config := get_pii_config(project):
yield pii_config
yield sentry_relay.convert_datascrubbing_config(get_datascrubbing_settings(project))
@@ -149,17 +148,16 @@ def _prefix_rule_references_in_rule(custom_rules, rule_def, prefix):
if not isinstance(rule_def, dict):
return rule_def
- if rule_def.get("type") == "multiple" and rule_def.get("rules"):
- rule_def = copy.deepcopy(rule_def)
- rule_def["rules"] = list(
- f"{prefix}{x}" if x in custom_rules else x for x in rule_def["rules"]
- )
- elif (
- rule_def.get("type") == "multiple"
- and rule_def.get("rule")
- and rule_def["rule"] in custom_rules
- ):
- rule_def = copy.deepcopy(rule_def)
- rule_def["rule"] = "{}{}".format(prefix, rule_def["rule"])
+ if rule_def.get("type") == "multiple":
+ if rule_def.get("rules"):
+ rule_def = copy.deepcopy(rule_def)
+ rule_def["rules"] = [
+ f"{prefix}{x}" if x in custom_rules else x
+ for x in rule_def["rules"]
+ ]
+
+ elif rule_def.get("rule") and rule_def["rule"] in custom_rules:
+ rule_def = copy.deepcopy(rule_def)
+ rule_def["rule"] = f'{prefix}{rule_def["rule"]}'
return rule_def
diff --git a/src/sentry/http.py b/src/sentry/http.py
index fa93a6a2a3b631..a5d60b59eab01c 100644
--- a/src/sentry/http.py
+++ b/src/sentry/http.py
@@ -95,7 +95,7 @@ def safe_urlopen(
if method is None:
method = "POST" if (data or json) else "GET"
- response = session.request(
+ return session.request(
method=method,
url=url,
allow_redirects=allow_redirects,
@@ -105,8 +105,6 @@ def safe_urlopen(
**kwargs,
)
- return response
-
def safe_urlread(response):
return response.content
@@ -155,8 +153,7 @@ def fetch_file(
# lock down domains that are problematic
if domain_lock_enabled:
domain_key = get_domain_key(url)
- domain_result = cache.get(domain_key)
- if domain_result:
+ if domain_result := cache.get(domain_key):
domain_result["url"] = url
raise CannotFetch(domain_result)
diff --git a/src/sentry/reprocessing.py b/src/sentry/reprocessing.py
index fe843a68f08268..d3332fd8777818 100644
--- a/src/sentry/reprocessing.py
+++ b/src/sentry/reprocessing.py
@@ -19,10 +19,10 @@ def event_supports_reprocessing(data):
return True
elif platform not in JAVASCRIPT_PLATFORMS:
return False
- for stacktrace_info in find_stacktraces_in_data(data):
- if not stacktrace_info.platforms.isdisjoint(NATIVE_PLATFORMS):
- return True
- return False
+ return any(
+ not stacktrace_info.platforms.isdisjoint(NATIVE_PLATFORMS)
+ for stacktrace_info in find_stacktraces_in_data(data)
+ )
def get_reprocessing_revision(project, cached=True):
diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py
index 25a4e5d4885656..84d85c4bb6b1a1 100644
--- a/src/sentry/reprocessing2.py
+++ b/src/sentry/reprocessing2.py
@@ -205,9 +205,9 @@ def pull_event_data(project_id, event_id) -> ReprocessableEvent:
project_id=project_id, event_id=event_id, type__in=list(required_attachment_types)
)
)
- missing_attachment_types = required_attachment_types - {ea.type for ea in attachments}
-
- if missing_attachment_types:
+ if missing_attachment_types := required_attachment_types - {
+ ea.type for ea in attachments
+ }:
raise CannotReprocess("attachment.not_found")
return ReprocessableEvent(event=event, data=data, attachments=attachments)
diff --git a/src/sentry/sdk_updates.py b/src/sentry/sdk_updates.py
index cf547742adbcee..49b1daf573645f 100644
--- a/src/sentry/sdk_updates.py
+++ b/src/sentry/sdk_updates.py
@@ -98,7 +98,7 @@ def get_new_state(self, old_state):
new_sdk_version = self.new_sdk_version
if self.ignore_patch_version:
- new_sdk_version = ".".join(v for v in new_sdk_version.split(".")[:2])
+ new_sdk_version = ".".join(new_sdk_version.split(".")[:2])
try:
has_newer_version = LooseVersion(old_state.sdk_version) < LooseVersion(new_sdk_version)
@@ -333,10 +333,11 @@ def get_sdk_index():
The cache is filled by a regular background task (see sentry/tasks/release_registry)
"""
- if not settings.SENTRY_RELEASE_REGISTRY_BASEURL:
- return {}
-
- return cache.get(SDK_INDEX_CACHE_KEY) or {}
+ return (
+ cache.get(SDK_INDEX_CACHE_KEY) or {}
+ if settings.SENTRY_RELEASE_REGISTRY_BASEURL
+ else {}
+ )
def get_sdk_versions():
@@ -352,7 +353,7 @@ def get_sdk_versions():
def get_sdk_urls():
try:
rv = dict(settings.SDK_URLS)
- rv.update((key, info["main_docs_url"]) for (key, info) in get_sdk_index().items())
+ rv |= ((key, info["main_docs_url"]) for (key, info) in get_sdk_index().items())
return rv
except Exception:
logger.exception("sentry-release-registry.sdk-urls")