From fa5d4f2674c880903c2738297b34ae3376cb8b37 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 11 Oct 2022 12:04:33 -0600 Subject: [PATCH 01/49] Add region_slug column to organization --- engine/apps/api/serializers/organization.py | 12 +++++++++++- engine/apps/grafana_plugin/helpers/gcom.py | 3 +++ .../views/self_hosted_install.py | 1 + .../0004_organization_region_slug.py | 18 ++++++++++++++++++ .../user_management/models/organization.py | 1 + engine/apps/user_management/sync.py | 2 ++ engine/settings/base.py | 1 + 7 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 engine/apps/user_management/migrations/0004_organization_region_slug.py diff --git a/engine/apps/api/serializers/organization.py b/engine/apps/api/serializers/organization.py index 58013052a6..30241a453b 100644 --- a/engine/apps/api/serializers/organization.py +++ b/engine/apps/api/serializers/organization.py @@ -170,4 +170,14 @@ class PluginOrganizationSerializer(serializers.ModelSerializer): class Meta: model = Organization - fields = ["pk", "stack_id", "stack_slug", "grafana_url", "org_id", "org_slug", "org_title", "grafana_token"] + fields = [ + "pk", + "stack_id", + "stack_slug", + "grafana_url", + "org_id", + "org_slug", + "org_title", + "region_slug", + "grafana_token", + ] diff --git a/engine/apps/grafana_plugin/helpers/gcom.py b/engine/apps/grafana_plugin/helpers/gcom.py index 2fbdf44b5e..58577fc03d 100644 --- a/engine/apps/grafana_plugin/helpers/gcom.py +++ b/engine/apps/grafana_plugin/helpers/gcom.py @@ -57,6 +57,7 @@ def check_gcom_permission(token_string: str, context) -> Optional["GcomToken"]: org_id=str(instance_info["orgId"]), org_slug=instance_info["orgSlug"], org_title=instance_info["orgName"], + region_slug=instance_info["regionSlug"], gcom_token=token_string, gcom_token_org_last_time_synced=timezone.now(), ) @@ -64,6 +65,7 @@ def check_gcom_permission(token_string: str, context) -> Optional["GcomToken"]: organization.stack_slug = instance_info["slug"] 
organization.org_slug = instance_info["orgSlug"] organization.org_title = instance_info["orgName"] + organization.region_slug = instance_info["regionSlug"] organization.grafana_url = instance_info["url"] organization.gcom_token = token_string organization.gcom_token_org_last_time_synced = timezone.now() @@ -72,6 +74,7 @@ def check_gcom_permission(token_string: str, context) -> Optional["GcomToken"]: "stack_slug", "org_slug", "org_title", + "region_slug", "grafana_url", "gcom_token", "gcom_token_org_last_time_synced", diff --git a/engine/apps/grafana_plugin/views/self_hosted_install.py b/engine/apps/grafana_plugin/views/self_hosted_install.py index 16dbd7bc9a..f4159ea6a2 100644 --- a/engine/apps/grafana_plugin/views/self_hosted_install.py +++ b/engine/apps/grafana_plugin/views/self_hosted_install.py @@ -46,6 +46,7 @@ def post(self, request: Request) -> Response: org_id=org_id, org_slug=settings.SELF_HOSTED_SETTINGS["ORG_SLUG"], org_title=settings.SELF_HOSTED_SETTINGS["ORG_TITLE"], + region_slug=settings.SELF_HOSTED_SETTINGS["REGION_SLUG"], grafana_url=self.instance_context["grafana_url"], api_token=self.instance_context["grafana_token"], ) diff --git a/engine/apps/user_management/migrations/0004_organization_region_slug.py b/engine/apps/user_management/migrations/0004_organization_region_slug.py new file mode 100644 index 0000000000..06f026302a --- /dev/null +++ b/engine/apps/user_management/migrations/0004_organization_region_slug.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.15 on 2022-10-11 17:54 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('user_management', '0003_user_hide_phone_number'), + ] + + operations = [ + migrations.AddField( + model_name='organization', + name='region_slug', + field=models.CharField(default=None, max_length=300, null=True), + ), + ] diff --git a/engine/apps/user_management/models/organization.py b/engine/apps/user_management/models/organization.py index 
fd37ba81da..a7a8c40b7b 100644 --- a/engine/apps/user_management/models/organization.py +++ b/engine/apps/user_management/models/organization.py @@ -53,6 +53,7 @@ def _get_subscription_strategy(self): stack_slug = models.CharField(max_length=300) org_slug = models.CharField(max_length=300) org_title = models.CharField(max_length=300) + region_slug = models.CharField(max_length=300, null=True, default=None) grafana_url = models.URLField() diff --git a/engine/apps/user_management/sync.py b/engine/apps/user_management/sync.py index 7b0c91d7b7..80826c5a8f 100644 --- a/engine/apps/user_management/sync.py +++ b/engine/apps/user_management/sync.py @@ -29,6 +29,7 @@ def sync_organization(organization): "stack_slug", "org_slug", "org_title", + "region_slug", "grafana_url", "last_time_synced", "api_token_status", @@ -47,6 +48,7 @@ def sync_instance_info(organization): organization.stack_slug = instance_info["slug"] organization.org_slug = instance_info["orgSlug"] organization.org_title = instance_info["orgName"] + organization.region_slug = instance_info["regionSlug"] organization.grafana_url = instance_info["url"] organization.gcom_token_org_last_time_synced = timezone.now() diff --git a/engine/settings/base.py b/engine/settings/base.py index d9ec9f3630..cdc511f087 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -558,6 +558,7 @@ class BrokerTypes: "ORG_ID": 100, "ORG_SLUG": "self_hosted_org", "ORG_TITLE": "Self-Hosted Organization", + "REGION_SLUG": "self_hosted_region", } GRAFANA_INCIDENT_STATIC_API_KEY = os.environ.get("GRAFANA_INCIDENT_STATIC_API_KEY", None) From bc16795fa8357033dfaaa224777ae909f6ff5c7f Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 11 Oct 2022 14:47:16 -0600 Subject: [PATCH 02/49] Allow use of API keys as alternative to account auth token for Twilio credentials --- engine/apps/base/models/live_setting.py | 20 ++++++++--- engine/apps/base/utils.py | 45 +++++++++++++++++++++++-- engine/apps/twilioapp/twilio_client.py | 7 
+++- engine/settings/base.py | 2 ++ 4 files changed, 66 insertions(+), 8 deletions(-) diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index abd5cf1e1a..129d7e9416 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -35,6 +35,8 @@ class LiveSetting(models.Model): AVAILABLE_NAMES = ( "TWILIO_ACCOUNT_SID", "TWILIO_AUTH_TOKEN", + "TWILIO_API_KEY_SID", + "TWILIO_API_KEY_SECRET", "TWILIO_NUMBER", "TWILIO_VERIFY_SERVICE_SID", "TELEGRAM_TOKEN", @@ -80,14 +82,24 @@ class LiveSetting(models.Model): "after you update them." ), "TWILIO_ACCOUNT_SID": ( - "Twilio username to allow amixr send sms and make phone calls, " + "Twilio account SID/username to allow OnCall send sms and make phone calls, " "" - "more info." + "more info. Required." + ), + "TWILIO_API_KEY_SID": ( + "Twilio API key SID/username to allow OnCall send sms and make phone calls, " + "" + "more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." + ), + "TWILIO_API_KEY_SECRET": ( + "Twilio API key secret/password to allow OnCall send sms and make phone calls, " + "" + "more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_AUTH_TOKEN": ( - "Twilio password to allow amixr send sms and make calls, " + "Twilio password to allow OnCall send sms and make calls, " "" - "more info." + "more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." 
), "TWILIO_NUMBER": ( "Number from which you will receive calls and SMS, " diff --git a/engine/apps/base/utils.py b/engine/apps/base/utils.py index 0fe0d8a933..e57a78d707 100644 --- a/engine/apps/base/utils.py +++ b/engine/apps/base/utils.py @@ -50,24 +50,63 @@ def get_error(self): check_fn = getattr(self, check_fn_name) return check_fn(self.live_setting.value) + @classmethod + def _check_twilio_api_key_sid(cls, twilio_api_key_sid): + if live_settings.TWILIO_AUTH_TOKEN: + return + + try: + Client( + twilio_api_key_sid, live_settings.TWILIO_API_KEY_SECRET, live_settings.TWILIO_ACCOUNT_SID + ).api.applications.list(limit=1) + except Exception as e: + return cls._prettify_twilio_error(e) + + @classmethod + def _check_twilio_api_key_secret(cls, twilio_api_key_secret): + if live_settings.TWILIO_AUTH_TOKEN: + return + + try: + Client( + live_settings.TWILIO_API_KEY_SID, twilio_api_key_secret, live_settings.TWILIO_ACCOUNT_SID + ).api.applications.list(limit=1) + except Exception as e: + return cls._prettify_twilio_error(e) + @classmethod def _check_twilio_account_sid(cls, twilio_account_sid): try: - Client(twilio_account_sid, live_settings.TWILIO_AUTH_TOKEN).api.accounts.list(limit=1) + if live_settings.TWILIO_API_KEY_SID and live_settings.TWILIO_API_KEY_SECRET: + Client( + live_settings.TWILIO_API_KEY_SID, live_settings.TWILIO_API_KEY_SECRET, twilio_account_sid + ).api.applications.list(limit=1) + else: + Client(twilio_account_sid, live_settings.TWILIO_AUTH_TOKEN).api.applications.list(limit=1) except Exception as e: return cls._prettify_twilio_error(e) @classmethod def _check_twilio_auth_token(cls, twilio_auth_token): + if live_settings.TWILIO_API_KEY_SID and live_settings.TWILIO_API_KEY_SECRET: + return + try: - Client(live_settings.TWILIO_ACCOUNT_SID, twilio_auth_token).api.accounts.list(limit=1) + Client(live_settings.TWILIO_ACCOUNT_SID, twilio_auth_token).api.applications.list(limit=1) except Exception as e: return cls._prettify_twilio_error(e) @classmethod def 
_check_twilio_verify_service_sid(cls, twilio_verify_service_sid): try: - twilio_client = Client(live_settings.TWILIO_ACCOUNT_SID, live_settings.TWILIO_AUTH_TOKEN) + if live_settings.TWILIO_API_KEY_SID and live_settings.TWILIO_API_KEY_SECRET: + twilio_client = Client( + live_settings.TWILIO_API_KEY_SID, + live_settings.TWILIO_API_KEY_SECRET, + live_settings.TWILIO_ACCOUNT_SID, + ) + else: + twilio_client = Client(live_settings.TWILIO_ACCOUNT_SID, live_settings.TWILIO_AUTH_TOKEN) twilio_client.verify.services(twilio_verify_service_sid).rate_limits.list(limit=1) except Exception as e: return cls._prettify_twilio_error(e) diff --git a/engine/apps/twilioapp/twilio_client.py b/engine/apps/twilioapp/twilio_client.py index 007d9e7225..75d0640382 100644 --- a/engine/apps/twilioapp/twilio_client.py +++ b/engine/apps/twilioapp/twilio_client.py @@ -17,7 +17,12 @@ class TwilioClient: @property def twilio_api_client(self): - return Client(live_settings.TWILIO_ACCOUNT_SID, live_settings.TWILIO_AUTH_TOKEN) + if live_settings.TWILIO_API_KEY_SID and live_settings.TWILIO_API_KEY_SECRET: + return Client( + live_settings.TWILIO_API_KEY_SID, live_settings.TWILIO_API_KEY_SECRET, live_settings.TWILIO_ACCOUNT_SID + ) + else: + return Client(live_settings.TWILIO_ACCOUNT_SID, live_settings.TWILIO_AUTH_TOKEN) @property def twilio_number(self): diff --git a/engine/settings/base.py b/engine/settings/base.py index d9ec9f3630..9a5dd09ddf 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -56,6 +56,8 @@ GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED = getenv_boolean("GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED", default=True) GRAFANA_CLOUD_NOTIFICATIONS_ENABLED = getenv_boolean("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED", default=True) +TWILIO_API_KEY_SID = os.environ.get("TWILIO_API_KEY_SID") +TWILIO_API_KEY_SECRET = os.environ.get("TWILIO_API_KEY_SECRET") TWILIO_ACCOUNT_SID = os.environ.get("TWILIO_ACCOUNT_SID") TWILIO_AUTH_TOKEN = os.environ.get("TWILIO_AUTH_TOKEN") TWILIO_NUMBER = 
os.environ.get("TWILIO_NUMBER") From d0b851299267a8e1e091ee9205bfcb88fb222ec0 Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 12 Oct 2022 11:01:17 +0200 Subject: [PATCH 03/49] Grammar --- engine/apps/base/models/live_setting.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index 129d7e9416..770e3eb48e 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -82,24 +82,24 @@ class LiveSetting(models.Model): "after you update them." ), "TWILIO_ACCOUNT_SID": ( - "Twilio account SID/username to allow OnCall send sms and make phone calls, " + "Twilio account SID/username to allow OnCall to send SMSes and make phone calls, see " "" - "more info. Required." + "here for more info. Required." ), "TWILIO_API_KEY_SID": ( - "Twilio API key SID/username to allow OnCall send sms and make phone calls, " + "Twilio API key SID/username to allow OnCall to send SMSes and make phone calls, see" "" - "more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." + "here for more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_API_KEY_SECRET": ( - "Twilio API key secret/password to allow OnCall send sms and make phone calls, " + "Twilio API key secret/password to allow OnCall to send SMSes and make phone calls, see" "" - "more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." + "here for more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_AUTH_TOKEN": ( - "Twilio password to allow OnCall send sms and make calls, " + "Twilio password to allow OnCall to send SMSes and make calls, " "" - "more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." + "here for more info. 
Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_NUMBER": ( "Number from which you will receive calls and SMS, " From 0152c27f5e77495f3428a58398cb94227fecf5d4 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 18 Oct 2022 16:05:49 -0600 Subject: [PATCH 04/49] Re-validate all twilio settings when any change as they depend on each other --- engine/apps/api/views/live_setting.py | 18 +++++++++--------- engine/apps/base/models/live_setting.py | 25 ++++++++++++++++++++----- engine/apps/base/utils.py | 9 ++++++++- 3 files changed, 37 insertions(+), 15 deletions(-) diff --git a/engine/apps/api/views/live_setting.py b/engine/apps/api/views/live_setting.py index d2c77ab0f5..8040ee4882 100644 --- a/engine/apps/api/views/live_setting.py +++ b/engine/apps/api/views/live_setting.py @@ -38,40 +38,40 @@ def get_queryset(self): return queryset def perform_update(self, serializer): + name = serializer.instance.name old_value = serializer.instance.value new_value = serializer.validated_data["value"] super().perform_update(serializer) if new_value != old_value: - self._post_update_hook(old_value) + self._post_update_hook(name, old_value) def perform_destroy(self, instance): + name = instance.name old_value = instance.value new_value = instance.default_value super().perform_destroy(instance) if new_value != old_value: - self._post_update_hook(old_value) + self._post_update_hook(name, old_value) - def _post_update_hook(self, old_value): - instance = self.get_object() - - if instance.name == "TELEGRAM_TOKEN": + def _post_update_hook(self, name, old_value): + if name == "TELEGRAM_TOKEN": self._reset_telegram_integration(old_token=old_value) register_telegram_webhook.delay() - if instance.name == "TELEGRAM_WEBHOOK_HOST": + if name == "TELEGRAM_WEBHOOK_HOST": register_telegram_webhook.delay() - if instance.name in ["SLACK_CLIENT_OAUTH_ID", "SLACK_CLIENT_OAUTH_SECRET"]: + if name in ["SLACK_CLIENT_OAUTH_ID", 
"SLACK_CLIENT_OAUTH_SECRET"]: organization = self.request.auth.organization slack_team_identity = organization.slack_team_identity if slack_team_identity is not None: unpopulate_slack_user_identities.delay(organization_pk=organization.pk, force=True) - if instance.name == "GRAFANA_CLOUD_ONCALL_TOKEN": + if name == "GRAFANA_CLOUD_ONCALL_TOKEN": from apps.oss_installation.models import CloudConnector CloudConnector.remove_sync() diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index 770e3eb48e..f69c25a35a 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -87,22 +87,22 @@ class LiveSetting(models.Model): "here for more info. Required." ), "TWILIO_API_KEY_SID": ( - "Twilio API key SID/username to allow OnCall to send SMSes and make phone calls, see" + "Twilio API key SID/username to allow OnCall to send SMSes and make phone calls, see " "" "here for more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_API_KEY_SECRET": ( - "Twilio API key secret/password to allow OnCall to send SMSes and make phone calls, see" + "Twilio API key secret/password to allow OnCall to send SMSes and make phone calls, see " "" "here for more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_AUTH_TOKEN": ( - "Twilio password to allow OnCall to send SMSes and make calls, " + "Twilio password to allow OnCall to send SMSes and make calls, see " "" "here for more info. Either (TWILIO_API_KEY_SID + TWILIO_API_KEY_SECRET) or TWILIO_AUTH_TOKEN is required." ), "TWILIO_NUMBER": ( - "Number from which you will receive calls and SMS, " + "Number from which you will receive calls and SMSes, " "more info." 
), "TWILIO_VERIFY_SERVICE_SID": ( @@ -140,6 +140,8 @@ class LiveSetting(models.Model): SECRET_SETTING_NAMES = ( "TWILIO_ACCOUNT_SID", "TWILIO_AUTH_TOKEN", + "TWILIO_API_KEY_SID", + "TWILIO_API_KEY_SECRET", "TWILIO_VERIFY_SERVICE_SID", "SENDGRID_API_KEY", "SENDGRID_SECRET_KEY", @@ -186,8 +188,21 @@ def populate_settings_if_needed(cls): settings_in_db = cls.objects.filter(name__in=cls.AVAILABLE_NAMES).values_list("name", flat=True) setting_names_to_populate = set(cls.AVAILABLE_NAMES) - set(settings_in_db) + revalidate_twilio = False for setting_name in setting_names_to_populate: - cls.objects.create(name=setting_name, value=cls._get_setting_from_setting_file(setting_name)) + setting = cls.objects.create(name=setting_name, value=cls._get_setting_from_setting_file(setting_name)) + if setting.name.startswith("TWILIO"): + revalidate_twilio = True + + if revalidate_twilio: + cls.revalidate_twilio() + + @classmethod + def revalidate_twilio(cls): + twilio_settings = cls.objects.filter(name__startswith="TWILIO") + for setting in twilio_settings: + setting.error = LiveSettingValidator(live_setting=setting).get_error() + setting.save(update_fields=["error"]) @staticmethod def _get_setting_from_setting_file(setting_name): diff --git a/engine/apps/base/utils.py b/engine/apps/base/utils.py index e57a78d707..377dd443db 100644 --- a/engine/apps/base/utils.py +++ b/engine/apps/base/utils.py @@ -34,13 +34,20 @@ def __setattr__(self, key, value): class LiveSettingValidator: + + EMPTY_VALID_NAMES = ( + "TWILIO_AUTH_TOKEN", + "TWILIO_API_KEY_SID", + "TWILIO_API_KEY_SECRET", + ) + def __init__(self, live_setting): self.live_setting = live_setting def get_error(self): check_fn_name = f"_check_{self.live_setting.name.lower()}" - if self.live_setting.value is None: + if self.live_setting.value is None and self.live_setting.name not in self.EMPTY_VALID_NAMES: return "Empty" # skip validation if there's no handler for it From 363f01935604ba65177feeef7974a42f07a5b04e Mon Sep 17 00:00:00 2001 
From: Michael Derynck Date: Tue, 18 Oct 2022 16:12:57 -0600 Subject: [PATCH 05/49] Update CHANGELOG.md --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01e271574f..b2e3d12d55 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Change Log +## v1.0.41 (TBD) +- Allow use of API keys as alternative to account auth token for Twilio + ## v1.0.40 (2022-10-05) - Improved database and celery backends support - Added script to import PagerDuty users to Grafana From f7ee35d1bdd2e73f2b9c09c2d78624dfc0b25627 Mon Sep 17 00:00:00 2001 From: Maxim Date: Thu, 20 Oct 2022 14:08:58 +0100 Subject: [PATCH 06/49] use render props in WithPermissionConrtol --- .../WithPermissionControl.tsx | 33 +++++++++++++++++++ grafana-plugin/src/pages/test/Test.tsx | 29 ++++------------ grafana-plugin/src/state/rootBaseStore.ts | 26 ++++++++------- 3 files changed, 53 insertions(+), 35 deletions(-) create mode 100644 grafana-plugin/src/containers/WithPermissionControl2/WithPermissionControl.tsx diff --git a/grafana-plugin/src/containers/WithPermissionControl2/WithPermissionControl.tsx b/grafana-plugin/src/containers/WithPermissionControl2/WithPermissionControl.tsx new file mode 100644 index 0000000000..b507495651 --- /dev/null +++ b/grafana-plugin/src/containers/WithPermissionControl2/WithPermissionControl.tsx @@ -0,0 +1,33 @@ +import React, { ReactElement, useMemo } from 'react'; + +import { Tooltip } from '@grafana/ui'; +import { observer } from 'mobx-react'; + +import { useStore } from 'state/useStore'; +import { UserAction } from 'state/userAction'; + +interface WithPermissionControlProps { + userAction: UserAction; + children: (disabled?: boolean) => ReactElement; +} + +export const WithPermissionControl = observer((props: WithPermissionControlProps) => { + const { userAction, children } = props; + + const store = useStore(); + + const disabled = !store.isUserActionAllowed(userAction); + + const element = useMemo(() => 
children(disabled), [disabled]); + + return disabled ? ( + + {element} + + ) : ( + element + ); +}); diff --git a/grafana-plugin/src/pages/test/Test.tsx b/grafana-plugin/src/pages/test/Test.tsx index 34562e4cfd..ab40b2d026 100644 --- a/grafana-plugin/src/pages/test/Test.tsx +++ b/grafana-plugin/src/pages/test/Test.tsx @@ -1,10 +1,11 @@ import React from 'react'; +import { Button } from '@grafana/ui'; import cn from 'classnames/bind'; import { observer } from 'mobx-react'; -import GSelect from 'containers/GSelect/GSelect'; -import { PRIVATE_CHANNEL_NAME } from 'models/slack_channel/slack_channel.config'; +import { WithPermissionControl } from 'containers/WithPermissionControl2/WithPermissionControl'; +import { UserAction } from 'state/userAction'; import { withMobXProviderContext } from 'state/withStore'; import styles from './Test.module.css'; @@ -13,33 +14,15 @@ const cx = cn.bind(styles); @observer class Test extends React.Component { - async componentDidMount() {} - - componentDidUpdate() {} - render() { return (
- + + {(disabled) => } +
); } - - slackChannelChangeHandler = (value: any) => { - console.log(value); - }; } export default withMobXProviderContext(Test); diff --git a/grafana-plugin/src/state/rootBaseStore.ts b/grafana-plugin/src/state/rootBaseStore.ts index ce4ebb1dd6..4062749bd2 100644 --- a/grafana-plugin/src/state/rootBaseStore.ts +++ b/grafana-plugin/src/state/rootBaseStore.ts @@ -36,8 +36,9 @@ import { getPluginSyncStatus, installPlugin, startPluginSync, - SYNC_STATUS_RETRY_LIMIT, syncStatusDelay, - updateGrafanaToken + SYNC_STATUS_RETRY_LIMIT, + syncStatusDelay, + updateGrafanaToken, } from './plugin'; import { UserAction } from './userAction'; @@ -190,28 +191,29 @@ export class RootBaseStore { } async waitForSyncStatus(retryCount = 0) { - if (retryCount > SYNC_STATUS_RETRY_LIMIT) { this.retrySync = true; return; } - getPluginSyncStatus().then((get_sync_response) => { - if (get_sync_response.hasOwnProperty('token_ok')) { - this.finishSync(get_sync_response); - } else { - syncStatusDelay(retryCount + 1) - .then(() => this.waitForSyncStatus(retryCount + 1)) - } - }).catch((e) => { + getPluginSyncStatus() + .then((get_sync_response) => { + if (get_sync_response.hasOwnProperty('token_ok')) { + this.finishSync(get_sync_response); + } else { + syncStatusDelay(retryCount + 1).then(() => this.waitForSyncStatus(retryCount + 1)); + } + }) + .catch((e) => { this.handleSyncException(e); }); - } async setupPlugin(meta: AppPluginMeta) { this.resetStatusToDefault(); + console.log(meta); + if (!meta.jsonData?.onCallApiUrl) { this.pluginIsInitialized = false; return; From 0a1a9ab4d80e1127aeb03ce4cbeb892c9379dc79 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Thu, 20 Oct 2022 09:45:48 -0600 Subject: [PATCH 07/49] Add region object --- .../user_management/models/organization.py | 6 ++++ engine/apps/user_management/models/region.py | 33 +++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 engine/apps/user_management/models/region.py diff --git 
a/engine/apps/user_management/models/organization.py b/engine/apps/user_management/models/organization.py index 5c283b9819..0b0f06620a 100644 --- a/engine/apps/user_management/models/organization.py +++ b/engine/apps/user_management/models/organization.py @@ -54,6 +54,12 @@ def _get_subscription_strategy(self): org_slug = models.CharField(max_length=300) org_title = models.CharField(max_length=300) region_slug = models.CharField(max_length=300, null=True, default=None) + migration_destination = models.ForeignKey( + to="user_management.Region", + on_delete=models.SET_NULL, + related_name="regions", + default=None, + ) grafana_url = models.URLField() diff --git a/engine/apps/user_management/models/region.py b/engine/apps/user_management/models/region.py new file mode 100644 index 0000000000..1b9b2cca50 --- /dev/null +++ b/engine/apps/user_management/models/region.py @@ -0,0 +1,33 @@ +from django.conf import settings +from django.core.validators import MinLengthValidator +from django.db import models + +from common.public_primary_keys import generate_public_primary_key, increase_public_primary_key_length + + +def generate_public_primary_key_for_region(): + prefix = "R" + new_public_primary_key = generate_public_primary_key(prefix) + + failure_counter = 0 + while Region.objects.filter(public_primary_key=new_public_primary_key).exists(): + new_public_primary_key = increase_public_primary_key_length( + failure_counter=failure_counter, prefix=prefix, model_name="Region" + ) + failure_counter += 1 + + return new_public_primary_key + + +class Region(models.Model): + public_primary_key = models.CharField( + max_length=20, + validators=[MinLengthValidator(settings.PUBLIC_PRIMARY_KEY_MIN_LENGTH + 1)], + unique=True, + default=generate_public_primary_key_for_region, + ) + + name = models.CharField(max_length=300) + slug = models.CharField(max_length=300, unique=True) + oncall_backend_url = models.URLField() + is_default = models.BooleanField(default=False) From 
ef37e6a6fd85b4ff62aa8e351d76b788fb94471f Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Thu, 20 Oct 2022 10:11:36 -0600 Subject: [PATCH 08/49] Revalidate all when any settings change instead of making a special case for twilio --- engine/apps/base/models/live_setting.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index f69c25a35a..e07886f81d 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -187,20 +187,18 @@ def get_setting(cls, setting_name): def populate_settings_if_needed(cls): settings_in_db = cls.objects.filter(name__in=cls.AVAILABLE_NAMES).values_list("name", flat=True) setting_names_to_populate = set(cls.AVAILABLE_NAMES) - set(settings_in_db) + if len(setting_names_to_populate) == 0: + return - revalidate_twilio = False for setting_name in setting_names_to_populate: - setting = cls.objects.create(name=setting_name, value=cls._get_setting_from_setting_file(setting_name)) - if setting.name.startswith("TWILIO"): - revalidate_twilio = True + cls.objects.create(name=setting_name, value=cls._get_setting_from_setting_file(setting_name)) - if revalidate_twilio: - cls.revalidate_twilio() + cls.revalidate_settings() @classmethod - def revalidate_twilio(cls): - twilio_settings = cls.objects.filter(name__startswith="TWILIO") - for setting in twilio_settings: + def revalidate_settings(cls): + settings_to_validate = cls.objects.all() + for setting in settings_to_validate: setting.error = LiveSettingValidator(live_setting=setting).get_error() setting.save(update_fields=["error"]) From febe1b2185eb89f41ec77449f8f153bb45d443d4 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Thu, 20 Oct 2022 15:04:58 -0600 Subject: [PATCH 09/49] Add basic organization moved exception handling and middleware --- engine/apps/auth_token/auth.py | 11 +++++ .../mixins/alert_channel_defining_mixin.py | 5 +++ 
engine/apps/user_management/middlewares.py | 41 +++++++++++++++++++ .../migrations/0005_auto_20221020_1845.py | 32 +++++++++++++++ .../migrations/0006_alter_region_slug.py | 18 ++++++++ .../apps/user_management/models/__init__.py | 1 + .../user_management/models/organization.py | 1 + engine/apps/user_management/models/region.py | 20 ++++++++- engine/settings/base.py | 1 + 9 files changed, 129 insertions(+), 1 deletion(-) create mode 100644 engine/apps/user_management/middlewares.py create mode 100644 engine/apps/user_management/migrations/0005_auto_20221020_1845.py create mode 100644 engine/apps/user_management/migrations/0006_alter_region_slug.py diff --git a/engine/apps/auth_token/auth.py b/engine/apps/auth_token/auth.py index 551116c648..1c92cebbfc 100644 --- a/engine/apps/auth_token/auth.py +++ b/engine/apps/auth_token/auth.py @@ -18,6 +18,7 @@ from .models import ApiAuthToken, PluginAuthToken, ScheduleExportAuthToken, SlackAuthToken, UserScheduleExportAuthToken from .models.mobile_app_auth_token import MobileAppAuthToken from .models.mobile_app_verification_token import MobileAppVerificationToken +from ..user_management.models.region import OrganizationMovedException logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) @@ -46,6 +47,10 @@ def authenticate_credentials(self, token): auth_token = self.model.validate_token_string(token) except InvalidToken: raise exceptions.AuthenticationFailed("Invalid token.") + + if auth_token.organization.migration_destination is not None: + raise OrganizationMovedException(auth_token.organization) + return auth_token.user, auth_token @@ -167,6 +172,9 @@ def authenticate_credentials( except InvalidToken: raise exceptions.AuthenticationFailed("Invalid token.") + if auth_token.organization.migration_destination is not None: + raise OrganizationMovedException(auth_token.organization) + if auth_token.schedule.public_primary_key != public_primary_key: raise exceptions.AuthenticationFailed("Invalid schedule export 
token for schedule") @@ -197,6 +205,9 @@ def authenticate_credentials( except InvalidToken: raise exceptions.AuthenticationFailed("Invalid token") + if auth_token.organization.migration_destination is not None: + raise OrganizationMovedException(auth_token.organization) + if auth_token.user.public_primary_key != public_primary_key: raise exceptions.AuthenticationFailed("Invalid schedule export token for user") diff --git a/engine/apps/integrations/mixins/alert_channel_defining_mixin.py b/engine/apps/integrations/mixins/alert_channel_defining_mixin.py index 3e1cc25709..8bd79f51f6 100644 --- a/engine/apps/integrations/mixins/alert_channel_defining_mixin.py +++ b/engine/apps/integrations/mixins/alert_channel_defining_mixin.py @@ -7,6 +7,8 @@ from django.core.exceptions import PermissionDenied from django.db import OperationalError +from apps.user_management.models.region import OrganizationMovedException + logger = logging.getLogger(__name__) @@ -64,6 +66,9 @@ def dispatch(self, *args, **kwargs): logger.info("Cache is empty!") raise + if alert_receive_channel.organization.migration_destination is not None: + raise OrganizationMovedException(alert_receive_channel.organization) + del kwargs["alert_channel_key"] kwargs["alert_receive_channel"] = alert_receive_channel diff --git a/engine/apps/user_management/middlewares.py b/engine/apps/user_management/middlewares.py new file mode 100644 index 0000000000..0fd6d4f3ca --- /dev/null +++ b/engine/apps/user_management/middlewares.py @@ -0,0 +1,41 @@ +import logging +import re + +import requests +from django.http import HttpResponse +from django.utils.deprecation import MiddlewareMixin + +from apps.user_management.models.region import OrganizationMovedException +from common.api_helpers.utils import create_engine_url + +logger = logging.getLogger(__name__) + + +class OrganizationMovedMiddleware(MiddlewareMixin): + def process_exception(self, request, exception): + if isinstance(exception, OrganizationMovedException): + region = 
exception.organization.migration_destination + url = create_engine_url(request.path, override_base=region.oncall_backend_url) + if request.META['QUERY_STRING']: + url = f"{url}?{request.META['QUERY_STRING']}" + + regex = re.compile('^HTTP_') + headers = dict( + (regex.sub('', header), value) for (header, value) in request.META.items() if header.startswith('HTTP_') + ) + + if request.method == "GET": + response = requests.get(url, headers=headers) + elif request.method == "POST": + response = requests.post(url, data=request.body, headers=headers) + elif request.method == "PUT": + response = requests.put(url, data=request.body, headers=headers) + elif request.method == "DELETE": + response = requests.delete(url, headers=headers) + elif request.method == "OPTIONS": + response = requests.options(url, headers=headers) + + response.raise_for_status() + + return HttpResponse(response.content, status=response.status_code) + diff --git a/engine/apps/user_management/migrations/0005_auto_20221020_1845.py b/engine/apps/user_management/migrations/0005_auto_20221020_1845.py new file mode 100644 index 0000000000..7cfe249b49 --- /dev/null +++ b/engine/apps/user_management/migrations/0005_auto_20221020_1845.py @@ -0,0 +1,32 @@ +# Generated by Django 3.2.15 on 2022-10-20 18:45 + +import apps.user_management.models.region +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('user_management', '0004_organization_region_slug'), + ] + + operations = [ + migrations.CreateModel( + name='Region', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('public_primary_key', models.CharField(default=apps.user_management.models.region.generate_public_primary_key_for_region, max_length=20, unique=True, validators=[django.core.validators.MinLengthValidator(13)])), + ('name', models.CharField(max_length=300)), + 
('slug', models.CharField(max_length=300, unique=True)), + ('oncall_backend_url', models.URLField()), + ('is_default', models.BooleanField(default=False)), + ], + ), + migrations.AddField( + model_name='organization', + name='migration_destination', + field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='regions', to='user_management.region'), + ), + ] diff --git a/engine/apps/user_management/migrations/0006_alter_region_slug.py b/engine/apps/user_management/migrations/0006_alter_region_slug.py new file mode 100644 index 0000000000..780cc9b73f --- /dev/null +++ b/engine/apps/user_management/migrations/0006_alter_region_slug.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.15 on 2022-10-20 18:46 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('user_management', '0005_auto_20221020_1845'), + ] + + operations = [ + migrations.AlterField( + model_name='region', + name='slug', + field=models.CharField(max_length=50, unique=True), + ), + ] diff --git a/engine/apps/user_management/models/__init__.py b/engine/apps/user_management/models/__init__.py index 95ed32ab19..e2bcd4c7f0 100644 --- a/engine/apps/user_management/models/__init__.py +++ b/engine/apps/user_management/models/__init__.py @@ -1,3 +1,4 @@ from .user import User # noqa: F401, isort: skip from .organization import Organization # noqa: F401 +from .region import Region # noqa: F401 from .team import Team # noqa: F401 diff --git a/engine/apps/user_management/models/organization.py b/engine/apps/user_management/models/organization.py index 0b0f06620a..c1ba8316ed 100644 --- a/engine/apps/user_management/models/organization.py +++ b/engine/apps/user_management/models/organization.py @@ -59,6 +59,7 @@ def _get_subscription_strategy(self): on_delete=models.SET_NULL, related_name="regions", default=None, + null=True, ) grafana_url = models.URLField() diff --git 
a/engine/apps/user_management/models/region.py b/engine/apps/user_management/models/region.py index 1b9b2cca50..b41f422076 100644 --- a/engine/apps/user_management/models/region.py +++ b/engine/apps/user_management/models/region.py @@ -1,10 +1,24 @@ +import logging + from django.conf import settings from django.core.validators import MinLengthValidator from django.db import models +from rest_framework.request import Request +from rest_framework.response import Response +from apps.user_management.models import Organization from common.public_primary_keys import generate_public_primary_key, increase_public_primary_key_length +logger = logging.getLogger(__name__) + + +class OrganizationMovedException(Exception): + + def __init__(self, organization: Organization): + self.organization = organization + + def generate_public_primary_key_for_region(): prefix = "R" new_public_primary_key = generate_public_primary_key(prefix) @@ -19,6 +33,10 @@ def generate_public_primary_key_for_region(): return new_public_primary_key +def redirect_organization_request(organization: Organization, request: Request): + logger.info("**** Redirect! 
****") + + class Region(models.Model): public_primary_key = models.CharField( max_length=20, @@ -28,6 +46,6 @@ class Region(models.Model): ) name = models.CharField(max_length=300) - slug = models.CharField(max_length=300, unique=True) + slug = models.CharField(max_length=50, unique=True) oncall_backend_url = models.URLField() is_default = models.BooleanField(default=False) diff --git a/engine/settings/base.py b/engine/settings/base.py index b8d4e9cb09..95c090de89 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -237,6 +237,7 @@ class DatabaseTypes: "social_django.middleware.SocialAuthExceptionMiddleware", "apps.social_auth.middlewares.SocialAuthAuthCanceledExceptionMiddleware", "apps.integrations.middlewares.IntegrationExceptionMiddleware", + "apps.user_management.middlewares.OrganizationMovedMiddleware", ] LOG_REQUEST_ID_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT" From 724278fbc88d81fa80304cb69b3b3f29e8a03c36 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Fri, 21 Oct 2022 14:03:19 -0600 Subject: [PATCH 10/49] Regenerate migrations after complete to consolidate --- .../0004_organization_region_slug.py | 18 ----------- .../migrations/0005_auto_20221020_1845.py | 32 ------------------- .../migrations/0006_alter_region_slug.py | 18 ----------- 3 files changed, 68 deletions(-) delete mode 100644 engine/apps/user_management/migrations/0004_organization_region_slug.py delete mode 100644 engine/apps/user_management/migrations/0005_auto_20221020_1845.py delete mode 100644 engine/apps/user_management/migrations/0006_alter_region_slug.py diff --git a/engine/apps/user_management/migrations/0004_organization_region_slug.py b/engine/apps/user_management/migrations/0004_organization_region_slug.py deleted file mode 100644 index 06f026302a..0000000000 --- a/engine/apps/user_management/migrations/0004_organization_region_slug.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 3.2.15 on 2022-10-11 17:54 - -from django.db import migrations, models - - 
-class Migration(migrations.Migration): - - dependencies = [ - ('user_management', '0003_user_hide_phone_number'), - ] - - operations = [ - migrations.AddField( - model_name='organization', - name='region_slug', - field=models.CharField(default=None, max_length=300, null=True), - ), - ] diff --git a/engine/apps/user_management/migrations/0005_auto_20221020_1845.py b/engine/apps/user_management/migrations/0005_auto_20221020_1845.py deleted file mode 100644 index 7cfe249b49..0000000000 --- a/engine/apps/user_management/migrations/0005_auto_20221020_1845.py +++ /dev/null @@ -1,32 +0,0 @@ -# Generated by Django 3.2.15 on 2022-10-20 18:45 - -import apps.user_management.models.region -import django.core.validators -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('user_management', '0004_organization_region_slug'), - ] - - operations = [ - migrations.CreateModel( - name='Region', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('public_primary_key', models.CharField(default=apps.user_management.models.region.generate_public_primary_key_for_region, max_length=20, unique=True, validators=[django.core.validators.MinLengthValidator(13)])), - ('name', models.CharField(max_length=300)), - ('slug', models.CharField(max_length=300, unique=True)), - ('oncall_backend_url', models.URLField()), - ('is_default', models.BooleanField(default=False)), - ], - ), - migrations.AddField( - model_name='organization', - name='migration_destination', - field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='regions', to='user_management.region'), - ), - ] diff --git a/engine/apps/user_management/migrations/0006_alter_region_slug.py b/engine/apps/user_management/migrations/0006_alter_region_slug.py deleted file mode 100644 index 780cc9b73f..0000000000 --- 
a/engine/apps/user_management/migrations/0006_alter_region_slug.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 3.2.15 on 2022-10-20 18:46 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('user_management', '0005_auto_20221020_1845'), - ] - - operations = [ - migrations.AlterField( - model_name='region', - name='slug', - field=models.CharField(max_length=50, unique=True), - ), - ] From 5955b2a81c9312849c6b096860006abc0a21debf Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Mon, 24 Oct 2022 11:15:58 -0600 Subject: [PATCH 11/49] Remove unused function --- engine/apps/user_management/models/region.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/engine/apps/user_management/models/region.py b/engine/apps/user_management/models/region.py index b41f422076..538f673706 100644 --- a/engine/apps/user_management/models/region.py +++ b/engine/apps/user_management/models/region.py @@ -33,10 +33,6 @@ def generate_public_primary_key_for_region(): return new_public_primary_key -def redirect_organization_request(organization: Organization, request: Request): - logger.info("**** Redirect! 
****") - - class Region(models.Model): public_primary_key = models.CharField( max_length=20, From 37825059ff16cff0922a48f90988728238e897b2 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Mon, 24 Oct 2022 21:25:32 -0600 Subject: [PATCH 12/49] Add region sync and reverse proxy for migration --- engine/apps/auth_token/auth.py | 2 +- engine/apps/grafana_plugin/helpers/client.py | 3 + engine/apps/grafana_plugin/helpers/gcom.py | 13 ++++ engine/apps/grafana_plugin/tasks/sync.py | 16 ++++- engine/apps/user_management/middlewares.py | 15 +++-- .../migrations/0004_auto_20221025_0316.py | 33 ++++++++++ .../user_management/models/organization.py | 2 + engine/apps/user_management/models/region.py | 61 ++++++++++--------- 8 files changed, 109 insertions(+), 36 deletions(-) create mode 100644 engine/apps/user_management/migrations/0004_auto_20221025_0316.py diff --git a/engine/apps/auth_token/auth.py b/engine/apps/auth_token/auth.py index 1c92cebbfc..cfa4931363 100644 --- a/engine/apps/auth_token/auth.py +++ b/engine/apps/auth_token/auth.py @@ -11,6 +11,7 @@ from apps.grafana_plugin.helpers.gcom import check_token from apps.user_management.models import User from apps.user_management.models.organization import Organization +from apps.user_management.models.region import OrganizationMovedException from common.constants.role import Role from .constants import SCHEDULE_EXPORT_TOKEN_NAME, SLACK_AUTH_TOKEN_NAME @@ -18,7 +19,6 @@ from .models import ApiAuthToken, PluginAuthToken, ScheduleExportAuthToken, SlackAuthToken, UserScheduleExportAuthToken from .models.mobile_app_auth_token import MobileAppAuthToken from .models.mobile_app_verification_token import MobileAppVerificationToken -from ..user_management.models.region import OrganizationMovedException logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) diff --git a/engine/apps/grafana_plugin/helpers/client.py b/engine/apps/grafana_plugin/helpers/client.py index 2dcc49886d..f232719e22 100644 --- 
a/engine/apps/grafana_plugin/helpers/client.py +++ b/engine/apps/grafana_plugin/helpers/client.py @@ -143,3 +143,6 @@ def is_stack_deleted(self, stack_id: str) -> bool: def post_active_users(self, body): return self.api_post("app-active-users", body) + + def get_stack_regions(self): + return self.api_get("stack-regions") diff --git a/engine/apps/grafana_plugin/helpers/gcom.py b/engine/apps/grafana_plugin/helpers/gcom.py index 58577fc03d..407f70e58e 100644 --- a/engine/apps/grafana_plugin/helpers/gcom.py +++ b/engine/apps/grafana_plugin/helpers/gcom.py @@ -112,3 +112,16 @@ def get_active_instance_ids() -> Tuple[Optional[set], bool]: def get_deleted_instance_ids() -> Tuple[Optional[set], bool]: return get_instance_ids(GcomAPIClient.DELETED_INSTANCE_QUERY) + + +def get_stack_regions() -> Tuple[Optional[set], bool]: + if not settings.GRAFANA_COM_API_TOKEN or settings.LICENSE != settings.CLOUD_LICENSE_NAME: + return None, False + + client = GcomAPIClient(settings.GRAFANA_COM_API_TOKEN) + regions, status = client.get_stack_regions() + + if not regions or "items" not in regions: + return None, True + + return regions["items"], True diff --git a/engine/apps/grafana_plugin/tasks/sync.py b/engine/apps/grafana_plugin/tasks/sync.py index a9571335d5..ed58968d39 100644 --- a/engine/apps/grafana_plugin/tasks/sync.py +++ b/engine/apps/grafana_plugin/tasks/sync.py @@ -5,8 +5,9 @@ from django.utils import timezone from apps.grafana_plugin.helpers import GcomAPIClient -from apps.grafana_plugin.helpers.gcom import get_active_instance_ids, get_deleted_instance_ids +from apps.grafana_plugin.helpers.gcom import get_active_instance_ids, get_deleted_instance_ids, get_stack_regions from apps.user_management.models import Organization +from apps.user_management.models.region import sync_regions from apps.user_management.sync import cleanup_organization, sync_organization from common.custom_celery_tasks import shared_dedicated_queue_retry_task @@ -103,3 +104,16 @@ def 
start_cleanup_deleted_organizations(): @shared_dedicated_queue_retry_task(autoretry_for=(Exception,), max_retries=1) def cleanup_organization_async(organization_pk): cleanup_organization(organization_pk) + + +@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), max_retries=1) +def start_sync_regions(): + regions, is_cloud_configured = get_stack_regions() + if not is_cloud_configured: + return + + if not regions: + logger.warning("Did not find any stack-regions!") + return + + sync_regions(regions) diff --git a/engine/apps/user_management/middlewares.py b/engine/apps/user_management/middlewares.py index 0fd6d4f3ca..8e51699947 100644 --- a/engine/apps/user_management/middlewares.py +++ b/engine/apps/user_management/middlewares.py @@ -4,6 +4,7 @@ import requests from django.http import HttpResponse from django.utils.deprecation import MiddlewareMixin +from rest_framework.status import HTTP_500_INTERNAL_SERVER_ERROR from apps.user_management.models.region import OrganizationMovedException from common.api_helpers.utils import create_engine_url @@ -15,13 +16,18 @@ class OrganizationMovedMiddleware(MiddlewareMixin): def process_exception(self, request, exception): if isinstance(exception, OrganizationMovedException): region = exception.organization.migration_destination + if not region.oncall_backend_url: + return HttpResponse( + "Organization migration destination undefined URL", status=HTTP_500_INTERNAL_SERVER_ERROR + ) + url = create_engine_url(request.path, override_base=region.oncall_backend_url) - if request.META['QUERY_STRING']: + if request.META["QUERY_STRING"]: url = f"{url}?{request.META['QUERY_STRING']}" - regex = re.compile('^HTTP_') + regex = re.compile("^HTTP_") headers = dict( - (regex.sub('', header), value) for (header, value) in request.META.items() if header.startswith('HTTP_') + (regex.sub("", header), value) for (header, value) in request.META.items() if header.startswith("HTTP_") ) if request.method == "GET": @@ -35,7 +41,4 @@ def 
process_exception(self, request, exception): elif request.method == "OPTIONS": response = requests.options(url, headers=headers) - response.raise_for_status() - return HttpResponse(response.content, status=response.status_code) - diff --git a/engine/apps/user_management/migrations/0004_auto_20221025_0316.py b/engine/apps/user_management/migrations/0004_auto_20221025_0316.py new file mode 100644 index 0000000000..8d1b15c868 --- /dev/null +++ b/engine/apps/user_management/migrations/0004_auto_20221025_0316.py @@ -0,0 +1,33 @@ +# Generated by Django 3.2.15 on 2022-10-25 03:16 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('user_management', '0003_user_hide_phone_number'), + ] + + operations = [ + migrations.CreateModel( + name='Region', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=300)), + ('slug', models.CharField(max_length=50, unique=True)), + ('oncall_backend_url', models.URLField(null=True)), + ], + ), + migrations.AddField( + model_name='organization', + name='region_slug', + field=models.CharField(default=None, max_length=300, null=True), + ), + migrations.AddField( + model_name='organization', + name='migration_destination', + field=models.ForeignKey(db_column='migration_destination_slug', default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='regions', to='user_management.region', to_field='slug'), + ), + ] diff --git a/engine/apps/user_management/models/organization.py b/engine/apps/user_management/models/organization.py index c1ba8316ed..9cac00a8bc 100644 --- a/engine/apps/user_management/models/organization.py +++ b/engine/apps/user_management/models/organization.py @@ -56,6 +56,8 @@ def _get_subscription_strategy(self): region_slug = models.CharField(max_length=300, null=True, default=None) migration_destination = 
models.ForeignKey( to="user_management.Region", + to_field="slug", + db_column="migration_destination_slug", on_delete=models.SET_NULL, related_name="regions", default=None, diff --git a/engine/apps/user_management/models/region.py b/engine/apps/user_management/models/region.py index 538f673706..a1d0b3124f 100644 --- a/engine/apps/user_management/models/region.py +++ b/engine/apps/user_management/models/region.py @@ -1,47 +1,52 @@ import logging -from django.conf import settings -from django.core.validators import MinLengthValidator +from django.apps import apps from django.db import models -from rest_framework.request import Request -from rest_framework.response import Response from apps.user_management.models import Organization -from common.public_primary_keys import generate_public_primary_key, increase_public_primary_key_length - logger = logging.getLogger(__name__) -class OrganizationMovedException(Exception): +def sync_regions(regions: list[dict]): + Region = apps.get_model("user_management", "Region") + gcom_regions = {region["slug"]: region for region in regions} + existing_region_slugs = set(Region.objects.all().values_list("slug", flat=True)) - def __init__(self, organization: Organization): - self.organization = organization + # create new regions + regions_to_create = tuple( + Region( + name=region["name"], + slug=region["slug"], + oncall_backend_url=region["oncallApiUrl"], + ) + for region in gcom_regions.values() + if region["slug"] not in existing_region_slugs + ) + Region.objects.bulk_create(regions_to_create, batch_size=5000) + # delete excess regions + regions_to_delete = existing_region_slugs - gcom_regions.keys() + Region.objects.filter(slug__in=regions_to_delete).delete() -def generate_public_primary_key_for_region(): - prefix = "R" - new_public_primary_key = generate_public_primary_key(prefix) + # update existing regions + regions_to_update = [] + for region in Region.objects.filter(slug__in=existing_region_slugs): + gcom_region = 
gcom_regions[region.slug] + if region.name != gcom_region["name"] or region.oncall_backend_url != gcom_region["oncallApiUrl"]: + region.name = gcom_region["name"] + region.oncall_backend_url = gcom_region["oncallApiUrl"] + regions_to_update.append(region) - failure_counter = 0 - while Region.objects.filter(public_primary_key=new_public_primary_key).exists(): - new_public_primary_key = increase_public_primary_key_length( - failure_counter=failure_counter, prefix=prefix, model_name="Region" - ) - failure_counter += 1 + Region.objects.bulk_update(regions_to_update, ["name", "oncall_backend_url"], batch_size=5000) - return new_public_primary_key +class OrganizationMovedException(Exception): + def __init__(self, organization: Organization): + self.organization = organization -class Region(models.Model): - public_primary_key = models.CharField( - max_length=20, - validators=[MinLengthValidator(settings.PUBLIC_PRIMARY_KEY_MIN_LENGTH + 1)], - unique=True, - default=generate_public_primary_key_for_region, - ) +class Region(models.Model): name = models.CharField(max_length=300) slug = models.CharField(max_length=50, unique=True) - oncall_backend_url = models.URLField() - is_default = models.BooleanField(default=False) + oncall_backend_url = models.URLField(null=True) From a912a786de608b3b131856624768d2e2886cc0cf Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Thu, 27 Oct 2022 15:40:46 -0600 Subject: [PATCH 13/49] Add tests --- engine/apps/user_management/middlewares.py | 28 +-- .../apps/user_management/tests/factories.py | 11 +- .../apps/user_management/tests/test_region.py | 219 ++++++++++++++++++ engine/conftest.py | 22 +- 4 files changed, 265 insertions(+), 15 deletions(-) create mode 100644 engine/apps/user_management/tests/test_region.py diff --git a/engine/apps/user_management/middlewares.py b/engine/apps/user_management/middlewares.py index 8e51699947..ff6aab569a 100644 --- a/engine/apps/user_management/middlewares.py +++ 
b/engine/apps/user_management/middlewares.py @@ -4,7 +4,7 @@ import requests from django.http import HttpResponse from django.utils.deprecation import MiddlewareMixin -from rest_framework.status import HTTP_500_INTERNAL_SERVER_ERROR +from rest_framework import status from apps.user_management.models.region import OrganizationMovedException from common.api_helpers.utils import create_engine_url @@ -18,7 +18,7 @@ def process_exception(self, request, exception): region = exception.organization.migration_destination if not region.oncall_backend_url: return HttpResponse( - "Organization migration destination undefined URL", status=HTTP_500_INTERNAL_SERVER_ERROR + "Organization migration destination undefined URL", status=status.HTTP_500_INTERNAL_SERVER_ERROR ) url = create_engine_url(request.path, override_base=region.oncall_backend_url) @@ -30,15 +30,17 @@ def process_exception(self, request, exception): (regex.sub("", header), value) for (header, value) in request.META.items() if header.startswith("HTTP_") ) - if request.method == "GET": - response = requests.get(url, headers=headers) - elif request.method == "POST": - response = requests.post(url, data=request.body, headers=headers) - elif request.method == "PUT": - response = requests.put(url, data=request.body, headers=headers) - elif request.method == "DELETE": - response = requests.delete(url, headers=headers) - elif request.method == "OPTIONS": - response = requests.options(url, headers=headers) - + response = self.make_request(request.method, url, headers, request.body) return HttpResponse(response.content, status=response.status_code) + + def make_request(self, method, url, headers, body): + if method == "GET": + return requests.get(url, headers=headers) + elif method == "POST": + return requests.post(url, data=body, headers=headers) + elif method == "PUT": + return requests.put(url, data=body, headers=headers) + elif method == "DELETE": + return requests.delete(url, headers=headers) + elif method == 
"OPTIONS": + return requests.options(url, headers=headers) diff --git a/engine/apps/user_management/tests/factories.py b/engine/apps/user_management/tests/factories.py index 79b20231df..c66099c0ca 100644 --- a/engine/apps/user_management/tests/factories.py +++ b/engine/apps/user_management/tests/factories.py @@ -1,6 +1,6 @@ import factory -from apps.user_management.models import Organization, Team, User +from apps.user_management.models import Organization, Region, Team, User from common.utils import UniqueFaker @@ -31,3 +31,12 @@ class TeamFactory(factory.DjangoModelFactory): class Meta: model = Team + + +class RegionFactory(factory.DjangoModelFactory): + name = factory.Faker("country") + slug = factory.Faker("slug") + oncall_backend_url = factory.Faker("url") + + class Meta: + model = Region diff --git a/engine/apps/user_management/tests/test_region.py b/engine/apps/user_management/tests/test_region.py new file mode 100644 index 0000000000..201ca1952e --- /dev/null +++ b/engine/apps/user_management/tests/test_region.py @@ -0,0 +1,219 @@ +from unittest.mock import patch + +import pytest +from django.http import HttpResponse +from django.urls import reverse +from rest_framework import status +from rest_framework.test import APIClient + +from apps.alerts.models import AlertReceiveChannel +from apps.auth_token.auth import ApiTokenAuthentication, ScheduleExportAuthentication, UserScheduleExportAuthentication +from apps.auth_token.models import ScheduleExportAuthToken, UserScheduleExportAuthToken +from apps.integrations.views import AlertManagerAPIView +from apps.schedules.models import OnCallScheduleWeb +from apps.user_management.models.region import OrganizationMovedException + + +@pytest.mark.django_db +def test_organization_region_delete( + make_organization_and_region, +): + organization, region = make_organization_and_region() + organization.save() + + organization.refresh_from_db() + assert organization.migration_destination.slug == region.slug + region.delete() 
+ + organization.refresh_from_db() + assert organization.migration_destination is None + + +@pytest.mark.django_db +def test_integration_does_not_raise_exception_organization_moved( + make_organization, + make_alert_receive_channel, +): + organization = make_organization() + alert_receive_channel = make_alert_receive_channel( + organization=organization, + integration=AlertReceiveChannel.INTEGRATION_ALERTMANAGER, + ) + + try: + am = AlertManagerAPIView() + am.dispatch(alert_channel_key=alert_receive_channel.token) + assert False + except OrganizationMovedException: + assert False + except Exception: + assert True + + +@pytest.mark.django_db +def test_integration_raises_exception_organization_moved( + make_organization_and_region, + make_alert_receive_channel, +): + organization, region = make_organization_and_region() + organization.save() + + alert_receive_channel = make_alert_receive_channel( + organization=organization, + integration=AlertReceiveChannel.INTEGRATION_ALERTMANAGER, + ) + + try: + am = AlertManagerAPIView() + am.dispatch(alert_channel_key=alert_receive_channel.token) + assert False + except OrganizationMovedException as e: + assert e.organization == organization + + +@patch("apps.user_management.middlewares.OrganizationMovedMiddleware.make_request") +@pytest.mark.django_db +def test_organization_moved_middleware( + mocked_make_request, + make_organization_and_region, + make_alert_receive_channel, +): + organization, region = make_organization_and_region() + organization.save() + + alert_receive_channel = make_alert_receive_channel( + organization=organization, + integration=AlertReceiveChannel.INTEGRATION_ALERTMANAGER, + ) + + expected_message = bytes(f"Redirected to {region.oncall_backend_url}", 'utf-8') + mocked_make_request.return_value = HttpResponse(expected_message, status=status.HTTP_200_OK) + + client = APIClient() + url = reverse("integrations:alertmanager", kwargs={"alert_channel_key": alert_receive_channel.token}) + + data = {"value": 
"test"} + response = client.post(url, data, format="json") + assert mocked_make_request.called + assert response.content == expected_message + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.django_db +def test_api_token_does_not_raise_exception_organization_moved( + make_organization, + make_user_for_organization, + make_public_api_token, +): + organization = make_organization() + + admin = make_user_for_organization(organization) + _, token = make_public_api_token(admin, organization) + + try: + api_auth = ApiTokenAuthentication() + api_auth.authenticate_credentials(token) + assert True + except OrganizationMovedException: + assert False + + +@pytest.mark.django_db +def test_api_token_raises_exception_organization_moved( + make_organization_and_region, + make_user_for_organization, + make_public_api_token, +): + organization, region = make_organization_and_region() + organization.save() + + admin = make_user_for_organization(organization) + _, token = make_public_api_token(admin, organization) + + try: + api_auth = ApiTokenAuthentication() + api_auth.authenticate_credentials(token) + assert False + except OrganizationMovedException as e: + assert e.organization == organization + + +@pytest.mark.django_db +def test_schedule_export_token_does_not_raise_exception_organization_moved( + make_organization, + make_user_for_organization, + make_public_api_token, + make_schedule, +): + organization = make_organization() + schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb) + + admin = make_user_for_organization(organization) + _, token = ScheduleExportAuthToken.create_auth_token(admin, organization, schedule) + + try: + schedule_auth = ScheduleExportAuthentication() + schedule_auth.authenticate_credentials(token, schedule.public_primary_key) + assert True + except OrganizationMovedException: + assert False + + +@pytest.mark.django_db +def test_schedule_export_token_raises_exception_organization_moved( + 
make_organization_and_region, + make_user_for_organization, + make_public_api_token, + make_schedule, +): + organization, region = make_organization_and_region() + organization.save() + schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb) + + admin = make_user_for_organization(organization) + _, token = ScheduleExportAuthToken.create_auth_token(admin, organization, schedule) + + try: + schedule_auth = ScheduleExportAuthentication() + schedule_auth.authenticate_credentials(token, schedule.public_primary_key) + assert False + except OrganizationMovedException as e: + assert e.organization == organization + + +@pytest.mark.django_db +def test_user_schedule_export_token_does_not_raise_exception_organization_moved( + make_organization, + make_user_for_organization, + make_public_api_token, +): + organization = make_organization() + admin = make_user_for_organization(organization) + _, token = UserScheduleExportAuthToken.create_auth_token(admin, organization) + + try: + user_schedule_auth = UserScheduleExportAuthentication() + user_schedule_auth.authenticate_credentials(token, admin.public_primary_key) + assert True + except OrganizationMovedException: + assert False + + +@pytest.mark.django_db +def test_user_schedule_export_token_raises_exception_organization_moved( + make_organization_and_region, + make_user_for_organization, + make_public_api_token, +): + organization, region = make_organization_and_region() + organization.save() + + admin = make_user_for_organization(organization) + _, token = UserScheduleExportAuthToken.create_auth_token(admin, organization) + + try: + user_schedule_auth = UserScheduleExportAuthentication() + user_schedule_auth.authenticate_credentials(token, admin.public_primary_key) + assert False + except OrganizationMovedException as e: + assert e.organization == organization diff --git a/engine/conftest.py b/engine/conftest.py index 8291d921c3..4e88b798d8 100644 --- a/engine/conftest.py +++ b/engine/conftest.py @@ -70,7 
+70,7 @@ ) from apps.twilioapp.tests.factories import PhoneCallFactory, SMSFactory from apps.user_management.models.user import User, listen_for_user_model_save -from apps.user_management.tests.factories import OrganizationFactory, TeamFactory, UserFactory +from apps.user_management.tests.factories import OrganizationFactory, RegionFactory, TeamFactory, UserFactory from common.constants.role import Role register(OrganizationFactory) @@ -666,3 +666,23 @@ def load_slack_urls(settings): reload(sys.modules[urlconf]) else: import_module(urlconf) + + +@pytest.fixture +def make_region(): + def _make_region(**kwargs): + region = RegionFactory(**kwargs) + return region + + return _make_region + + +@pytest.fixture +def make_organization_and_region(make_organization, make_region): + def _make_organization_and_region(): + organization = make_organization() + region = make_region() + organization.migration_destination = region + return organization, region + + return _make_organization_and_region From 942e30bdb3c9ab1278eb9438627089a4bf6f0c2d Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Thu, 27 Oct 2022 15:47:53 -0600 Subject: [PATCH 14/49] Lint --- engine/apps/user_management/tests/test_region.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/engine/apps/user_management/tests/test_region.py b/engine/apps/user_management/tests/test_region.py index 201ca1952e..02756b9eb7 100644 --- a/engine/apps/user_management/tests/test_region.py +++ b/engine/apps/user_management/tests/test_region.py @@ -86,7 +86,7 @@ def test_organization_moved_middleware( integration=AlertReceiveChannel.INTEGRATION_ALERTMANAGER, ) - expected_message = bytes(f"Redirected to {region.oncall_backend_url}", 'utf-8') + expected_message = bytes(f"Redirected to {region.oncall_backend_url}", "utf-8") mocked_make_request.return_value = HttpResponse(expected_message, status=status.HTTP_200_OK) client = APIClient() From 6267e31b22e43c12d43b4dadb68c2a6d78a19cbc Mon Sep 17 00:00:00 2001 From: 
Michael Derynck Date: Fri, 28 Oct 2022 15:45:51 -0600 Subject: [PATCH 15/49] Check id instead of object to avoid unnecessary query --- engine/apps/auth_token/auth.py | 6 +++--- .../integrations/mixins/alert_channel_defining_mixin.py | 2 +- engine/apps/user_management/models/organization.py | 4 ++++ 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/engine/apps/auth_token/auth.py b/engine/apps/auth_token/auth.py index cfa4931363..6802246973 100644 --- a/engine/apps/auth_token/auth.py +++ b/engine/apps/auth_token/auth.py @@ -48,7 +48,7 @@ def authenticate_credentials(self, token): except InvalidToken: raise exceptions.AuthenticationFailed("Invalid token.") - if auth_token.organization.migration_destination is not None: + if auth_token.organization.is_moved: raise OrganizationMovedException(auth_token.organization) return auth_token.user, auth_token @@ -172,7 +172,7 @@ def authenticate_credentials( except InvalidToken: raise exceptions.AuthenticationFailed("Invalid token.") - if auth_token.organization.migration_destination is not None: + if auth_token.organization.is_moved: raise OrganizationMovedException(auth_token.organization) if auth_token.schedule.public_primary_key != public_primary_key: @@ -205,7 +205,7 @@ def authenticate_credentials( except InvalidToken: raise exceptions.AuthenticationFailed("Invalid token") - if auth_token.organization.migration_destination is not None: + if auth_token.organization.is_moved: raise OrganizationMovedException(auth_token.organization) if auth_token.user.public_primary_key != public_primary_key: diff --git a/engine/apps/integrations/mixins/alert_channel_defining_mixin.py b/engine/apps/integrations/mixins/alert_channel_defining_mixin.py index 8bd79f51f6..0a867595a2 100644 --- a/engine/apps/integrations/mixins/alert_channel_defining_mixin.py +++ b/engine/apps/integrations/mixins/alert_channel_defining_mixin.py @@ -66,7 +66,7 @@ def dispatch(self, *args, **kwargs): logger.info("Cache is empty!") raise - if 
alert_receive_channel.organization.migration_destination is not None: + if alert_receive_channel.organization.is_moved: raise OrganizationMovedException(alert_receive_channel.organization) del kwargs["alert_channel_key"] diff --git a/engine/apps/user_management/models/organization.py b/engine/apps/user_management/models/organization.py index a48db58a98..51fc5392c8 100644 --- a/engine/apps/user_management/models/organization.py +++ b/engine/apps/user_management/models/organization.py @@ -283,3 +283,7 @@ def insight_logs_serialized(self): @property def insight_logs_metadata(self): return {} + + @property + def is_moved(self): + return self.migration_destination_id is not None From e019c70f671c9a6562a84f86787d1e95b7a3dc51 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 1 Nov 2022 18:22:50 -0600 Subject: [PATCH 16/49] Handle validation on update, remove redundant get_error call --- engine/apps/api/views/live_setting.py | 1 + engine/apps/base/models/live_setting.py | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/engine/apps/api/views/live_setting.py b/engine/apps/api/views/live_setting.py index 8040ee4882..23830fe450 100644 --- a/engine/apps/api/views/live_setting.py +++ b/engine/apps/api/views/live_setting.py @@ -46,6 +46,7 @@ def perform_update(self, serializer): if new_value != old_value: self._post_update_hook(name, old_value) + LiveSetting.revalidate_settings() def perform_destroy(self, instance): name = instance.name diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index c6ac28f0e8..0b41e47b04 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -198,7 +198,6 @@ def populate_settings_if_needed(cls): def revalidate_settings(cls): settings_to_validate = cls.objects.all() for setting in settings_to_validate: - setting.error = LiveSettingValidator(live_setting=setting).get_error() setting.save(update_fields=["error"]) @staticmethod From 
a331a3f392de5cb6042bccd057172ef09a1ea570 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 1 Nov 2022 18:24:44 -0600 Subject: [PATCH 17/49] Rename to validate_settings --- engine/apps/api/views/live_setting.py | 2 +- engine/apps/base/models/live_setting.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/engine/apps/api/views/live_setting.py b/engine/apps/api/views/live_setting.py index 23830fe450..57566f5208 100644 --- a/engine/apps/api/views/live_setting.py +++ b/engine/apps/api/views/live_setting.py @@ -46,7 +46,7 @@ def perform_update(self, serializer): if new_value != old_value: self._post_update_hook(name, old_value) - LiveSetting.revalidate_settings() + LiveSetting.validate_settings() def perform_destroy(self, instance): name = instance.name diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index 0b41e47b04..75219a00ef 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -192,10 +192,10 @@ def populate_settings_if_needed(cls): for setting_name in setting_names_to_populate: cls.objects.create(name=setting_name, value=cls._get_setting_from_setting_file(setting_name)) - cls.revalidate_settings() + cls.validate_settings() @classmethod - def revalidate_settings(cls): + def validate_settings(cls): settings_to_validate = cls.objects.all() for setting in settings_to_validate: setting.save(update_fields=["error"]) From 71c8d17c78572cb346a461cb12359339811c528f Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 1 Nov 2022 18:29:28 -0600 Subject: [PATCH 18/49] Changelog cleanup --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2092d0a62a..6dbcecb89e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -42,7 +42,6 @@ - Add personal email notifications - Bug fixes ->>>>>>> dev ## v1.0.40 (2022-10-05) From fd55acfea4ae01382753b41b7aa2d23478fd2111 Mon Sep 17 00:00:00 2001 From: Yulia Shanyrova Date: 
Wed, 2 Nov 2022 10:30:00 +0100 Subject: [PATCH 19/49] live settings availability check has been added --- .../DefaultPageLayout/DefaultPageLayout.helpers.tsx | 12 +++++++++--- .../DefaultPageLayout/DefaultPageLayout.tsx | 2 +- .../parts/tabs/TelegramInfo/TelegramInfo.tsx | 8 +++++--- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx index 983597c332..a89dc45128 100644 --- a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx +++ b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx @@ -2,10 +2,12 @@ import React from 'react'; import PluginLink from 'components/PluginLink/PluginLink'; import { Team } from 'models/team/team.types'; +import { RootStore } from 'state'; +import { AppFeature } from 'state/features'; import { SlackError } from './DefaultPageLayout.types'; -export function getSlackMessage(slackError: SlackError, team: Team) { +export function getSlackMessage(slackError: SlackError, team: Team, store: RootStore) { if (slackError === SlackError.WRONG_WORKSPACE) { return ( <> @@ -30,8 +32,12 @@ export function getSlackMessage(slackError: SlackError, team: Team) { return ( <> An error has occurred with Slack authentication.{' '} - Check ENV variables related to Slack and try again - please. + {store.hasFeature(AppFeature.LiveSettings) && ( + <> + Check ENV variables related to Slack and try + again please. 
+ + )} ); } diff --git a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx index 95d6d253c4..d9e395ff0d 100644 --- a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx +++ b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx @@ -79,7 +79,7 @@ const DefaultPageLayout: FC = observer((props) => { // @ts-ignore title="Slack integration warning" > - {getSlackMessage(showSlackInstallAlert, store.teamStore.currentTeam)} + {getSlackMessage(showSlackInstallAlert, store.teamStore.currentTeam, store)} )} {currentTeam?.banner.title != null && !getItem(currentTeam?.banner.title) && ( diff --git a/grafana-plugin/src/containers/UserSettings/parts/tabs/TelegramInfo/TelegramInfo.tsx b/grafana-plugin/src/containers/UserSettings/parts/tabs/TelegramInfo/TelegramInfo.tsx index f07508865a..3c186429f4 100644 --- a/grafana-plugin/src/containers/UserSettings/parts/tabs/TelegramInfo/TelegramInfo.tsx +++ b/grafana-plugin/src/containers/UserSettings/parts/tabs/TelegramInfo/TelegramInfo.tsx @@ -95,9 +95,11 @@ const TelegramInfo = observer((_props: TelegramInfoProps) => { - - - + {store.hasFeature(AppFeature.LiveSettings) && ( + + + + )} )} From c01e7d202b8cae167afab12ba9a764ecfeb3b412 Mon Sep 17 00:00:00 2001 From: Yulia Shanyrova Date: Wed, 2 Nov 2022 12:47:52 +0100 Subject: [PATCH 20/49] Changes after review --- .../DefaultPageLayout/DefaultPageLayout.helpers.tsx | 6 ++---- .../src/containers/DefaultPageLayout/DefaultPageLayout.tsx | 7 ++++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx index a89dc45128..0f05c0d89a 100644 --- a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx +++ b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.helpers.tsx @@ 
-2,12 +2,10 @@ import React from 'react'; import PluginLink from 'components/PluginLink/PluginLink'; import { Team } from 'models/team/team.types'; -import { RootStore } from 'state'; -import { AppFeature } from 'state/features'; import { SlackError } from './DefaultPageLayout.types'; -export function getSlackMessage(slackError: SlackError, team: Team, store: RootStore) { +export function getSlackMessage(slackError: SlackError, team: Team, hasLiveSettingsFeature: boolean) { if (slackError === SlackError.WRONG_WORKSPACE) { return ( <> @@ -32,7 +30,7 @@ export function getSlackMessage(slackError: SlackError, team: Team, store: RootS return ( <> An error has occurred with Slack authentication.{' '} - {store.hasFeature(AppFeature.LiveSettings) && ( + {hasLiveSettingsFeature && ( <> Check ENV variables related to Slack and try again please. diff --git a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx index d9e395ff0d..d40b3a26d5 100644 --- a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx +++ b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx @@ -9,6 +9,7 @@ import { observer } from 'mobx-react'; import PluginLink from 'components/PluginLink/PluginLink'; import { getIfChatOpsConnected } from 'containers/DefaultPageLayout/helper'; +import { AppFeature } from 'state/features'; import { useStore } from 'state/useStore'; import { UserAction } from 'state/userAction'; import { GRAFANA_LICENSE_OSS } from 'utils/consts'; @@ -79,7 +80,11 @@ const DefaultPageLayout: FC = observer((props) => { // @ts-ignore title="Slack integration warning" > - {getSlackMessage(showSlackInstallAlert, store.teamStore.currentTeam, store)} + {getSlackMessage( + showSlackInstallAlert, + store.teamStore.currentTeam, + store.hasFeature(AppFeature.LiveSettings) + )} )} {currentTeam?.banner.title != null && !getItem(currentTeam?.banner.title) && ( From 
da30c9c9eca7e9c71f44674bf29fb34028d14465 Mon Sep 17 00:00:00 2001 From: Maxim Date: Thu, 3 Nov 2022 13:08:29 +0000 Subject: [PATCH 21/49] auto adjust shift end, add overflow hidden, rotationtutorial stub --- .../containers/Rotation/Rotation.module.css | 49 ++++++-- .../src/containers/Rotation/Rotation.tsx | 2 + .../containers/Rotation/RotationTutorial.tsx | 107 ++++++++++++++++++ .../RotationForm/ScheduleOverrideForm.tsx | 18 ++- .../containers/Rotations/ScheduleFinal.tsx | 7 +- .../UsersTimezones/UsersTimezones.module.css | 1 + .../src/pages/schedule/Schedule.tsx | 26 ++--- 7 files changed, 179 insertions(+), 31 deletions(-) create mode 100644 grafana-plugin/src/containers/Rotation/RotationTutorial.tsx diff --git a/grafana-plugin/src/containers/Rotation/Rotation.module.css b/grafana-plugin/src/containers/Rotation/Rotation.module.css index 4fbcd226a5..dac98383d2 100644 --- a/grafana-plugin/src/containers/Rotation/Rotation.module.css +++ b/grafana-plugin/src/containers/Rotation/Rotation.module.css @@ -1,6 +1,7 @@ .root { transition: background-color 300ms; min-height: 28px; + overflow-x: hidden; } .loader { @@ -11,6 +12,10 @@ width: 100%; } +.root:first-child { + padding-top: 26px; +} + .root:last-child { padding-bottom: 26px; } @@ -24,11 +29,7 @@ flex-direction: column; gap: 5px; padding-bottom: 4px; - overflow: hidden; -} - -.root:first-child .timeline { - padding-top: 26px; + position: relative; } .root:last-child .timeline { @@ -42,14 +43,14 @@ opacity: 1; } -.slots__animate { - transition: transform 500ms ease; -} - .slots__transparent { opacity: 0.5; } +.slots__animate { + transition: transform 500ms ease; +} + .current-time { position: absolute; left: 450px; @@ -65,7 +66,33 @@ cursor: pointer; /* background: #5f505633; - border: 1px dashed #5c474d; - color: rgba(209, 14, 92, 0.5); */ + border: 1px dashed #5c474d; + color: rgba(209, 14, 92, 0.5); */ margin: 0 2px; } + +.slots--tutorial { + position: absolute; +} + +.pointer { + position: absolute; + top: -9px; 
+} + +.tutorial-slot { + width: 175px; + height: 28px; + background: rgba(61, 113, 217, 0.15); + + /* opacity: 0.15; */ + + /* background: var(--background-primary); */ + border-radius: 2px; + margin: 0 1px; + padding: 4px; +} + +.tutorial-slot--active { + box-shadow: var(--shadows-z3); +} diff --git a/grafana-plugin/src/containers/Rotation/Rotation.tsx b/grafana-plugin/src/containers/Rotation/Rotation.tsx index 0cb126e3e0..82effbfe41 100644 --- a/grafana-plugin/src/containers/Rotation/Rotation.tsx +++ b/grafana-plugin/src/containers/Rotation/Rotation.tsx @@ -9,6 +9,7 @@ import { Schedule, Event } from 'models/schedule/schedule.types'; import { Timezone } from 'models/timezone/timezone.types'; import { getLabel } from './Rotation.helpers'; +import RotationTutorial from './RotationTutorial'; import styles from './Rotation.module.css'; @@ -73,6 +74,7 @@ const Rotation: FC = (props) => { return (
+ {/**/} {events ? ( events.length ? (
= (props) => { + const { startMoment, days = 7 /* shiftStart, shiftEnd, rotationStart*/ } = props; + + const shiftStart = dayjs(startMoment); + const shiftEnd = dayjs(startMoment).add(1, 'days'); + const rotationStart = dayjs(startMoment).add(1, 'days'); + + const duration = shiftEnd.diff(shiftStart, 'seconds'); + + const events = useMemo(() => { + const events = []; + for (let i = 0; i < days; i++) { + events.push({ + start: dayjs(shiftStart).add(i, 'days'), + end: dayjs(shiftStart).add(duration, 'seconds').add(i, 'days'), + }); + } + return events; + }, []); + + const base = 60 * 60 * 24 * 7; + + const diff = dayjs(rotationStart).diff(startMoment, 'seconds'); + + const currentTimeX = diff / base; + + const x = useMemo(() => { + if (!events || !events.length) { + return 0; + } + + const firstEvent = events[0]; + const firstShiftOffset = dayjs(firstEvent.start).diff(startMoment, 'seconds'); + const base = 60 * 60 * 24 * days; + + return firstShiftOffset / base; + }, [events]); + + return ( +
+ + {events.map((event, index) => { + const duration = event.end.diff(event.start, 'seconds'); + const width = duration / base; + return ; + })} +
+ ); +}; + +const TutorialSlot = (props: { style: React.CSSProperties }) => { + const { style } = props; + + return
; +}; + +const Pointer = (props: { className: string; style: React.CSSProperties }) => { + const { className, style } = props; + + return ( + + + + + + + + + + ); +}; + +export default RotationTutorial; diff --git a/grafana-plugin/src/containers/RotationForm/ScheduleOverrideForm.tsx b/grafana-plugin/src/containers/RotationForm/ScheduleOverrideForm.tsx index 7e235df012..6a7b4ef7ed 100644 --- a/grafana-plugin/src/containers/RotationForm/ScheduleOverrideForm.tsx +++ b/grafana-plugin/src/containers/RotationForm/ScheduleOverrideForm.tsx @@ -55,10 +55,23 @@ const ScheduleOverrideForm: FC = (props) => { const store = useStore(); + const [shiftStart, setShiftStart] = useState(shiftMoment); + const [shiftEnd, setShiftEnd] = useState(shiftMoment.add(24, 'hours')); + const [offsetTop, setOffsetTop] = useState(0); const [isOpen, setIsOpen] = useState(false); + const updateShiftStart = useCallback( + (value) => { + const diff = shiftEnd.diff(shiftStart); + + setShiftStart(value); + setShiftEnd(value.add(diff)); + }, + [shiftStart, shiftEnd] + ); + useEffect(() => { if (isOpen) { waitForElement('#overrides-list').then((elm) => { @@ -76,9 +89,6 @@ const ScheduleOverrideForm: FC = (props) => { } }, [isOpen]); - const [shiftStart, setShiftStart] = useState(shiftMoment); - const [shiftEnd, setShiftEnd] = useState(shiftMoment.add(24, 'hours')); - const [userGroups, setUserGroups] = useState([[]]); const renderUser = (userPk: User['pk']) => { @@ -206,7 +216,7 @@ const ScheduleOverrideForm: FC = (props) => { } > - + void; + disabled?: boolean; } interface ScheduleOverridesState { @@ -108,9 +109,13 @@ class ScheduleFinal extends Component { - const { onClick } = this.props; + const { onClick, disabled } = this.props; return () => { + if (disabled) { + return; + } + onClick(shiftId); }; }; diff --git a/grafana-plugin/src/containers/UsersTimezones/UsersTimezones.module.css b/grafana-plugin/src/containers/UsersTimezones/UsersTimezones.module.css index 863519c24e..ab7947210d 100644 --- 
a/grafana-plugin/src/containers/UsersTimezones/UsersTimezones.module.css +++ b/grafana-plugin/src/containers/UsersTimezones/UsersTimezones.module.css @@ -41,6 +41,7 @@ .users { position: relative; height: 76px; + overflow: hidden; } .avatar-group { diff --git a/grafana-plugin/src/pages/schedule/Schedule.tsx b/grafana-plugin/src/pages/schedule/Schedule.tsx index b49cb1bf74..ed8242658b 100644 --- a/grafana-plugin/src/pages/schedule/Schedule.tsx +++ b/grafana-plugin/src/pages/schedule/Schedule.tsx @@ -16,7 +16,7 @@ import ScheduleFinal from 'containers/Rotations/ScheduleFinal'; import ScheduleOverrides from 'containers/Rotations/ScheduleOverrides'; import ScheduleForm from 'containers/ScheduleForm/ScheduleForm'; import UsersTimezones from 'containers/UsersTimezones/UsersTimezones'; -import { Shift } from 'models/schedule/schedule.types'; +import { ScheduleType, Shift } from 'models/schedule/schedule.types'; import { Timezone } from 'models/timezone/timezone.types'; import { WithStoreProps } from 'state/types'; import { withMobXProviderContext } from 'state/withStore'; @@ -96,6 +96,8 @@ class SchedulePage extends React.Component const users = store.userStore.getSearchResult().results; const schedule = scheduleStore.items[scheduleId]; + const disabled = schedule?.type !== ScheduleType.API || shiftIdToShowRotationForm || shiftIdToShowOverridesForm; + return ( <>
@@ -132,6 +134,11 @@ class SchedulePage extends React.Component
+ {schedule?.type !== ScheduleType.API && ( + + Ical and API/Terraform schedules are read-only + + )}
currentTimezone={currentTimezone} startMoment={startMoment} onClick={this.handleShowForm} + disabled={disabled} /> onDelete={this.handleDeleteRotation} shiftIdToShowRotationForm={shiftIdToShowRotationForm} onShowRotationForm={this.handleShowRotationForm} - disabled={shiftIdToShowRotationForm || shiftIdToShowOverridesForm} + disabled={disabled} /> onDelete={this.handleDeleteOverride} shiftIdToShowRotationForm={shiftIdToShowOverridesForm} onShowRotationForm={this.handleShowOverridesForm} - disabled={shiftIdToShowRotationForm || shiftIdToShowOverridesForm} + disabled={disabled} />
@@ -233,22 +241,10 @@ class SchedulePage extends React.Component }; handleShowRotationForm = (shiftId: Shift['id'] | 'new') => { - const { shiftIdToShowRotationForm, shiftIdToShowOverridesForm } = this.state; - - if (shiftId && (shiftIdToShowRotationForm || shiftIdToShowOverridesForm)) { - return; - } - this.setState({ shiftIdToShowRotationForm: shiftId }); }; handleShowOverridesForm = (shiftId: Shift['id'] | 'new') => { - const { shiftIdToShowRotationForm, shiftIdToShowOverridesForm } = this.state; - - if (shiftId && (shiftIdToShowRotationForm || shiftIdToShowOverridesForm)) { - return; - } - this.setState({ shiftIdToShowOverridesForm: shiftId }); }; From 693abdf7aa1be70b74bf1fa6cbc41e601c1f46c2 Mon Sep 17 00:00:00 2001 From: Maxim Date: Thu, 3 Nov 2022 16:05:57 +0000 Subject: [PATCH 22/49] add rotation live tutorial --- .../containers/Rotation/Rotation.module.css | 7 +- .../src/containers/Rotation/Rotation.tsx | 6 +- .../containers/Rotation/RotationTutorial.tsx | 64 +++++++++++-------- .../RotationForm/DateTimePicker.tsx | 12 +++- .../containers/RotationForm/RotationForm.tsx | 39 ++++++++++- .../src/containers/Rotations/Rotations.tsx | 1 + .../src/models/schedule/schedule.ts | 20 +++++- .../src/models/schedule/schedule.types.ts | 9 +++ 8 files changed, 117 insertions(+), 41 deletions(-) diff --git a/grafana-plugin/src/containers/Rotation/Rotation.module.css b/grafana-plugin/src/containers/Rotation/Rotation.module.css index dac98383d2..20be10ea23 100644 --- a/grafana-plugin/src/containers/Rotation/Rotation.module.css +++ b/grafana-plugin/src/containers/Rotation/Rotation.module.css @@ -73,21 +73,18 @@ .slots--tutorial { position: absolute; + background: rgba(61, 113, 217, 0.15); } .pointer { position: absolute; top: -9px; + transition: left 500ms ease; } .tutorial-slot { width: 175px; height: 28px; - background: rgba(61, 113, 217, 0.15); - - /* opacity: 0.15; */ - - /* background: var(--background-primary); */ border-radius: 2px; margin: 0 1px; padding: 4px; diff 
--git a/grafana-plugin/src/containers/Rotation/Rotation.tsx b/grafana-plugin/src/containers/Rotation/Rotation.tsx index 82effbfe41..6664186267 100644 --- a/grafana-plugin/src/containers/Rotation/Rotation.tsx +++ b/grafana-plugin/src/containers/Rotation/Rotation.tsx @@ -5,7 +5,7 @@ import cn from 'classnames/bind'; import dayjs from 'dayjs'; import ScheduleSlot from 'containers/ScheduleSlot/ScheduleSlot'; -import { Schedule, Event } from 'models/schedule/schedule.types'; +import { Schedule, Event, RotationFormLiveParams } from 'models/schedule/schedule.types'; import { Timezone } from 'models/timezone/timezone.types'; import { getLabel } from './Rotation.helpers'; @@ -26,6 +26,7 @@ interface RotationProps { onClick?: (moment: dayjs.Dayjs) => void; days?: number; transparent?: boolean; + tutorialParams?: RotationFormLiveParams; } const Rotation: FC = (props) => { @@ -40,6 +41,7 @@ const Rotation: FC = (props) => { onClick, days = 7, transparent = false, + tutorialParams, } = props; const [animate, _setAnimate] = useState(true); @@ -74,7 +76,7 @@ const Rotation: FC = (props) => { return (
- {/**/} + {tutorialParams && } {events ? ( events.length ? (
= (props) => { - const { startMoment, days = 7 /* shiftStart, shiftEnd, rotationStart*/ } = props; - - const shiftStart = dayjs(startMoment); - const shiftEnd = dayjs(startMoment).add(1, 'days'); - const rotationStart = dayjs(startMoment).add(1, 'days'); + const { startMoment, days = 7, shiftStart, shiftEnd, rotationStart, focusElementName } = props; const duration = shiftEnd.diff(shiftStart, 'seconds'); const events = useMemo(() => { - const events = []; - for (let i = 0; i < days; i++) { - events.push({ - start: dayjs(shiftStart).add(i, 'days'), - end: dayjs(shiftStart).add(duration, 'seconds').add(i, 'days'), - }); + return [ + { + start: dayjs(shiftStart), + end: dayjs(shiftStart).add(duration, 'seconds'), + }, + ]; + }, [shiftStart, duration]); + + const base = 60 * 60 * 24 * days; + + const pointerX = useMemo(() => { + if (focusElementName === undefined) { + return undefined; } - return events; - }, []); - - const base = 60 * 60 * 24 * 7; - const diff = dayjs(rotationStart).diff(startMoment, 'seconds'); + const moment = props[focusElementName]; + const firstEvent = events[0]; + const diff = dayjs(moment).diff(firstEvent.start, 'seconds'); - const currentTimeX = diff / base; + return diff / base; + }, [focusElementName, events, rotationStart]); const x = useMemo(() => { if (!events || !events.length) { @@ -57,20 +56,29 @@ const RotationTutorial: FC = (props) => { return (
- + {events.map((event, index) => { const duration = event.end.diff(event.start, 'seconds'); const width = duration / base; - return ; + return ( + + ); })}
); }; -const TutorialSlot = (props: { style: React.CSSProperties }) => { - const { style } = props; +const TutorialSlot = (props: { style: React.CSSProperties; active: boolean }) => { + const { style, active } = props; - return
; + return
; }; const Pointer = (props: { className: string; style: React.CSSProperties }) => { diff --git a/grafana-plugin/src/containers/RotationForm/DateTimePicker.tsx b/grafana-plugin/src/containers/RotationForm/DateTimePicker.tsx index bc26521526..a2d6d7ee42 100644 --- a/grafana-plugin/src/containers/RotationForm/DateTimePicker.tsx +++ b/grafana-plugin/src/containers/RotationForm/DateTimePicker.tsx @@ -12,6 +12,8 @@ interface UserTooltipProps { onChange: (value: dayjs.Dayjs) => void; disabled?: boolean; minMoment?: dayjs.Dayjs; + onFocus?: () => void; + onBlur?: () => void; } const toDate = (moment: dayjs.Dayjs, timezone: Timezone) => { @@ -28,7 +30,7 @@ const toDate = (moment: dayjs.Dayjs, timezone: Timezone) => { }; const DateTimePicker = (props: UserTooltipProps) => { - const { value: propValue, minMoment, timezone, onChange, disabled } = props; + const { value: propValue, minMoment, timezone, onChange, disabled, onFocus, onBlur } = props; const value = useMemo(() => toDate(propValue, timezone), [propValue, timezone]); @@ -66,8 +68,12 @@ const DateTimePicker = (props: UserTooltipProps) => { return ( - - +
+ +
+
+ +
); }; diff --git a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx index 82b1e70756..61be69875e 100644 --- a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx +++ b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx @@ -233,6 +233,27 @@ const RotationForm: FC = observer((props) => { const isFormValid = useMemo(() => userGroups.some((group) => group.length), [userGroups]); + const [focusElementName, setFocusElementName] = useState(undefined); + + const getFocusHandler = (elementName: string) => { + return () => { + setFocusElementName(elementName); + }; + }; + + const handleBlur = useCallback(() => { + setFocusElementName(undefined); + }, []); + + useEffect(() => { + store.scheduleStore.setRotationFormLiveParams({ + rotationStart, + shiftStart, + shiftEnd, + focusElementName, + }); + }, [params, focusElementName]); + return ( = observer((props) => { value={rotationStart} onChange={setRotationStart} timezone={currentTimezone} + onFocus={getFocusHandler('rotationStart')} + onBlur={handleBlur} /> = observer((props) => { } > - + = observer((props) => { } > - +
{ startMoment={startMoment} currentTimezone={currentTimezone} transparent={isPreview} + tutorialParams={isPreview && store.scheduleStore.rotationFormLiveParams} /> ))} diff --git a/grafana-plugin/src/models/schedule/schedule.ts b/grafana-plugin/src/models/schedule/schedule.ts index cee1deaa7b..c7984a8185 100644 --- a/grafana-plugin/src/models/schedule/schedule.ts +++ b/grafana-plugin/src/models/schedule/schedule.ts @@ -14,7 +14,17 @@ import { splitToLayers, splitToShiftsAndFillGaps, } from './schedule.helpers'; -import { Rotation, RotationType, Schedule, ScheduleEvent, Shift, Event, Layer, ShiftEvents } from './schedule.types'; +import { + Rotation, + RotationType, + Schedule, + ScheduleEvent, + Shift, + Event, + Layer, + ShiftEvents, + RotationFormLiveParams, +} from './schedule.types'; export class ScheduleStore extends BaseStore { @observable @@ -57,6 +67,9 @@ export class ScheduleStore extends BaseStore { @observable overridePreview?: Array<{ shiftId: Shift['id']; isPreview?: boolean; events: Event[] }>; + @observable + rotationFormLiveParams: RotationFormLiveParams = undefined; + @observable scheduleToScheduleEvents: { [id: string]: ScheduleEvent[]; @@ -187,6 +200,10 @@ export class ScheduleStore extends BaseStore { return response; } + setRotationFormLiveParams(params: RotationFormLiveParams) { + this.rotationFormLiveParams = params; + } + async updateRotationPreview( scheduleId: Schedule['id'], shiftId: Shift['id'] | 'new', @@ -227,6 +244,7 @@ export class ScheduleStore extends BaseStore { this.finalPreview = undefined; this.rotationPreview = undefined; this.overridePreview = undefined; + this.rotationFormLiveParams = undefined; } async updateRotation(shiftId: Shift['id'], params: Partial) { diff --git a/grafana-plugin/src/models/schedule/schedule.types.ts b/grafana-plugin/src/models/schedule/schedule.types.ts index ffbec8dc91..8b703377c4 100644 --- a/grafana-plugin/src/models/schedule/schedule.types.ts +++ 
b/grafana-plugin/src/models/schedule/schedule.types.ts @@ -1,3 +1,5 @@ +import dayjs from 'dayjs'; + import { GrafanaTeam } from 'models/grafana_team/grafana_team.types'; import { SlackChannel } from 'models/slack_channel/slack_channel.types'; import { User } from 'models/user/user.types'; @@ -9,6 +11,13 @@ export enum ScheduleType { 'API', } +export interface RotationFormLiveParams { + rotationStart: dayjs.Dayjs; + shiftStart: dayjs.Dayjs; + shiftEnd: dayjs.Dayjs; + focusElementName: string; +} + export interface Schedule { id: string; ical_url_primary: string; From 7ef830fdd2a8c08cd8a6e39b9432f2958537fc5e Mon Sep 17 00:00:00 2001 From: Maxim Date: Thu, 3 Nov 2022 16:19:43 +0000 Subject: [PATCH 23/49] add nice pointer animation --- .../src/containers/Rotation/Rotation.module.css | 10 +++++++++- .../src/containers/Rotation/RotationTutorial.tsx | 4 ++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/grafana-plugin/src/containers/Rotation/Rotation.module.css b/grafana-plugin/src/containers/Rotation/Rotation.module.css index 20be10ea23..5d462ddf9c 100644 --- a/grafana-plugin/src/containers/Rotation/Rotation.module.css +++ b/grafana-plugin/src/containers/Rotation/Rotation.module.css @@ -79,7 +79,15 @@ .pointer { position: absolute; top: -9px; - transition: left 500ms ease; + transition: left 500ms ease, opacity 500ms ease, transform 500ms ease; + transform-origin: bottom center; + opacity: 0; + transform: scale(0); +} + +.pointer--active { + transform: scale(1); + opacity: 1; } .tutorial-slot { diff --git a/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx b/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx index c153ce2a54..3c1c0475da 100644 --- a/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx +++ b/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx @@ -57,8 +57,8 @@ const RotationTutorial: FC = (props) => { return (
{events.map((event, index) => { const duration = event.end.diff(event.start, 'seconds'); From 4240333fdf1fc43bc45b20cb947f16a7bb4ac844 Mon Sep 17 00:00:00 2001 From: Maxim Date: Fri, 4 Nov 2022 13:59:39 +0000 Subject: [PATCH 24/49] selected days minor fix --- .../src/components/ScheduleQuality/ScheduleQuality.module.css | 2 +- .../src/containers/RotationForm/RotationForm.module.css | 2 +- grafana-plugin/src/containers/RotationForm/RotationForm.tsx | 4 ++-- grafana-plugin/src/style/vars.css | 1 - 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/grafana-plugin/src/components/ScheduleQuality/ScheduleQuality.module.css b/grafana-plugin/src/components/ScheduleQuality/ScheduleQuality.module.css index 5f9d2674fc..24f1ab48c2 100644 --- a/grafana-plugin/src/components/ScheduleQuality/ScheduleQuality.module.css +++ b/grafana-plugin/src/components/ScheduleQuality/ScheduleQuality.module.css @@ -14,7 +14,7 @@ .progress { width: 100%; height: 16px; - background-color: var(--secondary-background-shade); + background-color: var(--secondary-background); position: relative; } diff --git a/grafana-plugin/src/containers/RotationForm/RotationForm.module.css b/grafana-plugin/src/containers/RotationForm/RotationForm.module.css index b913ddb35c..26042e17d2 100644 --- a/grafana-plugin/src/containers/RotationForm/RotationForm.module.css +++ b/grafana-plugin/src/containers/RotationForm/RotationForm.module.css @@ -45,7 +45,7 @@ .day { width: 28px; height: 28px; - background: var(--secondary-background-shade); + background: var(--secondary-background); border-radius: 2px; line-height: 28px; text-align: center; diff --git a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx index 61be69875e..da364a4440 100644 --- a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx +++ b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx @@ -154,7 +154,7 @@ const RotationForm: FC = observer((props) => { 
rolling_users: userGroups, interval: repeatEveryValue, frequency: repeatEveryPeriod, - by_day: repeatEveryPeriod === 1 || repeatEveryPeriod === 0 ? selectedDays : null, + by_day: (repeatEveryPeriod === 0 && repeatEveryValue === 1) || repeatEveryPeriod === 1 ? selectedDays : null, priority_level: shiftId === 'new' ? layerPriority : shift?.priority_level, }), [ @@ -343,7 +343,7 @@ const RotationForm: FC = observer((props) => { /> - {(repeatEveryPeriod === 1 || repeatEveryPeriod === 0) && ( + {((repeatEveryPeriod === 0 && repeatEveryValue === 1) || repeatEveryPeriod === 1) && ( Date: Mon, 7 Nov 2022 16:34:43 +0100 Subject: [PATCH 25/49] One startup command to rule them all (#760) * Modify `docker-compose-developer` configuration files, and `Makefile` to support running everything in containers for local development - Make use of the COMPOSE_PROFILES env var that is supported by docker-compose to allow swapping-out/turning off certain docker-compose services. - add makefile cleanup command. Will remove all docker resources related to running the project locally - The "restart grafana container" issue, where users would need to restart their grafana container when setting up the project for the first time, is now fixed (make command now runs yarn build:dev before docker-compose startup; this ensures grafana-plugin/dist is available for grafana container before it starts up) - The DEVELOPER.md has been updated as well to reflect these new changes. It has been moved to ./dev/README.md (and references to the old file have been updated). - The redis image that is referenced in the docker-compose files has been pinned to v7.0.5 (latest version as of this commit) to avoid any surprises w/ future releases. 
- remove root .dockerignore in favour of individual .dockerignore files in ./engine and ./grafana-plugin --- .dockerignore | 9 - .gitignore | 12 +- grafana-plugin/.nvmrc => .nvmrc | 0 DEVELOPER.md | 325 ------------------ Makefile | 167 ++++++--- README.md | 6 +- .env.dev.example => dev/.env.dev.example | 16 +- dev/.env.mysql.dev | 12 + dev/.env.postgres.dev | 13 + dev/.env.sqlite.dev | 2 + dev/.gitignore | 1 + dev/README.md | 203 +++++++++++ docker-compose-developer-pg.yml | 84 ----- docker-compose-developer.yml | 265 ++++++++++++-- docker-compose-mysql-rabbitmq.yml | 13 +- docker-compose.yml | 9 +- docs/sources/open-source/_index.md | 2 +- engine/.dockerignore | 8 + engine/Dockerfile | 13 +- engine/requirements.txt | 2 +- engine/settings/dev.py | 8 - grafana-plugin/.dockerignore | 4 + grafana-plugin/Dockerfile.dev | 14 + grafana-plugin/package.json | 1 + .../PluginConfigPage/PluginConfigPage.tsx | 2 +- 25 files changed, 647 insertions(+), 544 deletions(-) delete mode 100644 .dockerignore rename grafana-plugin/.nvmrc => .nvmrc (100%) delete mode 100644 DEVELOPER.md rename .env.dev.example => dev/.env.dev.example (58%) create mode 100644 dev/.env.mysql.dev create mode 100644 dev/.env.postgres.dev create mode 100644 dev/.env.sqlite.dev create mode 100644 dev/.gitignore create mode 100644 dev/README.md delete mode 100644 docker-compose-developer-pg.yml create mode 100644 engine/.dockerignore create mode 100644 grafana-plugin/.dockerignore create mode 100644 grafana-plugin/Dockerfile.dev diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index 6561a0ad72..0000000000 --- a/.dockerignore +++ /dev/null @@ -1,9 +0,0 @@ -venv/* -venv2.7/* -.DS_Store -frontend/node_modules -frontend/build -package-lock.json -./engine/extensions -.env -.env-hobby diff --git a/.gitignore b/.gitignore index d074861005..320b022a09 100644 --- a/.gitignore +++ b/.gitignore @@ -1,17 +1,12 @@ # Backend */db.sqlite3 -engine/oncall_dev.db +engine/*.db *.pyc venv .python-version -.env 
-.env_hobby -.env.dev .vscode dump.rdb .idea -engine/celerybeat-schedule.db -engine/sqlite_data jupiter_playbooks/* engine/reports/*.csv engine/jupiter_playbooks/* @@ -29,11 +24,8 @@ node_modules # misc .DS_Store -.env.local -.env.development.local -.env.test.local -.env.production.local .swp +.env npm-debug.log* yarn-debug.log* diff --git a/grafana-plugin/.nvmrc b/.nvmrc similarity index 100% rename from grafana-plugin/.nvmrc rename to .nvmrc diff --git a/DEVELOPER.md b/DEVELOPER.md deleted file mode 100644 index f85cf6482b..0000000000 --- a/DEVELOPER.md +++ /dev/null @@ -1,325 +0,0 @@ -- [Developer quickstart](#developer-quickstart) - - [Code style](#code-style) - - [Backend setup](#backend-setup) - - [Frontend setup](#frontend-setup) - - [Setup using Makefile](#setup-using-makefile) - - [Slack application setup](#slack-application-setup) - - [Update drone build](#update-drone-build) -- [Troubleshooting](#troubleshooting) - - [ld: library not found for -lssl](#ld-library-not-found-for--lssl) - - [Could not build wheels for cryptography which use PEP 517 and cannot be installed directly](#could-not-build-wheels-for-cryptography-which-use-pep-517-and-cannot-be-installed-directly) - - [django.db.utils.OperationalError: (1366, "Incorrect string value ...")](#djangodbutilsoperationalerror-1366-incorrect-string-value-) - - [Empty queryset when filtering against datetime field](#empty-queryset-when-filtering-against-datetime-field) -- [Hints](#hints) - - [Building the all-in-one docker container](#building-the-all-in-one-docker-container) - - [Running Grafana with plugin (frontend) folder mounted for dev purposes](#running-grafana-with-plugin-frontend-folder-mounted-for-dev-purposes) - - [How to recreate the local database](#recreating-the-local-database) - - [Running tests locally](#running-tests-locally) -- [IDE Specific Instructions](#ide-specific-instructions) - - [PyCharm](#pycharm) - -## Developer quickstart - -Related: [How to develop 
integrations](/engine/config_integrations/README.md) - -### Code style - -- [isort](https://github.com/PyCQA/isort), [black](https://github.com/psf/black) and [flake8](https://github.com/PyCQA/flake8) are used to format backend code -- [eslint](https://eslint.org) and [stylelint](https://stylelint.io) are used to format frontend code -- To run formatters and linters on all files: `pre-commit run --all-files` -- To install pre-commit hooks: `pre-commit install` - -### Backend setup - -1. Start stateful services (RabbitMQ, Redis, Grafana with mounted plugin folder) - -```bash -docker-compose -f docker-compose-developer.yml up -d -``` - -NOTE: to use a PostgreSQL db backend, use the `docker-compose-developer-pg.yml` file instead. - -2. `postgres` is a dependency on some of our Python dependencies (notably `psycopg2` ([docs](https://www.psycopg.org/docs/install.html#prerequisites))). To install this on Mac you can simply run: - -```bash -brew install postgresql@14 -``` - -For non Mac installation please visit [here](https://www.postgresql.org/download/) for more information on how to install. - -3. Prepare a python environment: - -```bash -# Create and activate the virtual environment -python3.9 -m venv venv && source venv/bin/activate - -# Verify that python has version 3.9.x -python --version - -# Make sure you have latest pip and wheel support -pip install -U pip wheel - -# Copy and check .env.dev file. -cp .env.dev.example .env.dev - -# NOTE: if you want to use the PostgreSQL db backend add DATABASE_TYPE=postgresql to your .env.dev file; -# currently allowed backend values are `mysql` (default), `postgresql` and `sqlite3` - -# Apply .env.dev to current terminal. -# For PyCharm it's better to use https://plugins.jetbrains.com/plugin/7861-envfile/ -export $(grep -v '^#' .env.dev | xargs -0) - -# Install dependencies. -# Hint: there is a known issue with uwsgi. It's not used in the local dev environment. Feel free to comment it in `engine/requirements.txt`. 
-cd engine && pip install -r requirements.txt - -# Migrate the DB: -python manage.py migrate - -# Create user for django admin panel (if you need it): -python manage.py createsuperuser -``` - -4. Launch the backend: - -```bash -# Http server: -python manage.py runserver 0.0.0.0:8080 - -# Worker for background tasks (run it in the parallel terminal, don't forget to export .env.dev there) -python manage.py start_celery - -# Additionally you could launch the worker with periodic tasks launcher (99% you don't need this) -celery -A engine beat -l info -``` - -5. All set! Check out internal API endpoints at http://localhost:8000/. - -### Frontend setup - -1. Make sure you have [NodeJS v.14+ < 17](https://nodejs.org/) and [yarn](https://yarnpkg.com/) installed. **Note**: If you are using [`nvm`](https://github.com/nvm-sh/nvm) feel free to simply run `cd grafana-plugin && nvm install` to install the proper Node version. - -2. Install the dependencies with `yarn` and launch the frontend server (on port `3000` by default) - -```bash -cd grafana-plugin -yarn install -yarn -yarn watch -``` - -3. Ensure /grafana-plugin/provisioning has no grafana-plugin.yml -4. Generate an invitation token: - -```bash -cd engine; -python manage.py issue_invite_for_the_frontend --override -``` - -... or use output of all-in-one docker container described in the README.md. - -5. Open Grafana in the browser http://localhost:3000 (login: oncall, password: oncall) notice OnCall Plugin is not enabled, navigate to Configuration->Plugins and click Grafana OnCall - -6. Some configuration fields will appear be available. 
Fill them out and click Initialize OnCall - -``` -OnCall API URL: -http://host.docker.internal:8080 - -Invitation Token (Single use token to connect Grafana instance): -Response from the invite generator command (check above) - -Grafana URL (URL OnCall will use to talk to Grafana instance): -http://localhost:3000 -``` - -NOTE: you may not have `host.docker.internal` available, in that case you can get the -host IP from inside the container by running: - -```bash -/sbin/ip route|awk '/default/ { print $3 }' - -# Alternatively add host.docker.internal as an extra_host for grafana in docker-compose-developer.yml -extra_hosts: - - "host.docker.internal:host-gateway" - -``` - -### Setup using Makefile - -- Make sure you have `make` installed -- Backend setup: - - Run stateful services: - `$ make docker-services-start` - - (you can change your preferred docker file by defining the `DOCKER_FILE` env variable) - - - Setup environment: - `$ make bootstrap` - - (you can change your preferred directory for your Python virtualenv by defining the `ENV_DIR` env variable) - - - Start the server (this will run bootstrap if needed and apply db migrations): - `$ make run` - - - Start the celery workers: - `$ make start-celery` - - - Start celery beat: - `$ make start-celery-beat` - -- Frontend: - - Build and watch plugin: - `$ make watch-plugin` - - - Generate invitation token: - `$ make manage ARGS="issue_invite_for_the_frontend --override"` - - - Follow instructions above to setup plugin (see steps 5 and 6) - -- Other useful targets: - - `$ make shell` (open Django shell) - - `$ make dbshell` (open DB shell) - - `$ make test` (run tests) - - `$ make lint` (run lint checks) - - -### Slack application setup - -For Slack app configuration check our docs: https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup - -### Update drone build - -The .drone.yml build file must be signed when changes are made to it. 
Follow these steps: - -If you have not installed drone CLI follow [these instructions](https://docs.drone.io/cli/install/) - -To sign the .drone.yml file: - -```bash -export DRONE_SERVER=https://drone.grafana.net - -# Get your drone token from https://drone.grafana.net/account -export DRONE_TOKEN= - -drone sign --save grafana/oncall .drone.yml -``` - -## Troubleshooting - -### ld: library not found for -lssl - -**Problem:** - -``` -pip install -r requirements.txt -... - ld: library not found for -lssl - clang: error: linker command failed with exit code 1 (use -v to see invocation) - error: command 'gcc' failed with exit status 1 -... -``` - -**Solution:** - -``` -export LDFLAGS=-L/usr/local/opt/openssl/lib -pip install -r requirements.txt -``` - -### Could not build wheels for cryptography which use PEP 517 and cannot be installed directly - -Happens on Apple Silicon - -**Problem:** - -``` - build/temp.macosx-12-arm64-3.9/_openssl.c:575:10: fatal error: 'openssl/opensslv.h' file not found - #include - ^~~~~~~~~~~~~~~~~~~~ - 1 error generated. - error: command '/usr/bin/clang' failed with exit code 1 - ---------------------------------------- - ERROR: Failed building wheel for cryptography -``` - -**Solution:** - -``` -LDFLAGS="-L$(brew --prefix openssl@1.1)/lib" CFLAGS="-I$(brew --prefix openssl@1.1)/include" pip install `cat requirements.txt | grep cryptography` -``` - -### django.db.utils.OperationalError: (1366, "Incorrect string value ...") - -**Problem:** - -``` -django.db.utils.OperationalError: (1366, "Incorrect string value: '\\xF0\\x9F\\x98\\x8A\\xF0\\x9F...' for column 'cached_name' at row 1") -``` - -**Solution:** - -Recreate the database with the correct encoding. 
- -### Grafana OnCall plugin does not show up in plugin list - -**Problem:** -I've run `yarn watch` in `grafana_plugin` but I do not see Grafana OnCall in the list of plugins - -**Solution:** -If it is the first time you have run `yarn watch` and it was run after starting Grafana in docker-compose; Grafana will not have detected a plugin to fix: `docker-compose -f developer-docker-compose.yml restart grafana` - -## Hints: - -### Building the all-in-one docker container - -```bash -cd engine; -docker build -t grafana/oncall-all-in-one -f Dockerfile.all-in-one . -``` - -### Running Grafana with plugin (frontend) folder mounted for dev purposes - -Do it only after you built frontend at least once! Also developer-docker-compose.yml has similar Grafana included. - -```bash -docker run --rm -it -p 3000:3000 -v "$(pwd)"/grafana-plugin:/var/lib/grafana/plugins/grafana-plugin -e GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS=grafana-oncall-app --name=grafana grafana/grafana:8.3.2 -``` - -Credentials: admin/admin - -### Running tests locally - -In the `engine` directory, with the `.env.dev` vars exported and virtualenv activated - -```bash -pytest -``` - -You can also install `pytest.xdist` in your env and run tests in parallel: - -```bash -pip install pytest.xdist -pytest -n4 -``` - -## IDE Specific Instructions - -### PyCharm - -1. Create venv and copy .env.dev file - ```bash - python3.9 -m venv venv - cp .env.dev.example .env.dev - ``` -2. Open the project in PyCharm -3. Settings → Project OnCall - - In Python Interpreter click the gear and create a new Virtualenv from existing environment selecting the venv created in Step 1. - - In Project Structure make sure the project root is the content root and add /engine to Sources -4. Under Settings → Languages & Frameworks → Django - - Enable Django support - - Set Django project root to /engine - - Set Settings to settings/dev.py -5. 
Create a new Django Server run configuration to Run/Debug the engine - - Use a plugin such as EnvFile to load the .env.dev file - - Change port from 8000 to 8080 diff --git a/Makefile b/Makefile index 736428f1d6..c2746ea54f 100644 --- a/Makefile +++ b/Makefile @@ -1,73 +1,136 @@ -include .env.dev +DOCKER_COMPOSE_FILE = docker-compose-developer.yml +DOCKER_COMPOSE_DEV_LABEL = com.grafana.oncall.env=dev + +# compose profiles +MYSQL_PROFILE = mysql +POSTGRES_PROFILE = postgres +SQLITE_PROFILE = sqlite +ENGINE_PROFILE = engine +UI_PROFILE = oncall_ui +REDIS_PROFILE = redis +RABBITMQ_PROFILE = rabbitmq +GRAFANA_PROFILE = grafana + +DEV_ENV_DIR = ./dev +DEV_ENV_FILE = $(DEV_ENV_DIR)/.env.dev +DEV_ENV_EXAMPLE_FILE = $(DEV_ENV_FILE).example + +ENGINE_DIR = ./engine +SQLITE_DB_FILE = $(ENGINE_DIR)/oncall.db + +# -n flag only copies DEV_ENV_EXAMPLE_FILE-> DEV_ENV_FILE if it doesn't already exist +$(shell cp -n $(DEV_ENV_EXAMPLE_FILE) $(DEV_ENV_FILE)) +include $(DEV_ENV_FILE) + +# if COMPOSE_PROFILES is set in DEV_ENV_FILE use it +# otherwise use a default (or what is passed in as an arg) +ifeq ($(COMPOSE_PROFILES),) + COMPOSE_PROFILES=$(ENGINE_PROFILE),$(UI_PROFILE),$(REDIS_PROFILE),$(GRAFANA_PROFILE) +endif + +# conditionally assign DB based on what is present in COMPOSE_PROFILES +ifeq ($(findstring $(MYSQL_PROFILE),$(COMPOSE_PROFILES)),$(MYSQL_PROFILE)) + DB=$(MYSQL_PROFILE) +else ifeq ($(findstring $(POSTGRES_PROFILE),$(COMPOSE_PROFILES)),$(POSTGRES_PROFILE)) + DB=$(POSTGRES_PROFILE) +else + DB=$(SQLITE_PROFILE) +endif + +# conditionally assign BROKER_TYPE based on what is present in COMPOSE_PROFILES +# if the user specifies both rabbitmq and redis, we'll make the assumption that rabbitmq is the broker +ifeq ($(findstring $(RABBITMQ_PROFILE),$(COMPOSE_PROFILES)),$(RABBITMQ_PROFILE)) + BROKER_TYPE=$(RABBITMQ_PROFILE) +else + BROKER_TYPE=$(REDIS_PROFILE) +endif + +define run_engine_docker_command + DB=$(DB) BROKER_TYPE=$(BROKER_TYPE) docker-compose -f $(DOCKER_COMPOSE_FILE) 
run --rm oncall_engine_commands $(1) +endef -ENV_DIR ?= venv -ENV = $(CURDIR)/$(ENV_DIR) -CELERY = $(ENV)/bin/celery -PRECOMMIT = $(ENV)/bin/pre-commit -PIP = $(ENV)/bin/pip -PYTHON3 = $(ENV)/bin/python3 -PYTEST = $(ENV)/bin/pytest +define run_docker_compose_command + COMPOSE_PROFILES=$(COMPOSE_PROFILES) DB=$(DB) BROKER_TYPE=$(BROKER_TYPE) docker-compose -f $(DOCKER_COMPOSE_FILE) $(1) +endef -DOCKER_FILE ?= docker-compose-developer.yml +# touch SQLITE_DB_FILE if it does not exist and DB is equal to SQLITE_PROFILE +start: +ifeq ($(DB),$(SQLITE_PROFILE)) + @if [ ! -f $(SQLITE_DB_FILE) ]; then \ + touch $(SQLITE_DB_FILE); \ + fi +endif -define setup_engine_env - export `grep -v '^#' .env.dev | xargs -0` && cd engine +# if the oncall UI is to be run in docker we should do an initial build of the frontend code +# this makes sure that it will be available when the grafana container starts up without the need to +# restart the grafana container initially +ifeq ($(findstring $(UI_PROFILE),$(COMPOSE_PROFILES)),$(UI_PROFILE)) + cd grafana-plugin && yarn install && yarn build:dev +endif -$(ENV): - python3.9 -m venv $(ENV_DIR) + $(call run_docker_compose_command,up --remove-orphans -d) -bootstrap: $(ENV) - $(PIP) install -U pip wheel - cp -n .env.dev.example .env.dev - cd engine && $(PIP) install -r requirements.txt - @touch $@ +stop: + $(call run_docker_compose_command,down) -migrate: bootstrap - $(setup_engine_env) && $(PYTHON3) manage.py migrate +restart: + $(call run_docker_compose_command,restart) -clean: - rm -rf $(ENV) +cleanup: stop + docker system prune --filter label="$(DOCKER_COMPOSE_DEV_LABEL)" --all --volumes -lint: bootstrap - cd engine && $(PRECOMMIT) run --all-files +install-pre-commit: + @if [ !
-x "$$(command -v pre-commit)" ]; then \ + echo "installing pre-commit"; \ + pip install $$(grep "pre-commit" $(ENGINE_DIR)/requirements.txt); \ + else \ + echo "pre-commit already installed"; \ + fi -dbshell: bootstrap - $(setup_engine_env) && $(PYTHON3) manage.py dbshell $(ARGS) +lint: install-pre-commit + pre-commit run --all-files -shell: bootstrap - $(setup_engine_env) && $(PYTHON3) manage.py shell $(ARGS) +install-precommit-hook: install-pre-commit + pre-commit install -test: bootstrap - $(setup_engine_env) && $(PYTEST) --ds=settings.dev $(ARGS) +get-invite-token: + $(call run_engine_docker_command,python manage.py issue_invite_for_the_frontend --override) -manage: bootstrap - $(setup_engine_env) && $(PYTHON3) manage.py $(ARGS) +test: + $(call run_engine_docker_command,pytest) -run: bootstrap migrate - $(setup_engine_env) && $(PYTHON3) manage.py runserver +start-celery-beat: + $(call run_engine_docker_command,celery -A engine beat -l info) -start-celery: bootstrap - . $(ENV)/bin/activate && $(setup_engine_env) && $(PYTHON3) manage.py start_celery +purge-queues: + $(call run_engine_docker_command,celery -A engine purge -f) -start-celery-beat: bootstrap - $(setup_engine_env) && $(CELERY) -A engine beat -l info +shell: + $(call run_engine_docker_command,python manage.py shell) -purge-queues: bootstrap - $(setup_engine_env) && $(CELERY) -A engine purge +dbshell: + $(call run_engine_docker_command,python manage.py dbshell) + +# The below commands are useful for running backend services outside of docker +define backend_command + export `grep -v '^#' $(DEV_ENV_FILE) | xargs -0` && \ + export BROKER_TYPE=$(BROKER_TYPE) && \ + cd engine && \ + $(1) +endef -docker-services-start: - docker-compose -f $(DOCKER_FILE) up -d - @echo "Waiting for database connection..." 
- until $$(nc -z -v -w30 localhost 3306); do sleep 1; done; +backend-bootstrap: + pip install -U pip wheel + cd engine && pip install -r requirements.txt -docker-services-restart: - docker-compose -f $(DOCKER_FILE) restart +backend-migrate: + $(call backend_command,python manage.py migrate) -docker-services-stop: - docker-compose -f $(DOCKER_FILE) stop +run-backend-server: + $(call backend_command,python manage.py runserver) -watch-plugin: - cd grafana-plugin && yarn install && yarn && yarn watch +run-backend-celery: + $(call backend_command,python manage.py start_celery) -.PHONY: grafana-plugin +backend-command: + $(call backend_command,$(CMD)) diff --git a/README.md b/README.md index 96455f6e44..ebdbfc5e9e 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,11 @@ Developer-friendly incident response with brilliant Slack integration. ## Getting Started -We prepared multiple environments: [production](https://grafana.com/docs/grafana-cloud/oncall/open-source/#production-environment), [developer](DEVELOPER.md) and hobby: +We prepared multiple environments: + +- [production](https://grafana.com/docs/grafana-cloud/oncall/open-source/#production-environment) +- [developer](./dev/README.md) +- hobby (described in the following steps) 1. 
Download [`docker-compose.yml`](docker-compose.yml): diff --git a/.env.dev.example b/dev/.env.dev.example similarity index 58% rename from .env.dev.example rename to dev/.env.dev.example index 8f7f1ad640..a33fabcea2 100644 --- a/.env.dev.example +++ b/dev/.env.dev.example @@ -15,7 +15,7 @@ TWILIO_AUTH_TOKEN= TWILIO_NUMBER= DJANGO_SETTINGS_MODULE=settings.dev -SECRET_KEY=jkashdkjashdkjh +SECRET_KEY=jyRnfRIeMjYfKdoFa9dKXcNaEGGc8GH1TChmYoWW BASE_URL=http://localhost:8080 FEATURE_TELEGRAM_INTEGRATION_ENABLED=True @@ -26,3 +26,17 @@ SLACK_INSTALL_RETURN_REDIRECT_HOST=http://localhost:8080 SOCIAL_AUTH_REDIRECT_IS_HTTPS=False GRAFANA_INCIDENT_STATIC_API_KEY= + +CELERY_WORKER_QUEUE="default,critical,long,slack,telegram,webhook,retry,celery" +CELERY_WORKER_CONCURRENCY=1 +CELERY_WORKER_MAX_TASKS_PER_CHILD=100 +CELERY_WORKER_SHUTDOWN_INTERVAL=65m +CELERY_WORKER_BEAT_ENABLED=True + +RABBITMQ_USERNAME=rabbitmq +RABBITMQ_PASSWORD=rabbitmq +RABBITMQ_HOST=rabbitmq +RABBITMQ_PORT=5672 +RABBITMQ_DEFAULT_VHOST="/" + +REDIS_URI=redis://redis:6379/0 diff --git a/dev/.env.mysql.dev b/dev/.env.mysql.dev new file mode 100644 index 0000000000..8b8485b7d2 --- /dev/null +++ b/dev/.env.mysql.dev @@ -0,0 +1,12 @@ +DATABASE_USER=root +DATABASE_NAME=oncall_local_dev +DATABASE_PASSWORD=empty +DATABASE_HOST=mysql +DATABASE_PORT=3306 + +# specific for the grafana container +GF_DATABASE_TYPE=mysql +GF_DATABASE_HOST=mysql:3306 +GF_DATABASE_USER=root +GF_DATABASE_PASSWORD=empty +GF_DATABASE_SSL_MODE=disable diff --git a/dev/.env.postgres.dev b/dev/.env.postgres.dev new file mode 100644 index 0000000000..6219c52fa4 --- /dev/null +++ b/dev/.env.postgres.dev @@ -0,0 +1,13 @@ +DATABASE_TYPE=postgresql +DATABASE_NAME=oncall_local_dev +DATABASE_USER=postgres +DATABASE_PASSWORD=empty +DATABASE_HOST=postgres +DATABASE_PORT=5432 + +# specific for the grafana container +GF_DATABASE_TYPE=postgres +GF_DATABASE_HOST=postgres:5432 +GF_DATABASE_NAME=grafana +GF_DATABASE_USER=postgres +GF_DATABASE_PASSWORD=empty diff 
--git a/dev/.env.sqlite.dev b/dev/.env.sqlite.dev new file mode 100644 index 0000000000..3797830e1b --- /dev/null +++ b/dev/.env.sqlite.dev @@ -0,0 +1,2 @@ +DATABASE_TYPE=sqlite3 +DATABASE_NAME=/var/lib/oncall/oncall.db diff --git a/dev/.gitignore b/dev/.gitignore new file mode 100644 index 0000000000..27c84076b2 --- /dev/null +++ b/dev/.gitignore @@ -0,0 +1 @@ +.env.dev diff --git a/dev/README.md b/dev/README.md new file mode 100644 index 0000000000..512f3cf112 --- /dev/null +++ b/dev/README.md @@ -0,0 +1,203 @@ +# Developer quickstart + +- [Running the project](#running-the-project) + - [`COMPOSE_PROFILES`](#compose_profiles) + - [`GRAFANA_VERSION`](#grafana_version) + - [Running backend services outside Docker](#running-backend-services-outside-docker) +- [Useful `make` commands](#useful-make-commands) +- [Setting environment variables](#setting-environment-variables) +- [Slack application setup](#slack-application-setup) +- [Update drone build](#update-drone-build) +- [Troubleshooting](#troubleshooting) + - [ld: library not found for -lssl](#ld-library-not-found-for--lssl) + - [Could not build wheels for cryptography which use PEP 517 and cannot be installed directly](#could-not-build-wheels-for-cryptography-which-use-pep-517-and-cannot-be-installed-directly) + - [django.db.utils.OperationalError: (1366, "Incorrect string value ...")](#djangodbutilsoperationalerror-1366-incorrect-string-value) +- [IDE Specific Instructions](#ide-specific-instructions) + - [PyCharm](#pycharm-professional-edition) + +Related: [How to develop integrations](/engine/config_integrations/README.md) + +## Running the project + +By default everything runs inside Docker. These options can be modified via the [`COMPOSE_PROFILES`](#compose_profiles) environment variable. + +1. Firstly, ensure that you have `docker` [installed](https://docs.docker.com/get-docker/) and running on your machine. 
**NOTE**: the `docker-compose-developer.yml` file uses some syntax/features that are only supported by Docker Compose v2. For instructions on how to enable this (if you haven't already done so), see [here](https://www.docker.com/blog/announcing-compose-v2-general-availability/). +2. Run `make start`. By default this will run everything in Docker, using SQLite as the database and Redis as the message broker/cache. See [Running in Docker](#running-in-docker) below for more details on how to swap out/disable which components are run in Docker. +3. Open Grafana in a browser [here](http://localhost:3000/plugins/grafana-oncall-app) (login: `oncall`, password: `oncall`). +4. You should now see the OnCall plugin configuration page. Fill out the configuration options as follows: + +- Invite token: run `make get-invite-token` and copy/paste the token that gets printed out +- OnCall backend URL: http://host.docker.internal:8080 (this is the URL that is running the OnCall API; it should be accessible from Grafana) +- Grafana URL: http://grafana:3000 (this is the URL OnCall will use to talk to the Grafana Instance) + +5. Enjoy! Check our [OSS docs](https://grafana.com/docs/grafana-cloud/oncall/open-source/) if you want to set up Slack, Telegram, Twilio or SMS/calls through Grafana Cloud. +6. (Optional) Install `pre-commit` hooks by running `make install-precommit-hook` + +### `COMPOSE_PROFILES` + +This configuration option represents a comma-separated list of [`docker-compose` profiles](https://docs.docker.com/compose/profiles/). It allows you to swap-out, or disable, certain components in Docker. + +This option can be configured in two ways: + +1. Setting a `COMPOSE_PROFILES` environment variable in `.env.dev`. This allows you to avoid having to set `COMPOSE_PROFILES` for each `make` command you execute afterwards. +2. Passing in a `COMPOSE_PROFILES` argument when running `make` commands.
For example: + +```bash +make start COMPOSE_PROFILES=postgres,engine,grafana,rabbitmq +``` + +The possible profile values are: + +- `grafana` +- `engine` +- `oncall_ui` +- `redis` +- `rabbitmq` +- `postgres` +- `mysql` + +The default is `engine,oncall_ui,redis,grafana`. This runs: + +- all OnCall components (using SQLite as the database) +- Redis as the Celery message broker/cache +- a Grafana container + +### `GRAFANA_VERSION` + +If you would like to change the version of Grafana being run, simply pass in a `GRAFANA_VERSION` environment variable to `make start` (or alternatively set it in your `.env.dev` file). The value of this environment variable should be a valid `grafana/grafana` published Docker [image tag](https://hub.docker.com/r/grafana/grafana/tags). + +### Running backend services outside Docker + +By default everything runs inside Docker. If you would like to run the backend services outside of Docker (for integrating w/ PyCharm for example), follow these instructions: + +1. Create a Python 3.9 virtual environment using a method of your choosing (ex. [venv](https://docs.python.org/3.9/library/venv.html) or [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)). Make sure the virtualenv is "activated". +2. `postgres` is a dependency of some of our Python dependencies (notably `psycopg2` ([docs](https://www.psycopg.org/docs/install.html#prerequisites))). Please visit [here](https://www.postgresql.org/download/) for installation instructions. +3. `make backend-bootstrap` - installs all backend dependencies +4. Modify your `.env.dev` by copying the contents of one of `.env.mysql.dev`, `.env.postgres.dev`, or `.env.sqlite.dev` into `.env.dev` (you should exclude the `GF_` prefixed environment variables).
In most cases where you are running stateful services via `docker-compose` and backend services outside of docker you will simply need to change the database host to `localhost` (or in the case of `sqlite` update the file-path to your `sqlite` database file). +5. `make backend-migrate` - runs necessary database migrations +6. Open two separate shells and then run the following: + +- `make run-backend-server` - runs the HTTP server +- `make run-backend-celery` - runs Celery workers + +## Useful `make` commands + +See [`COMPOSE_PROFILES`](#compose_profiles) for more information on what this option is and how to configure it. + +```bash +make stop # stop all of the docker containers +make restart # restart all docker containers + +# this will remove all of the images, containers, volumes, and networks +# associated with your local OnCall developer setup +make cleanup + +make get-invite-token # generate an invitation token +make start-celery-beat # start celery beat +make purge-queues # purge celery queues +make shell # starts an OnCall engine Django shell +make dbshell # opens a DB shell +make test # run backend tests + +# run both frontend and backend linters +# may need to run `yarn install` from within `grafana-plugin` to install several `pre-commit` dependencies +make lint +``` + +## Setting environment variables + +If you need to override any additional environment variables, you should set these in a root `.env.dev` file. This file is automatically picked up by the OnCall engine Docker containers. This file is ignored from source control and also overrides any defaults that are set in other `.env*` files + +## Slack application setup + +For Slack app configuration check our docs: https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup + +## Update drone build + +The `.drone.yml` build file must be signed when changes are made to it. 
Follow these steps: + +If you have not installed drone CLI follow [these instructions](https://docs.drone.io/cli/install/) + +To sign the `.drone.yml` file: + +```bash +export DRONE_SERVER=https://drone.grafana.net + +# Get your drone token from https://drone.grafana.net/account +export DRONE_TOKEN= + +drone sign --save grafana/oncall .drone.yml +``` + +## Troubleshooting + +### ld: library not found for -lssl + +**Problem:** + +``` +make backend-bootstrap +... + ld: library not found for -lssl + clang: error: linker command failed with exit code 1 (use -v to see invocation) + error: command 'gcc' failed with exit status 1 +... +``` + +**Solution:** + +``` +export LDFLAGS=-L/usr/local/opt/openssl/lib +make backend-bootstrap +``` + +### Could not build wheels for cryptography which use PEP 517 and cannot be installed directly + +Happens on Apple Silicon + +**Problem:** + +``` + build/temp.macosx-12-arm64-3.9/_openssl.c:575:10: fatal error: 'openssl/opensslv.h' file not found + #include + ^~~~~~~~~~~~~~~~~~~~ + 1 error generated. + error: command '/usr/bin/clang' failed with exit code 1 + ---------------------------------------- + ERROR: Failed building wheel for cryptography +``` + +**Solution:** + +``` +LDFLAGS="-L$(brew --prefix openssl@1.1)/lib" CFLAGS="-I$(brew --prefix openssl@1.1)/include" pip install `cat engine/requirements.txt | grep cryptography` +``` + +### django.db.utils.OperationalError: (1366, "Incorrect string value ...") + +**Problem:** + +``` +django.db.utils.OperationalError: (1366, "Incorrect string value: '\\xF0\\x9F\\x98\\x8A\\xF0\\x9F...' for column 'cached_name' at row 1") +``` + +**Solution:** + +Recreate the database with the correct encoding. + +## IDE Specific Instructions + +### PyCharm + +1. Follow the instructions listed in ["Running backend services outside Docker"](#running-backend-services-outside-docker). +2. Open the project in PyCharm +3. 
Settings → Project OnCall + - In Python Interpreter click the gear and create a new Virtualenv from existing environment selecting the venv created in Step 1. + - In Project Structure make sure the project root is the content root and add /engine to Sources +4. Under Settings → Languages & Frameworks → Django + - Enable Django support + - Set Django project root to /engine + - Set Settings to settings/dev.py +5. Create a new Django Server run configuration to Run/Debug the engine + - Use a plugin such as EnvFile to load the .env.dev file + - Change port from 8000 to 8080 diff --git a/docker-compose-developer-pg.yml b/docker-compose-developer-pg.yml deleted file mode 100644 index 7c61cf355b..0000000000 --- a/docker-compose-developer-pg.yml +++ /dev/null @@ -1,84 +0,0 @@ -version: "3.8" - -services: - postgres: - image: postgres:14.4 - restart: always - ports: - - "5432:5432" - environment: - POSTGRES_DB: oncall_local_dev - POSTGRES_PASSWORD: empty - POSTGRES_INITDB_ARGS: --encoding=UTF-8 - deploy: - resources: - limits: - memory: 500m - cpus: '0.5' - healthcheck: - test: ["CMD", "pg_isready", "-U", "postgres"] - interval: 10s - timeout: 5s - retries: 5 - - redis: - image: redis - restart: always - ports: - - "6379:6379" - deploy: - resources: - limits: - memory: 100m - cpus: '0.1' - - rabbit: - image: "rabbitmq:3.7.15-management" - environment: - RABBITMQ_DEFAULT_USER: "rabbitmq" - RABBITMQ_DEFAULT_PASS: "rabbitmq" - RABBITMQ_DEFAULT_VHOST: "/" - deploy: - resources: - limits: - memory: 1000m - cpus: '0.5' - ports: - - "15672:15672" - - "5672:5672" - - postgres_to_create_grafana_db: - image: postgres:14.4 - command: bash -c "PGPASSWORD=empty psql -U postgres -h postgres -tc \"SELECT 1 FROM pg_database WHERE datname = 'grafana'\" | grep -q 1 || PGPASSWORD=empty psql -U postgres -h postgres -c \"CREATE DATABASE grafana\"" - depends_on: - postgres: - condition: service_healthy - - grafana: - image: "grafana/grafana:main" - restart: always - environment: - 
GF_DATABASE_TYPE: postgres - GF_DATABASE_HOST: postgres:5432 - GF_DATABASE_NAME: grafana - GF_DATABASE_USER: postgres - GF_DATABASE_PASSWORD: empty - GF_DATABASE_SSL_MODE: disable - GF_SECURITY_ADMIN_USER: ${GRAFANA_USER:-admin} - GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_PASSWORD:-admin} - GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app - GF_INSTALL_PLUGINS: grafana-oncall-app - deploy: - resources: - limits: - memory: 500m - cpus: '0.5' - volumes: - - ./grafana-plugin:/var/lib/grafana/plugins/grafana-plugin - ports: - - "3000:3000" - depends_on: - postgres_to_create_grafana_db: - condition: service_completed_successfully - postgres: - condition: service_healthy diff --git a/docker-compose-developer.yml b/docker-compose-developer.yml index 33ef3fd193..6b0d2ab378 100644 --- a/docker-compose-developer.yml +++ b/docker-compose-developer.yml @@ -1,80 +1,273 @@ version: "3.8" +x-labels: &oncall-labels + - "com.grafana.oncall.env=dev" + +x-oncall-build: &oncall-build-args + context: ./engine + target: dev + labels: *oncall-labels + +x-oncall-volumes: &oncall-volumes + - ./engine:/etc/app + - ./engine/oncall.db:/var/lib/oncall/oncall.db + +x-env-files: &oncall-env-files + - ./dev/.env.dev + - ./dev/.env.${DB}.dev + +x-env-vars: &oncall-env-vars + BROKER_TYPE: ${BROKER_TYPE} + services: - mysql: - image: mysql:5.7 - platform: linux/x86_64 - command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci + oncall_ui: + container_name: oncall_ui + labels: *oncall-labels + build: + context: ./grafana-plugin + dockerfile: Dockerfile.dev + labels: *oncall-labels + volumes: + - ./grafana-plugin:/etc/app + - /etc/app/node_modules + profiles: + - oncall_ui + + oncall_engine: + container_name: oncall_engine + labels: *oncall-labels + build: *oncall-build-args restart: always + command: "python manage.py runserver 0.0.0.0:8080" + env_file: *oncall-env-files + environment: *oncall-env-vars + volumes: 
*oncall-volumes ports: - - "3306:3306" - environment: - MYSQL_ROOT_PASSWORD: empty - MYSQL_DATABASE: oncall_local_dev - deploy: - resources: - limits: - memory: 500m - cpus: '0.5' - healthcheck: - test: [ "CMD", "mysqladmin" ,"ping", "-h", "localhost" ] - timeout: 20s - retries: 10 + - "8080:8080" + depends_on: + oncall_db_migration: + condition: service_completed_successfully + profiles: + - engine + + # used to invoke one-off commands, primarily from the Makefile + # oncall_engine couldn't (easily) be used due to it's depends_on property + # we could alternatively just use `docker run` however that would require + # duplicating the env-files, volume mounts, etc in the Makefile + oncall_engine_commands: + container_name: oncall_engine_commands + labels: *oncall-labels + build: *oncall-build-args + env_file: *oncall-env-files + environment: *oncall-env-vars + volumes: *oncall-volumes + profiles: + # no need to start this except from within the Makefile + - _engine_commands + + oncall_celery: + container_name: oncall_celery + labels: *oncall-labels + build: *oncall-build-args + restart: always + command: "python manage.py start_celery" + env_file: *oncall-env-files + environment: *oncall-env-vars + volumes: *oncall-volumes + depends_on: + oncall_db_migration: + condition: service_completed_successfully + profiles: + - engine + + oncall_db_migration: + container_name: oncall_db_migration + labels: *oncall-labels + build: *oncall-build-args + command: "python manage.py migrate --noinput" + env_file: *oncall-env-files + environment: *oncall-env-vars + volumes: *oncall-volumes + depends_on: + postgres: + condition: service_healthy + mysql: + condition: service_healthy + rabbitmq: + condition: service_healthy + redis: + condition: service_healthy + profiles: + - engine redis: - image: redis + container_name: redis + labels: *oncall-labels + image: redis:7.0.5 restart: always ports: - "6379:6379" deploy: + labels: *oncall-labels resources: limits: - memory: 100m - cpus: 
'0.1' + memory: 500m + cpus: "0.5" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + timeout: 5s + interval: 5s + retries: 10 + volumes: + - redisdata_dev:/data + profiles: + - redis - rabbit: + rabbitmq: + container_name: rabbitmq + labels: *oncall-labels image: "rabbitmq:3.7.15-management" + restart: always environment: RABBITMQ_DEFAULT_USER: "rabbitmq" RABBITMQ_DEFAULT_PASS: "rabbitmq" RABBITMQ_DEFAULT_VHOST: "/" + ports: + - "15672:15672" + - "5672:5672" deploy: + labels: *oncall-labels resources: limits: memory: 1000m - cpus: '0.5' + cpus: "0.5" + healthcheck: + test: rabbitmq-diagnostics -q ping + interval: 30s + timeout: 30s + retries: 3 + volumes: + - rabbitmqdata_dev:/var/lib/rabbitmq + profiles: + - rabbitmq + + mysql: + container_name: mysql + labels: *oncall-labels + image: mysql:5.7 + platform: linux/x86_64 + command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci + restart: always + environment: + MYSQL_ROOT_PASSWORD: empty + MYSQL_DATABASE: oncall_local_dev ports: - - "15672:15672" - - "5672:5672" + - "3306:3306" + deploy: + labels: *oncall-labels + resources: + limits: + memory: 500m + cpus: "0.5" + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + timeout: 20s + retries: 10 + volumes: + - mysqldata_dev:/var/lib/mysql + profiles: + - mysql - mysql-to-create-grafana-db: + mysql_to_create_grafana_db: + container_name: mysql_to_create_grafana_db + labels: *oncall-labels image: mysql:5.7 platform: linux/x86_64 command: bash -c "mysql -h mysql -uroot -pempty -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'" depends_on: mysql: condition: service_healthy + profiles: + - mysql + + postgres: + container_name: postgres + labels: *oncall-labels + image: postgres:14.4 + restart: always + environment: + POSTGRES_DB: oncall_local_dev + POSTGRES_PASSWORD: empty + POSTGRES_INITDB_ARGS: --encoding=UTF-8 + ports: + - 
"5432:5432" + deploy: + labels: *oncall-labels + resources: + limits: + memory: 500m + cpus: "0.5" + healthcheck: + test: ["CMD", "pg_isready", "-U", "postgres"] + interval: 10s + timeout: 5s + retries: 5 + volumes: + - postgresdata_dev:/var/lib/postgresql/data + profiles: + - postgres + + postgres_to_create_grafana_db: + container_name: postgres_to_create_grafana_db + labels: *oncall-labels + image: postgres:14.4 + command: bash -c "PGPASSWORD=empty psql -U postgres -h postgres -tc \"SELECT 1 FROM pg_database WHERE datname = 'grafana'\" | grep -q 1 || PGPASSWORD=empty psql -U postgres -h postgres -c \"CREATE DATABASE grafana\"" + depends_on: + postgres: + condition: service_healthy + profiles: + - postgres grafana: - image: "grafana/grafana:main" + container_name: grafana + labels: *oncall-labels + image: "grafana/grafana:${GRAFANA_VERSION:-main}" restart: always environment: - GF_DATABASE_TYPE: mysql - GF_DATABASE_HOST: mysql - GF_DATABASE_USER: root - GF_DATABASE_PASSWORD: empty GF_SECURITY_ADMIN_USER: oncall GF_SECURITY_ADMIN_PASSWORD: oncall GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app + env_file: + - ./dev/.env.${DB}.dev + ports: + - "3000:3000" deploy: + labels: *oncall-labels resources: limits: memory: 500m - cpus: '0.5' + cpus: "0.5" + extra_hosts: + - "host.docker.internal:host-gateway" volumes: + - grafanadata_dev:/var/lib/grafana - ./grafana-plugin:/var/lib/grafana/plugins/grafana-plugin - ports: - - "3000:3000" - depends_on: - mysql: - condition: service_healthy + profiles: + - grafana + +volumes: + redisdata_dev: + labels: *oncall-labels + grafanadata_dev: + labels: *oncall-labels + rabbitmqdata_dev: + labels: *oncall-labels + postgresdata_dev: + labels: *oncall-labels + mysqldata_dev: + labels: *oncall-labels + +networks: + default: + name: oncall_dev + labels: *oncall-labels diff --git a/docker-compose-mysql-rabbitmq.yml b/docker-compose-mysql-rabbitmq.yml index a77f5d25c4..fdcd213cc4 100644 --- a/docker-compose-mysql-rabbitmq.yml 
+++ b/docker-compose-mysql-rabbitmq.yml @@ -1,7 +1,6 @@ version: "3.8" -x-environment: - &oncall-environment +x-environment: &oncall-environment BASE_URL: $DOMAIN SECRET_KEY: $SECRET_KEY RABBITMQ_USERNAME: "rabbitmq" @@ -82,14 +81,14 @@ services: resources: limits: memory: 500m - cpus: '0.5' + cpus: "0.5" healthcheck: test: "mysql -uroot -p$MYSQL_PASSWORD oncall_hobby -e 'select 1'" timeout: 20s retries: 10 redis: - image: redis + image: redis:7.0.5 restart: always expose: - 6379 @@ -97,7 +96,7 @@ services: resources: limits: memory: 100m - cpus: '0.1' + cpus: "0.1" rabbitmq: image: "rabbitmq:3.7.15-management" @@ -113,7 +112,7 @@ services: resources: limits: memory: 1000m - cpus: '0.5' + cpus: "0.5" healthcheck: test: rabbitmq-diagnostics -q ping interval: 30s @@ -148,7 +147,7 @@ services: resources: limits: memory: 500m - cpus: '0.5' + cpus: "0.5" depends_on: mysql_to_create_grafana_db: condition: service_completed_successfully diff --git a/docker-compose.yml b/docker-compose.yml index 070f6f5121..e713499853 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,6 @@ version: "3.8" -x-environment: - &oncall-environment +x-environment: &oncall-environment DATABASE_TYPE: sqlite3 BROKER_TYPE: redis BASE_URL: $DOMAIN @@ -55,7 +54,7 @@ services: condition: service_healthy redis: - image: redis + image: redis:7.0.5 restart: always expose: - 6379 @@ -65,7 +64,7 @@ services: resources: limits: memory: 500m - cpus: '0.5' + cpus: "0.5" healthcheck: test: ["CMD", "redis-cli", "ping"] timeout: 5s @@ -88,7 +87,7 @@ services: resources: limits: memory: 500m - cpus: '0.5' + cpus: "0.5" profiles: - with_grafana diff --git a/docs/sources/open-source/_index.md b/docs/sources/open-source/_index.md index 5e3f03c620..d92eeec5c8 100644 --- a/docs/sources/open-source/_index.md +++ b/docs/sources/open-source/_index.md @@ -18,7 +18,7 @@ This guide describes the necessary installation and configuration steps needed t There are three Grafana OnCall OSS environments available: 
- **Hobby** playground environment for local usage: [README.md](https://github.com/grafana/oncall#getting-started) -- **Development** environment for contributors: [DEVELOPER.md](https://github.com/grafana/oncall/blob/dev/DEVELOPER.md) +- **Development** environment for contributors: [development README.md](https://github.com/grafana/oncall/blob/dev/dev/README.md) - **Production** environment for reliable cloud installation using Helm: [Production Environment](#production-environment) ## Production Environment diff --git a/engine/.dockerignore b/engine/.dockerignore new file mode 100644 index 0000000000..f7ef646f8f --- /dev/null +++ b/engine/.dockerignore @@ -0,0 +1,8 @@ +__pycache__ +.pytest_cache +*.pyc +celerybeat-schedule +*.db +./extensions + +.DS_Store diff --git a/engine/Dockerfile b/engine/Dockerfile index f23b28b601..6c7531143c 100644 --- a/engine/Dockerfile +++ b/engine/Dockerfile @@ -1,12 +1,13 @@ -FROM python:3.9-alpine3.16 +FROM python:3.9-alpine3.16 AS base RUN apk add bash python3-dev build-base linux-headers pcre-dev mariadb-connector-c-dev openssl-dev libffi-dev git -RUN pip install uwsgi WORKDIR /etc/app COPY ./requirements.txt ./ -RUN pip install regex==2021.11.2 RUN pip install -r requirements.txt +# we intentionally have two COPY commands, this is to have the requirements.txt in a separate build step +# which only invalidates when the requirements.txt actually changes. 
This avoids having to unneccasrily reinstall deps (which is time-consuming) +# https://stackoverflow.com/questions/34398632/docker-how-to-run-pip-requirements-txt-only-if-there-was-a-change/34399661#34399661 COPY ./ ./ # Collect static files and create an SQLite database @@ -14,6 +15,12 @@ RUN mkdir -p /var/lib/oncall RUN DJANGO_SETTINGS_MODULE=settings.prod_without_db DATABASE_TYPE=sqlite3 DATABASE_NAME=/var/lib/oncall/oncall.db SECRET_KEY="ThEmUsTSecretKEYforBUILDstage123" python manage.py collectstatic --no-input RUN chown -R 1000:2000 /var/lib/oncall +FROM base AS dev + +# these are needed for the django dbshell command +RUN apk add sqlite mysql-client postgresql-client + +FROM base AS prod # This is required for prometheus_client to sync between uwsgi workers RUN mkdir -p /tmp/prometheus_django_metrics; diff --git a/engine/requirements.txt b/engine/requirements.txt index a29f7cb169..3c5463f1ae 100644 --- a/engine/requirements.txt +++ b/engine/requirements.txt @@ -40,4 +40,4 @@ PyMySQL==1.0.2 psycopg2-binary==2.9.3 emoji==1.7.0 apns2==0.7.2 - +regex==2021.11.2 diff --git a/engine/settings/dev.py b/engine/settings/dev.py index 9c418ae4af..4923902fe0 100644 --- a/engine/settings/dev.py +++ b/engine/settings/dev.py @@ -17,15 +17,7 @@ "PORT": DATABASE_PORT or DATABASE_DEFAULTS[DATABASE_TYPE]["PORT"], } -if BROKER_TYPE == BrokerTypes.RABBITMQ: - CELERY_BROKER_URL = "pyamqp://rabbitmq:rabbitmq@localhost:5672" -elif BROKER_TYPE == BrokerTypes.REDIS: - CELERY_BROKER_URL = "redis://localhost:6379" - -CACHES["default"]["LOCATION"] = ["localhost:6379"] - SECRET_KEY = os.environ.get("SECRET_KEY", "osMsNM0PqlRHBlUvqmeJ7+ldU3IUETCrY9TrmiViaSmInBHolr1WUlS0OFS4AHrnnkp1vp9S9z1") - MIRAGE_SECRET_KEY = os.environ.get( "MIRAGE_SECRET_KEY", "sIrmyTvh+Go+h/2E46SnYGwgkKyH6IF6MXZb65I40HVCbj0+dD3JvpAqppEwFb7Vxnxlvtey+EL" ) diff --git a/grafana-plugin/.dockerignore b/grafana-plugin/.dockerignore new file mode 100644 index 0000000000..bfefc6f9c5 --- /dev/null +++ 
b/grafana-plugin/.dockerignore @@ -0,0 +1,4 @@ +node_modules +frontend_enterprise +dist +.DS_Store diff --git a/grafana-plugin/Dockerfile.dev b/grafana-plugin/Dockerfile.dev new file mode 100644 index 0000000000..681ca9ea7d --- /dev/null +++ b/grafana-plugin/Dockerfile.dev @@ -0,0 +1,14 @@ +FROM node:14.17.0-alpine + +WORKDIR /etc/app +ENV PATH /etc/app/node_modules/.bin:$PATH + +# this allows hot reloading of the container +# https://stackoverflow.com/a/72478714 +ENV WATCHPACK_POLLING true + +COPY ./package.json ./ +COPY ./yarn.lock ./ +RUN yarn install + +CMD ["yarn", "start"] diff --git a/grafana-plugin/package.json b/grafana-plugin/package.json index 4c338d15fd..712c972bfc 100644 --- a/grafana-plugin/package.json +++ b/grafana-plugin/package.json @@ -8,6 +8,7 @@ "stylelint": "stylelint ./src/**/*.{css,scss,module.css,module.scss}", "stylelint:fix": "stylelint --fix ./src/**/*.{css,scss,module.css,module.scss}", "build": "grafana-toolkit plugin:build", + "build:dev": "grafana-toolkit plugin:build --skipTest --skipLint", "test": "jest --verbose", "dev": "grafana-toolkit plugin:dev", "watch": "grafana-toolkit plugin:dev --watch", diff --git a/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx b/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx index ea9c77e1f0..7ff7353a03 100644 --- a/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx +++ b/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx @@ -299,7 +299,7 @@ Seek for such a line: “Your invite token: <> , use it in the Graf <> From a349d39f318448e2c05d521d78eef7c940f23fe2 Mon Sep 17 00:00:00 2001 From: Matias Bordese Date: Mon, 7 Nov 2022 12:03:57 -0300 Subject: [PATCH 26/49] Enforce until on override shifts --- engine/apps/api/views/schedule.py | 8 +---- .../schedules/models/custom_on_call_shift.py | 5 ++- .../apps/schedules/models/on_call_schedule.py | 6 +--- .../tests/test_custom_on_call_shift.py | 32 +++++++++++++++++++ 4 files changed, 38 
insertions(+), 13 deletions(-) diff --git a/engine/apps/api/views/schedule.py b/engine/apps/api/views/schedule.py index 07e7378700..7ecebaf4e3 100644 --- a/engine/apps/api/views/schedule.py +++ b/engine/apps/api/views/schedule.py @@ -1,5 +1,3 @@ -import datetime - import pytz from django.core.exceptions import ObjectDoesNotExist from django.db.models import Count, OuterRef, Subquery @@ -297,11 +295,7 @@ def next_shifts_per_user(self, request, pk): users = {u: None for u in schedule.related_users()} for e in events: user = e["users"][0]["pk"] if e["users"] else None - event_end = e["end"] - if not isinstance(event_end, datetime.datetime): - # all day events end is a date, make it a datetime for comparison - event_end = datetime.datetime.combine(event_end, datetime.datetime.min.time(), tzinfo=pytz.UTC) - if user is not None and users.get(user) is None and event_end > now: + if user is not None and users.get(user) is None and e["end"] > now: users[user] = e result = {"users": users} diff --git a/engine/apps/schedules/models/custom_on_call_shift.py b/engine/apps/schedules/models/custom_on_call_shift.py index 3026325e11..c285dba9f9 100644 --- a/engine/apps/schedules/models/custom_on_call_shift.py +++ b/engine/apps/schedules/models/custom_on_call_shift.py @@ -401,7 +401,10 @@ def generate_ical(self, start, user_counter=0, user=None, counter=1, time_zone=" if user: event.add("summary", self.get_summary_with_user_for_ical(user)) event.add("dtstart", self.convert_dt_to_schedule_timezone(start, time_zone)) - event.add("dtend", self.convert_dt_to_schedule_timezone(start + self.duration, time_zone)) + dtend = start + self.duration + if self.until: + dtend = min(dtend, self.until) + event.add("dtend", self.convert_dt_to_schedule_timezone(dtend, time_zone)) event.add("dtstamp", self.rotation_start) if custom_rrule: event.add("rrule", custom_rrule) diff --git a/engine/apps/schedules/models/on_call_schedule.py b/engine/apps/schedules/models/on_call_schedule.py index 
38df913832..7b04d6aedc 100644 --- a/engine/apps/schedules/models/on_call_schedule.py +++ b/engine/apps/schedules/models/on_call_schedule.py @@ -272,11 +272,7 @@ def _resolve_schedule(self, events): return [] def event_start_cmp_key(e): - # all day events: compare using a datetime object at 00:00 - start = e["start"] - if not isinstance(start, datetime.datetime): - start = datetime.datetime.combine(start, datetime.datetime.min.time(), tzinfo=pytz.UTC) - return start + return e["start"] def event_cmp_key(e): """Sorting key criteria for events.""" diff --git a/engine/apps/schedules/tests/test_custom_on_call_shift.py b/engine/apps/schedules/tests/test_custom_on_call_shift.py index a7945765d4..8a746169c9 100644 --- a/engine/apps/schedules/tests/test_custom_on_call_shift.py +++ b/engine/apps/schedules/tests/test_custom_on_call_shift.py @@ -59,6 +59,38 @@ def test_get_on_call_users_from_web_schedule_override(make_organization_and_user assert user in users_on_call +@pytest.mark.django_db +def test_get_on_call_users_from_web_schedule_override_until( + make_organization_and_user, make_on_call_shift, make_schedule +): + organization, user = make_organization_and_user() + + schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb) + date = timezone.now().replace(microsecond=0) + + data = { + "start": date, + "rotation_start": date, + "duration": timezone.timedelta(seconds=10800), + "schedule": schedule, + "until": date + timezone.timedelta(seconds=3600), + } + + on_call_shift = make_on_call_shift(organization=organization, shift_type=CustomOnCallShift.TYPE_OVERRIDE, **data) + on_call_shift.add_rolling_users([[user]]) + + # user is on-call + date = date + timezone.timedelta(minutes=5) + users_on_call = list_users_to_notify_from_ical(schedule, date) + assert len(users_on_call) == 1 + assert user in users_on_call + + # and the until is enforced + date = date + timezone.timedelta(hours=2) + users_on_call = list_users_to_notify_from_ical(schedule, date) + assert 
len(users_on_call) == 0 + + @pytest.mark.django_db def test_get_on_call_users_from_recurrent_event(make_organization_and_user, make_on_call_shift, make_schedule): organization, user = make_organization_and_user() From 5e58d125c0180174fbf492d9647dd800b4b87b83 Mon Sep 17 00:00:00 2001 From: Maxim Date: Mon, 7 Nov 2022 17:20:48 +0000 Subject: [PATCH 27/49] make new schedules as default --- .../NewScheduleSelector.tsx | 12 +- grafana-plugin/src/pages/index.ts | 15 +- .../src/pages/schedule/Schedule.tsx | 2 +- .../src/pages/schedules/Schedules.module.css | 66 +- .../src/pages/schedules/Schedules.tsx | 702 +++++++----------- .../pages/schedules_NEW/Schedules.module.css | 26 - .../src/pages/schedules_NEW/Schedules.tsx | 423 ----------- .../Schedules.helpers.ts | 0 .../pages/schedules_OLD/Schedules.module.css | 68 ++ .../src/pages/schedules_OLD/Schedules.tsx | 555 ++++++++++++++ 10 files changed, 929 insertions(+), 940 deletions(-) delete mode 100644 grafana-plugin/src/pages/schedules_NEW/Schedules.module.css delete mode 100644 grafana-plugin/src/pages/schedules_NEW/Schedules.tsx rename grafana-plugin/src/pages/{schedules => schedules_OLD}/Schedules.helpers.ts (100%) create mode 100644 grafana-plugin/src/pages/schedules_OLD/Schedules.module.css create mode 100644 grafana-plugin/src/pages/schedules_OLD/Schedules.tsx diff --git a/grafana-plugin/src/components/NewScheduleSelector/NewScheduleSelector.tsx b/grafana-plugin/src/components/NewScheduleSelector/NewScheduleSelector.tsx index a9786716e9..782eb0d31b 100644 --- a/grafana-plugin/src/components/NewScheduleSelector/NewScheduleSelector.tsx +++ b/grafana-plugin/src/components/NewScheduleSelector/NewScheduleSelector.tsx @@ -80,12 +80,16 @@ const NewScheduleSelector: FC = (props) => { Create schedule by API - Configure rotations and upload calendar by Terraform file + Use API or Terraform to manage rotations - + + + diff --git a/grafana-plugin/src/pages/index.ts b/grafana-plugin/src/pages/index.ts index 5c2478f497..812b68f933 
100644 --- a/grafana-plugin/src/pages/index.ts +++ b/grafana-plugin/src/pages/index.ts @@ -13,8 +13,8 @@ import MaintenancePage2 from 'pages/maintenance/Maintenance'; import OrganizationLogPage2 from 'pages/organization-logs/OrganizationLog'; import OutgoingWebhooks2 from 'pages/outgoing_webhooks/OutgoingWebhooks'; import SchedulePage from 'pages/schedule/Schedule'; -import SchedulesPage2 from 'pages/schedules/Schedules'; -import SchedulesPage from 'pages/schedules_NEW/Schedules'; +import SchedulesPage from 'pages/schedules/Schedules'; +import SchedulesPageOld from 'pages/schedules_OLD/Schedules'; import SettingsPage2 from 'pages/settings/SettingsPage'; import Test from 'pages/test/Test'; import UsersPage2 from 'pages/users/Users'; @@ -61,16 +61,17 @@ export const pages: PageDefinition[] = [ text: 'Escalation Chains', }, { - component: SchedulesPage2, + component: SchedulesPageOld, icon: 'calendar-alt', - id: 'schedules', - text: 'Schedules', + id: 'schedules-old', + text: 'Schedules OLD', + hideFromTabs: true, }, { component: SchedulesPage, icon: 'calendar-alt', - id: 'schedules-new', - text: 'Schedules α', + id: 'schedules', + text: 'Schedules', }, { component: SchedulePage, diff --git a/grafana-plugin/src/pages/schedule/Schedule.tsx b/grafana-plugin/src/pages/schedule/Schedule.tsx index f98cca9bb3..38287d1332 100644 --- a/grafana-plugin/src/pages/schedule/Schedule.tsx +++ b/grafana-plugin/src/pages/schedule/Schedule.tsx @@ -110,7 +110,7 @@ class SchedulePage extends React.Component
- + diff --git a/grafana-plugin/src/pages/schedules/Schedules.module.css b/grafana-plugin/src/pages/schedules/Schedules.module.css index 102be56c88..07963dc1c4 100644 --- a/grafana-plugin/src/pages/schedules/Schedules.module.css +++ b/grafana-plugin/src/pages/schedules/Schedules.module.css @@ -2,67 +2,11 @@ margin-top: 24px; } -.title { - margin-bottom: 20px; +.schedule { + position: relative; + margin: 20px 0; } -.header { - display: flex; - justify-content: space-between; - align-items: center; -} - -.buttons { - width: 100%; - justify-content: flex-end; -} - -.filters { - margin-bottom: 20px; -} - -.instructions { - display: flex; - flex-direction: column; - align-items: center; - gap: 20px; - width: 50%; - margin: 20px auto; - white-space: break-spaces; - text-align: center; -} - -.events { - margin: 16px 32px; -} - -.events-list { - margin: 0; - list-style-type: none; -} - -.events-list-item { - margin-top: 12px; -} - -.priority-icon { - width: 32px; - border-radius: 50%; - background: var(--secondary-background); - line-height: 32px; - text-align: center; - font-size: 14px; - font-weight: 500; - flex-shrink: 0; -} - -.gap-between-shifts { - width: 520px; - padding: 5px 5px 5px 24px; - background-color: rgba(209, 14, 92, 0.15); - border: 1px solid rgba(209, 14, 92, 0.15); - border-radius: 50px; - color: #ff5286; - font-weight: 400; - align-items: baseline; +.root .buttons { + padding-right: 10px; } diff --git a/grafana-plugin/src/pages/schedules/Schedules.tsx b/grafana-plugin/src/pages/schedules/Schedules.tsx index 49a632ae47..1e45e43aca 100644 --- a/grafana-plugin/src/pages/schedules/Schedules.tsx +++ b/grafana-plugin/src/pages/schedules/Schedules.tsx @@ -1,129 +1,78 @@ import React, { SyntheticEvent } from 'react'; -import { AppRootProps } from '@grafana/data'; import { getLocationSrv } from '@grafana/runtime'; -import { - Button, - ConfirmModal, - HorizontalGroup, - Icon, - LoadingPlaceholder, - Modal, - PENDING_COLOR, - Tooltip, - VerticalGroup, -} from 
'@grafana/ui'; +import { Button, HorizontalGroup, IconButton, LoadingPlaceholder, VerticalGroup } from '@grafana/ui'; import cn from 'classnames/bind'; -import { omit } from 'lodash-es'; +import dayjs from 'dayjs'; +import { debounce } from 'lodash-es'; import { observer } from 'mobx-react'; -import moment from 'moment-timezone'; -import instructionsImage from 'assets/img/events_instructions.png'; import Avatar from 'components/Avatar/Avatar'; -import GTable from 'components/GTable/GTable'; -import PageErrorHandlingWrapper, { PageBaseState } from 'components/PageErrorHandlingWrapper/PageErrorHandlingWrapper'; -import { - getWrongTeamResponseInfo, - initErrorDataState, -} from 'components/PageErrorHandlingWrapper/PageErrorHandlingWrapper.helpers'; +import NewScheduleSelector from 'components/NewScheduleSelector/NewScheduleSelector'; import PluginLink from 'components/PluginLink/PluginLink'; -import SchedulesFilters from 'components/SchedulesFilters/SchedulesFilters'; -import { SchedulesFiltersType } from 'components/SchedulesFilters/SchedulesFilters.types'; +import ScheduleCounter from 'components/ScheduleCounter/ScheduleCounter'; +import ScheduleWarning from 'components/ScheduleWarning/ScheduleWarning'; +import SchedulesFilters from 'components/SchedulesFilters_NEW/SchedulesFilters'; +import { SchedulesFiltersType } from 'components/SchedulesFilters_NEW/SchedulesFilters.types'; +import Table from 'components/Table/Table'; import Text from 'components/Text/Text'; -import Tutorial from 'components/Tutorial/Tutorial'; -import { TutorialStep } from 'components/Tutorial/Tutorial.types'; +import TimelineMarks from 'components/TimelineMarks/TimelineMarks'; +import UserTimezoneSelect from 'components/UserTimezoneSelect/UserTimezoneSelect'; +import WithConfirm from 'components/WithConfirm/WithConfirm'; +import ScheduleFinal from 'containers/Rotations/ScheduleFinal'; import ScheduleForm from 'containers/ScheduleForm/ScheduleForm'; -import ScheduleICalSettings from 
'containers/ScheduleIcalLink/ScheduleIcalLink'; import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl'; -import { Schedule, ScheduleEvent, ScheduleType } from 'models/schedule/schedule.types'; +import { Schedule, ScheduleType } from 'models/schedule/schedule.types'; import { getSlackChannelName } from 'models/slack_channel/slack_channel.helpers'; +import { Timezone } from 'models/timezone/timezone.types'; +import { getStartOfWeek } from 'pages/schedule/Schedule.helpers'; import { WithStoreProps } from 'state/types'; import { UserAction } from 'state/userAction'; import { withMobXProviderContext } from 'state/withStore'; -import { openErrorNotification } from 'utils'; - -import { getDatesString } from './Schedules.helpers'; import styles from './Schedules.module.css'; const cx = cn.bind(styles); -interface SchedulesPageProps extends WithStoreProps, AppRootProps {} -interface SchedulesPageState extends PageBaseState { - scheduleIdToEdit?: Schedule['id']; - scheduleIdToDelete?: Schedule['id']; - scheduleIdToExport?: Schedule['id']; +interface SchedulesPageProps extends WithStoreProps {} + +interface SchedulesPageState { + startMoment: dayjs.Dayjs; filters: SchedulesFiltersType; - expandedSchedulesKeys: Array; + showNewScheduleSelector: boolean; + expandedRowKeys: Array; + scheduleIdToEdit?: Schedule['id']; } @observer class SchedulesPage extends React.Component { - state: SchedulesPageState = { - filters: { - selectedDate: moment().startOf('day').format('YYYY-MM-DD'), - }, - expandedSchedulesKeys: [], - errorData: initErrorDataState(), - }; + constructor(props: SchedulesPageProps) { + super(props); - componentDidMount() { - this.update().then(this.parseQueryParams); - } - - componentDidUpdate(prevProps: SchedulesPageProps) { - if (this.props.query.id !== prevProps.query.id) { - this.parseQueryParams(); - } + const { store } = this.props; + this.state = { + startMoment: getStartOfWeek(store.currentTimezone), + filters: { 
searchTerm: '', status: 'all', type: undefined }, + showNewScheduleSelector: false, + expandedRowKeys: [], + scheduleIdToEdit: undefined, + }; } - parseQueryParams = async () => { - this.setState({ errorData: initErrorDataState() }); // reset wrong team error to false on query parse - - const { - store, - query: { id }, - } = this.props; - - if (!id) { - return; - } - - let scheduleId: string = undefined; - const isNewSchedule = id === 'new'; - - if (!isNewSchedule) { - // load schedule only for valid id - const schedule = await store.scheduleStore - .loadItem(id, true) - .catch((error) => this.setState({ errorData: { ...getWrongTeamResponseInfo(error) } })); - if (!schedule) { - return; - } - - scheduleId = schedule.id; - } - - if (scheduleId || isNewSchedule) { - this.setState({ scheduleIdToEdit: id }); - } else { - openErrorNotification(`Schedule with id=${id} is not found. Please select schedule from the list.`); - } - }; - - update = () => { + async componentDidMount() { const { store } = this.props; - const { scheduleStore } = store; - return scheduleStore.updateItems(); - }; + store.userStore.updateItems(); + store.scheduleStore.updateItems(); + } render() { - const { store, query } = this.props; - const { expandedSchedulesKeys, scheduleIdToDelete, scheduleIdToEdit, scheduleIdToExport } = this.state; - const { filters, errorData } = this.state; + const { store } = this.props; + const { filters, showNewScheduleSelector, expandedRowKeys, scheduleIdToEdit } = this.state; + const { scheduleStore } = store; + const schedules = scheduleStore.getSearchResult(); const columns = [ { width: '10%', @@ -132,13 +81,21 @@ class SchedulesPage extends React.Component this.renderStatus(item), + }, + { + width: '30%', title: 'Name', - dataIndex: 'name', + key: 'name', + render: this.renderName, }, { - width: '20%', - title: 'OnCall now', + width: '30%', + title: 'Oncall', + key: 'users', render: this.renderOncallNow, }, { @@ -152,227 +109,155 @@ class SchedulesPage extends 
React.Component + filters.status === 'all' || + (filters.status === 'used' && schedule.number_of_escalation_chains) || + (filters.status === 'unused' && !schedule.number_of_escalation_chains) + ) + : undefined; return ( - - {() => ( - <> -
-
- - On-call Schedules - - Use this to distribute notifications among team members you specified in the "Notify Users from - on-call schedule" step in{' '} - escalation chains. - - -
- - {!schedules || schedules.length ? ( - ( -
- - - - Your timezone is {timezoneStr} UTC{offset} - - - - - - - -
- )} - rowKey="id" - columns={columns} - data={schedules} - expandable={{ - expandedRowRender: this.renderEvents, - expandRowByClick: true, - onExpand: this.onRowExpand, - expandedRowKeys: expandedSchedulesKeys, - onExpandedRowsChange: this.handleExpandedRowsChange, - }} - /> - ) : ( - - You haven’t added a schedule yet. - - - - - } - /> - )} -
- - {scheduleIdToEdit && ( - { - this.setState({ scheduleIdToEdit: undefined }); - getLocationSrv().update({ partial: true, query: { id: undefined } }); - }} - /> - )} - - {scheduleIdToDelete && ( - { - this.setState({ scheduleIdToDelete: undefined }); - }} - /> - )} - - {scheduleIdToExport && ( - this.setState({ scheduleIdToExport: undefined })} - > - - - )} - + <> +
+ + + + + {users && ( + + )} + + + + + {data ? Not found : Loading schedules...} + + } + /> + + + {showNewScheduleSelector && ( + { + this.setState({ showNewScheduleSelector: false }); + }} + /> + )} + {scheduleIdToEdit && ( + { + this.setState({ scheduleIdToEdit: undefined }); + }} + /> )} - + ); } - onRowExpand = (expanded: boolean, schedule: Schedule) => { - if (expanded) { - this.updateEventsFor(schedule.id); - } + handleTimezoneChange = (value: Timezone) => { + const { store } = this.props; + + store.currentTimezone = value; + + this.setState({ startMoment: getStartOfWeek(value) }, this.updateEvents); }; - handleExpandedRowsChange = (expandedRows: string[]) => { - this.setState({ expandedSchedulesKeys: expandedRows }); + handleCreateScheduleClick = () => { + this.setState({ showNewScheduleSelector: true }); }; - renderEvents = (schedule: Schedule) => { - const { store } = this.props; - const { scheduleStore } = store; - const { scheduleToScheduleEvents } = scheduleStore; - - const events = scheduleToScheduleEvents[schedule.id]; - - return events ? ( - events.length ? ( -
- - Events - -
    - {(events || []).map((event, idx) => ( -
  • - -
  • - ))} -
-
- ) : ( - this.renderInstruction() - ) - ) : ( - - ); + handleCreateSchedule = (data: Schedule) => { + if (data.type === ScheduleType.API) { + getLocationSrv().update({ query: { page: 'schedule', id: data.id } }); + } }; - renderInstruction = () => { - const { store } = this.props; - const { userStore } = store; + handleExpandRow = (expanded: boolean, data: Schedule) => { + const { expandedRowKeys } = this.state; - return ( -
- - There are no active slots here. To add an event, enter a username, for example “ - {userStore.currentUser?.username}“, and click the “Reload” button. OnCall will download this calendar and set - up an on-call schedule based on event names. OnCall will refresh the calendar every 10 minutes after the - intial setup. - - -
- ); + if (expanded && !expandedRowKeys.includes(data.id)) { + this.setState({ expandedRowKeys: [...this.state.expandedRowKeys, data.id] }, this.updateEvents); + } else if (!expanded && expandedRowKeys.includes(data.id)) { + const index = expandedRowKeys.indexOf(data.id); + const newExpandedRowKeys = [...expandedRowKeys]; + newExpandedRowKeys.splice(index, 1); + this.setState({ expandedRowKeys: newExpandedRowKeys }, this.updateEvents); + } }; - handleChangeFilters = (filters: SchedulesFiltersType) => { - this.setState({ filters }, () => { - const { filters, expandedSchedulesKeys } = this.state; - - if (!filters.selectedDate) { - return; - } + updateEvents = () => { + const { store } = this.props; + const { expandedRowKeys, startMoment } = this.state; - expandedSchedulesKeys.forEach((id) => this.updateEventsFor(id)); + expandedRowKeys.forEach((scheduleId) => { + store.scheduleStore.updateEvents(scheduleId, startMoment, 'rotation'); + store.scheduleStore.updateEvents(scheduleId, startMoment, 'override'); + store.scheduleStore.updateEvents(scheduleId, startMoment, 'final'); }); }; - renderChannelName = (value: Schedule) => { - return getSlackChannelName(value.slack_channel) || '-'; - }; + renderSchedule = (data: Schedule) => { + const { startMoment } = this.state; + const { store } = this.props; - renderUserGroup = (value: Schedule) => { - return value.user_group?.handle || '-'; + return ( +
+ +
+ +
+
+ ); }; - renderOncallNow = (item: Schedule, _index: number) => { - if (item.on_call_now?.length > 0) { - return item.on_call_now.map((user, _index) => { - return ( - -
- - {user.username} -
-
- ); - }); - } - return null; + getScheduleClickHandler = (scheduleId: Schedule['id']) => { + return () => { + getLocationSrv().update({ query: { page: 'schedule', id: scheduleId } }); + }; }; renderType = (value: number) => { @@ -384,172 +269,153 @@ class SchedulesPage extends React.Component { - if (item.warnings.length > 0) { - const tooltipContent = ( -
- {item.warnings.map((warning: string) => ( -

{warning}

- ))} -
- ); + return ; + }; + + renderStatus = (item: Schedule) => { + const { + store: { scheduleStore }, + } = this.props; + + const relatedEscalationChains = scheduleStore.relatedEscalationChains[item.id]; + return ( + + {item.number_of_escalation_chains > 0 && ( + + {relatedEscalationChains ? ( + relatedEscalationChains.length ? ( + relatedEscalationChains.map((escalationChain) => ( +
+ + {escalationChain.name} + +
+ )) + ) : ( + 'Not used yet' + ) + ) : ( + Loading related escalation chains.... + )} + + } + onHover={this.getUpdateRelatedEscalationChainsHandler(item.id)} + /> + )} + + {/* */} +
+ ); + }; + + renderName = (item: Schedule) => { + return {item.name}; + }; + + renderOncallNow = (item: Schedule, _index: number) => { + if (item.on_call_now?.length > 0) { return ( - - - + + {item.on_call_now.map((user, _index) => { + return ( + +
+ + {user.username} +
+
+ ); + })} +
); } - return null; }; - renderActionButtons = (record: Schedule) => { - return ( - - - - - - - - - + renderButtons = (item: Schedule) => { + return ( + + + - - + + + + ); }; - updateEventsFor = async (scheduleId: Schedule['id'], withEmpty = true, with_gap = true) => { - const { store } = this.props; - - const { scheduleStore } = store; - const { - filters: { selectedDate }, - } = this.state; - - store.scheduleStore.scheduleToScheduleEvents = omit(store.scheduleStore.scheduleToScheduleEvents, [scheduleId]); - - this.forceUpdate(); - - await scheduleStore.updateScheduleEvents(scheduleId, withEmpty, with_gap, selectedDate, moment.tz.guess()); + getEditScheduleClickHandler = (id: Schedule['id']) => { + return (event) => { + event.stopPropagation(); - this.forceUpdate(); + this.setState({ scheduleIdToEdit: id }); + }; }; - getReloadScheduleClickHandler = (scheduleId: Schedule['id']) => { + getDeleteScheduleClickHandler = (id: Schedule['id']) => { const { store } = this.props; - const { scheduleStore } = store; - return async (event: SyntheticEvent) => { + return (event: SyntheticEvent) => { event.stopPropagation(); - await scheduleStore.reloadIcal(scheduleId); - - scheduleStore.updateItem(scheduleId); - this.updateEventsFor(scheduleId); + scheduleStore.delete(id).then(this.update); }; }; - getDeleteScheduleClickHandler = (scheduleId: Schedule['id']) => { - return (event: SyntheticEvent) => { - event.stopPropagation(); - this.setState({ scheduleIdToDelete: scheduleId }); - }; + handleSchedulesFiltersChange = (filters: SchedulesFiltersType) => { + this.setState({ filters }, this.debouncedUpdateSchedules); }; - getExportScheduleClickHandler = (scheduleId: Schedule['id']) => { - return (event: SyntheticEvent) => { - event.stopPropagation(); - this.setState({ scheduleIdToExport: scheduleId }); - }; + applyFilters = () => { + const { filters } = this.state; + const { store } = this.props; + const { scheduleStore } = store; + scheduleStore.updateItems(filters); }; - 
handleDelete = async () => { - const { scheduleIdToDelete } = this.state; - const { store } = this.props; + debouncedUpdateSchedules = debounce(this.applyFilters, 1000); - this.setState({ scheduleIdToDelete: undefined }); + handlePageChange = (_page: number) => {}; + update = () => { + const { store } = this.props; const { scheduleStore } = store; - await scheduleStore.delete(scheduleIdToDelete); - - this.update(); + return scheduleStore.updateItems(); }; -} - -interface EventProps { - event: ScheduleEvent; -} -const Event = ({ event }: EventProps) => { - const dates = getDatesString(event.start, event.end, event.all_day); + getUpdateRelatedEscalationChainsHandler = (scheduleId: Schedule['id']) => { + const { store } = this.props; + const { scheduleStore } = store; - return ( - <> - {!event.is_gap ? ( - -
- {`L${event.priority_level || '0'}`} -
- -
- {!event.is_empty ? ( - event.users.map((user: any, index: number) => ( - - {index ? ', ' : ''} - {user.display_name} - - )) - ) : ( - - - Empty shift - {event.missing_users[0] && ( - - (check if {event.missing_users[0].includes(',') ? 'some of these users -' : 'user -'}{' '} - "{event.missing_users[0]}"{' '} - {event.missing_users[0].includes(',') ? 'are' : 'is'} existing in OnCall or{' '} - {event.missing_users[0].includes(',') ? 'have' : 'has'} Viewer role) - - )} - - )} - {event.source && — source: {event.source}} -
-
- {dates} -
-
-
- ) : ( -
- - Gap! Nobody On-Call... -
- )} - - ); -}; + return () => { + scheduleStore.updateRelatedEscalationChains(scheduleId).then(() => { + this.forceUpdate(); + }); + }; + }; +} export default withMobXProviderContext(SchedulesPage); diff --git a/grafana-plugin/src/pages/schedules_NEW/Schedules.module.css b/grafana-plugin/src/pages/schedules_NEW/Schedules.module.css deleted file mode 100644 index 380b25a347..0000000000 --- a/grafana-plugin/src/pages/schedules_NEW/Schedules.module.css +++ /dev/null @@ -1,26 +0,0 @@ -.root { - margin-top: 24px; -} - -.quality__type_success { - color: var(--warning-text-color); -} - -.schedule { - position: relative; - margin: 20px 0; -} - -.loader { - padding-left: 20px; -} - -.root .buttons { - padding-right: 10px; -} - -/* -.root .expanded-row { - background: var(--secondary-background); -} -*/ diff --git a/grafana-plugin/src/pages/schedules_NEW/Schedules.tsx b/grafana-plugin/src/pages/schedules_NEW/Schedules.tsx deleted file mode 100644 index c180fb46d4..0000000000 --- a/grafana-plugin/src/pages/schedules_NEW/Schedules.tsx +++ /dev/null @@ -1,423 +0,0 @@ -import React, { SyntheticEvent } from 'react'; - -import { getLocationSrv } from '@grafana/runtime'; -import { Button, HorizontalGroup, IconButton, LoadingPlaceholder, VerticalGroup } from '@grafana/ui'; -import cn from 'classnames/bind'; -import dayjs from 'dayjs'; -import { debounce } from 'lodash-es'; -import { observer } from 'mobx-react'; - -import Avatar from 'components/Avatar/Avatar'; -import NewScheduleSelector from 'components/NewScheduleSelector/NewScheduleSelector'; -import PluginLink from 'components/PluginLink/PluginLink'; -import ScheduleCounter from 'components/ScheduleCounter/ScheduleCounter'; -import ScheduleWarning from 'components/ScheduleWarning/ScheduleWarning'; -import SchedulesFilters from 'components/SchedulesFilters_NEW/SchedulesFilters'; -import { SchedulesFiltersType } from 'components/SchedulesFilters_NEW/SchedulesFilters.types'; -import Table from 'components/Table/Table'; -import 
Text from 'components/Text/Text'; -import TimelineMarks from 'components/TimelineMarks/TimelineMarks'; -import UserTimezoneSelect from 'components/UserTimezoneSelect/UserTimezoneSelect'; -import WithConfirm from 'components/WithConfirm/WithConfirm'; -import ScheduleFinal from 'containers/Rotations/ScheduleFinal'; -import ScheduleForm from 'containers/ScheduleForm/ScheduleForm'; -import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl'; -import { Schedule, ScheduleType } from 'models/schedule/schedule.types'; -import { getSlackChannelName } from 'models/slack_channel/slack_channel.helpers'; -import { Timezone } from 'models/timezone/timezone.types'; -import { getStartOfWeek } from 'pages/schedule/Schedule.helpers'; -import { WithStoreProps } from 'state/types'; -import { UserAction } from 'state/userAction'; -import { withMobXProviderContext } from 'state/withStore'; - -import styles from './Schedules.module.css'; - -const cx = cn.bind(styles); - -interface SchedulesPageProps extends WithStoreProps {} - -interface SchedulesPageState { - startMoment: dayjs.Dayjs; - filters: SchedulesFiltersType; - showNewScheduleSelector: boolean; - expandedRowKeys: Array; - scheduleIdToEdit?: Schedule['id']; -} - -@observer -class SchedulesPage extends React.Component { - constructor(props: SchedulesPageProps) { - super(props); - - const { store } = this.props; - this.state = { - startMoment: getStartOfWeek(store.currentTimezone), - filters: { searchTerm: '', status: 'all', type: undefined }, - showNewScheduleSelector: false, - expandedRowKeys: [], - scheduleIdToEdit: undefined, - }; - } - - async componentDidMount() { - const { store } = this.props; - - store.userStore.updateItems(); - store.scheduleStore.updateItems(); - } - - render() { - const { store } = this.props; - const { filters, showNewScheduleSelector, expandedRowKeys, scheduleIdToEdit } = this.state; - - const { scheduleStore } = store; - - const schedules = 
scheduleStore.getSearchResult(); - const columns = [ - { - width: '10%', - title: 'Type', - dataIndex: 'type', - render: this.renderType, - }, - { - width: '5%', - title: 'Status', - key: 'name', - render: (item: Schedule) => this.renderStatus(item), - }, - { - width: '30%', - title: 'Name', - key: 'name', - render: this.renderName, - }, - { - width: '30%', - title: 'Oncall', - key: 'users', - render: this.renderOncallNow, - }, - { - width: '10%', - title: 'Slack channel', - render: this.renderChannelName, - }, - { - width: '10%', - title: 'Slack user group', - render: this.renderUserGroup, - }, - { - width: '5%', - key: 'warning', - render: this.renderWarning, - }, - { - width: '50px', - key: 'buttons', - render: this.renderButtons, - className: cx('buttons'), - }, - ]; - - const users = store.userStore.getSearchResult().results; - - const data = schedules - ? schedules - .filter( - (schedule) => - filters.status === 'all' || - (filters.status === 'used' && schedule.number_of_escalation_chains) || - (filters.status === 'unused' && !schedule.number_of_escalation_chains) - ) - .filter((schedule) => !filters.searchTerm || schedule.name.includes(filters.searchTerm)) - : undefined; - - return ( - <> -
- - - - - {users && ( - - )} - - - -
- {data ? Not found : Loading schedules...} - - } - /> - - - {showNewScheduleSelector && ( - { - this.setState({ showNewScheduleSelector: false }); - }} - /> - )} - {scheduleIdToEdit && ( - { - this.setState({ scheduleIdToEdit: undefined }); - }} - /> - )} - - ); - } - - handleTimezoneChange = (value: Timezone) => { - const { store } = this.props; - - store.currentTimezone = value; - - this.setState({ startMoment: getStartOfWeek(value) }, this.updateEvents); - }; - - handleCreateScheduleClick = () => { - this.setState({ showNewScheduleSelector: true }); - }; - - handleCreateSchedule = (data: Schedule) => { - if (data.type === ScheduleType.API) { - getLocationSrv().update({ query: { page: 'schedule', id: data.id } }); - } - }; - - handleExpandRow = (expanded: boolean, data: Schedule) => { - const { expandedRowKeys } = this.state; - - if (expanded && !expandedRowKeys.includes(data.id)) { - this.setState({ expandedRowKeys: [...this.state.expandedRowKeys, data.id] }, this.updateEvents); - } else if (!expanded && expandedRowKeys.includes(data.id)) { - const index = expandedRowKeys.indexOf(data.id); - const newExpandedRowKeys = [...expandedRowKeys]; - newExpandedRowKeys.splice(index, 1); - this.setState({ expandedRowKeys: newExpandedRowKeys }, this.updateEvents); - } - }; - - updateEvents = () => { - const { store } = this.props; - const { expandedRowKeys, startMoment } = this.state; - - expandedRowKeys.forEach((scheduleId) => { - store.scheduleStore.updateEvents(scheduleId, startMoment, 'rotation'); - store.scheduleStore.updateEvents(scheduleId, startMoment, 'override'); - store.scheduleStore.updateEvents(scheduleId, startMoment, 'final'); - }); - }; - - renderSchedule = (data: Schedule) => { - const { startMoment } = this.state; - const { store } = this.props; - - return ( -
- -
- -
-
- ); - }; - - getScheduleClickHandler = (scheduleId: Schedule['id']) => { - return () => { - getLocationSrv().update({ query: { page: 'schedule', id: scheduleId } }); - }; - }; - - renderType = (value: number) => { - type tTypeToVerbal = { - [key: number]: string; - }; - const typeToVerbal: tTypeToVerbal = { 0: 'API/Terraform', 1: 'Ical', 2: 'Web' }; - return typeToVerbal[value]; - }; - - renderWarning = (item: Schedule) => { - return ; - }; - - renderStatus = (item: Schedule) => { - const { - store: { scheduleStore }, - } = this.props; - - const relatedEscalationChains = scheduleStore.relatedEscalationChains[item.id]; - return ( - - {item.number_of_escalation_chains > 0 && ( - - {relatedEscalationChains ? ( - relatedEscalationChains.length ? ( - relatedEscalationChains.map((escalationChain) => ( -
- - {escalationChain.name} - -
- )) - ) : ( - 'Not used yet' - ) - ) : ( - Loading related escalation chains.... - )} - - } - onHover={this.getUpdateRelatedEscalationChainsHandler(item.id)} - /> - )} - - {/* */} -
- ); - }; - - renderName = (item: Schedule) => { - return {item.name}; - }; - - renderOncallNow = (item: Schedule, _index: number) => { - if (item.on_call_now?.length > 0) { - return ( - - {item.on_call_now.map((user, _index) => { - return ( - -
- - {user.username} -
-
- ); - })} -
- ); - } - return null; - }; - - renderChannelName = (value: Schedule) => { - return getSlackChannelName(value.slack_channel) || '-'; - }; - - renderUserGroup = (value: Schedule) => { - return value.user_group?.handle || '-'; - }; - - renderButtons = (item: Schedule) => { - return ( - - - - - - - - - - - ); - }; - - getEditScheduleClickHandler = (id: Schedule['id']) => { - return (event) => { - event.stopPropagation(); - - this.setState({ scheduleIdToEdit: id }); - }; - }; - - getDeleteScheduleClickHandler = (id: Schedule['id']) => { - const { store } = this.props; - const { scheduleStore } = store; - - return (event: SyntheticEvent) => { - event.stopPropagation(); - - scheduleStore.delete(id).then(this.update); - }; - }; - - handleSchedulesFiltersChange = (filters: SchedulesFiltersType) => { - this.setState({ filters }, this.debouncedUpdateSchedules); - }; - - applyFilters = () => { - const { filters } = this.state; - const { store } = this.props; - const { scheduleStore } = store; - scheduleStore.updateItems(filters); - }; - - debouncedUpdateSchedules = debounce(this.applyFilters, 1000); - - handlePageChange = (_page: number) => {}; - - update = () => { - const { store } = this.props; - const { scheduleStore } = store; - - return scheduleStore.updateItems(); - }; - - getUpdateRelatedEscalationChainsHandler = (scheduleId: Schedule['id']) => { - const { store } = this.props; - const { scheduleStore } = store; - - return () => { - scheduleStore.updateRelatedEscalationChains(scheduleId).then(() => { - this.forceUpdate(); - }); - }; - }; -} - -export default withMobXProviderContext(SchedulesPage); diff --git a/grafana-plugin/src/pages/schedules/Schedules.helpers.ts b/grafana-plugin/src/pages/schedules_OLD/Schedules.helpers.ts similarity index 100% rename from grafana-plugin/src/pages/schedules/Schedules.helpers.ts rename to grafana-plugin/src/pages/schedules_OLD/Schedules.helpers.ts diff --git a/grafana-plugin/src/pages/schedules_OLD/Schedules.module.css 
b/grafana-plugin/src/pages/schedules_OLD/Schedules.module.css new file mode 100644 index 0000000000..102be56c88 --- /dev/null +++ b/grafana-plugin/src/pages/schedules_OLD/Schedules.module.css @@ -0,0 +1,68 @@ +.root { + margin-top: 24px; +} + +.title { + margin-bottom: 20px; +} + +.header { + display: flex; + justify-content: space-between; + align-items: center; +} + +.buttons { + width: 100%; + justify-content: flex-end; +} + +.filters { + margin-bottom: 20px; +} + +.instructions { + display: flex; + flex-direction: column; + align-items: center; + gap: 20px; + width: 50%; + margin: 20px auto; + white-space: break-spaces; + text-align: center; +} + +.events { + margin: 16px 32px; +} + +.events-list { + margin: 0; + list-style-type: none; +} + +.events-list-item { + margin-top: 12px; +} + +.priority-icon { + width: 32px; + border-radius: 50%; + background: var(--secondary-background); + line-height: 32px; + text-align: center; + font-size: 14px; + font-weight: 500; + flex-shrink: 0; +} + +.gap-between-shifts { + width: 520px; + padding: 5px 5px 5px 24px; + background-color: rgba(209, 14, 92, 0.15); + border: 1px solid rgba(209, 14, 92, 0.15); + border-radius: 50px; + color: #ff5286; + font-weight: 400; + align-items: baseline; +} diff --git a/grafana-plugin/src/pages/schedules_OLD/Schedules.tsx b/grafana-plugin/src/pages/schedules_OLD/Schedules.tsx new file mode 100644 index 0000000000..49a632ae47 --- /dev/null +++ b/grafana-plugin/src/pages/schedules_OLD/Schedules.tsx @@ -0,0 +1,555 @@ +import React, { SyntheticEvent } from 'react'; + +import { AppRootProps } from '@grafana/data'; +import { getLocationSrv } from '@grafana/runtime'; +import { + Button, + ConfirmModal, + HorizontalGroup, + Icon, + LoadingPlaceholder, + Modal, + PENDING_COLOR, + Tooltip, + VerticalGroup, +} from '@grafana/ui'; +import cn from 'classnames/bind'; +import { omit } from 'lodash-es'; +import { observer } from 'mobx-react'; +import moment from 'moment-timezone'; + +import 
instructionsImage from 'assets/img/events_instructions.png'; +import Avatar from 'components/Avatar/Avatar'; +import GTable from 'components/GTable/GTable'; +import PageErrorHandlingWrapper, { PageBaseState } from 'components/PageErrorHandlingWrapper/PageErrorHandlingWrapper'; +import { + getWrongTeamResponseInfo, + initErrorDataState, +} from 'components/PageErrorHandlingWrapper/PageErrorHandlingWrapper.helpers'; +import PluginLink from 'components/PluginLink/PluginLink'; +import SchedulesFilters from 'components/SchedulesFilters/SchedulesFilters'; +import { SchedulesFiltersType } from 'components/SchedulesFilters/SchedulesFilters.types'; +import Text from 'components/Text/Text'; +import Tutorial from 'components/Tutorial/Tutorial'; +import { TutorialStep } from 'components/Tutorial/Tutorial.types'; +import ScheduleForm from 'containers/ScheduleForm/ScheduleForm'; +import ScheduleICalSettings from 'containers/ScheduleIcalLink/ScheduleIcalLink'; +import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl'; +import { Schedule, ScheduleEvent, ScheduleType } from 'models/schedule/schedule.types'; +import { getSlackChannelName } from 'models/slack_channel/slack_channel.helpers'; +import { WithStoreProps } from 'state/types'; +import { UserAction } from 'state/userAction'; +import { withMobXProviderContext } from 'state/withStore'; +import { openErrorNotification } from 'utils'; + +import { getDatesString } from './Schedules.helpers'; + +import styles from './Schedules.module.css'; + +const cx = cn.bind(styles); + +interface SchedulesPageProps extends WithStoreProps, AppRootProps {} +interface SchedulesPageState extends PageBaseState { + scheduleIdToEdit?: Schedule['id']; + scheduleIdToDelete?: Schedule['id']; + scheduleIdToExport?: Schedule['id']; + filters: SchedulesFiltersType; + expandedSchedulesKeys: Array; +} + +@observer +class SchedulesPage extends React.Component { + state: SchedulesPageState = { + filters: { + selectedDate: 
moment().startOf('day').format('YYYY-MM-DD'), + }, + expandedSchedulesKeys: [], + errorData: initErrorDataState(), + }; + + componentDidMount() { + this.update().then(this.parseQueryParams); + } + + componentDidUpdate(prevProps: SchedulesPageProps) { + if (this.props.query.id !== prevProps.query.id) { + this.parseQueryParams(); + } + } + + parseQueryParams = async () => { + this.setState({ errorData: initErrorDataState() }); // reset wrong team error to false on query parse + + const { + store, + query: { id }, + } = this.props; + + if (!id) { + return; + } + + let scheduleId: string = undefined; + const isNewSchedule = id === 'new'; + + if (!isNewSchedule) { + // load schedule only for valid id + const schedule = await store.scheduleStore + .loadItem(id, true) + .catch((error) => this.setState({ errorData: { ...getWrongTeamResponseInfo(error) } })); + if (!schedule) { + return; + } + + scheduleId = schedule.id; + } + + if (scheduleId || isNewSchedule) { + this.setState({ scheduleIdToEdit: id }); + } else { + openErrorNotification(`Schedule with id=${id} is not found. 
Please select schedule from the list.`); + } + }; + + update = () => { + const { store } = this.props; + const { scheduleStore } = store; + + return scheduleStore.updateItems(); + }; + + render() { + const { store, query } = this.props; + const { expandedSchedulesKeys, scheduleIdToDelete, scheduleIdToEdit, scheduleIdToExport } = this.state; + const { filters, errorData } = this.state; + const { scheduleStore } = store; + + const columns = [ + { + width: '10%', + title: 'Type', + dataIndex: 'type', + render: this.renderType, + }, + { + width: '20%', + title: 'Name', + dataIndex: 'name', + }, + { + width: '20%', + title: 'OnCall now', + render: this.renderOncallNow, + }, + { + width: '10%', + title: 'Slack channel', + render: this.renderChannelName, + }, + { + width: '10%', + title: 'Slack user group', + render: this.renderUserGroup, + }, + { + width: '10%', + key: 'warning', + render: this.renderWarning, + }, + { + width: '20%', + key: 'action', + render: this.renderActionButtons, + }, + ]; + + const schedules = scheduleStore.getSearchResult(); + + const timezoneStr = moment.tz.guess(); + const offset = moment().tz(timezoneStr).format('Z'); + + return ( + + {() => ( + <> +
+
+ + On-call Schedules + + Use this to distribute notifications among team members you specified in the "Notify Users from + on-call schedule" step in{' '} + escalation chains. + + +
+ + {!schedules || schedules.length ? ( + ( +
+ + + + Your timezone is {timezoneStr} UTC{offset} + + + + + + + +
+ )} + rowKey="id" + columns={columns} + data={schedules} + expandable={{ + expandedRowRender: this.renderEvents, + expandRowByClick: true, + onExpand: this.onRowExpand, + expandedRowKeys: expandedSchedulesKeys, + onExpandedRowsChange: this.handleExpandedRowsChange, + }} + /> + ) : ( + + You haven’t added a schedule yet. + + + + + } + /> + )} +
+ + {scheduleIdToEdit && ( + { + this.setState({ scheduleIdToEdit: undefined }); + getLocationSrv().update({ partial: true, query: { id: undefined } }); + }} + /> + )} + + {scheduleIdToDelete && ( + { + this.setState({ scheduleIdToDelete: undefined }); + }} + /> + )} + + {scheduleIdToExport && ( + this.setState({ scheduleIdToExport: undefined })} + > + + + )} + + )} +
+ ); + } + + onRowExpand = (expanded: boolean, schedule: Schedule) => { + if (expanded) { + this.updateEventsFor(schedule.id); + } + }; + + handleExpandedRowsChange = (expandedRows: string[]) => { + this.setState({ expandedSchedulesKeys: expandedRows }); + }; + + renderEvents = (schedule: Schedule) => { + const { store } = this.props; + const { scheduleStore } = store; + const { scheduleToScheduleEvents } = scheduleStore; + + const events = scheduleToScheduleEvents[schedule.id]; + + return events ? ( + events.length ? ( +
+ + Events + +
    + {(events || []).map((event, idx) => ( +
  • + +
  • + ))} +
+
+ ) : ( + this.renderInstruction() + ) + ) : ( + + ); + }; + + renderInstruction = () => { + const { store } = this.props; + const { userStore } = store; + + return ( +
+ + There are no active slots here. To add an event, enter a username, for example “ + {userStore.currentUser?.username}“, and click the “Reload” button. OnCall will download this calendar and set + up an on-call schedule based on event names. OnCall will refresh the calendar every 10 minutes after the + intial setup. + + +
+ ); + }; + + handleChangeFilters = (filters: SchedulesFiltersType) => { + this.setState({ filters }, () => { + const { filters, expandedSchedulesKeys } = this.state; + + if (!filters.selectedDate) { + return; + } + + expandedSchedulesKeys.forEach((id) => this.updateEventsFor(id)); + }); + }; + + renderChannelName = (value: Schedule) => { + return getSlackChannelName(value.slack_channel) || '-'; + }; + + renderUserGroup = (value: Schedule) => { + return value.user_group?.handle || '-'; + }; + + renderOncallNow = (item: Schedule, _index: number) => { + if (item.on_call_now?.length > 0) { + return item.on_call_now.map((user, _index) => { + return ( + +
+ + {user.username} +
+
+ ); + }); + } + return null; + }; + + renderType = (value: number) => { + type tTypeToVerbal = { + [key: number]: string; + }; + const typeToVerbal: tTypeToVerbal = { 0: 'API/Terraform', 1: 'Ical', 2: 'Web' }; + return typeToVerbal[value]; + }; + + renderWarning = (item: Schedule) => { + if (item.warnings.length > 0) { + const tooltipContent = ( +
+ {item.warnings.map((warning: string) => ( +

{warning}

+ ))} +
+ ); + return ( + + + + ); + } + + return null; + }; + + renderActionButtons = (record: Schedule) => { + return ( + + + + + + + + + + + + + + + ); + }; + + updateEventsFor = async (scheduleId: Schedule['id'], withEmpty = true, with_gap = true) => { + const { store } = this.props; + + const { scheduleStore } = store; + const { + filters: { selectedDate }, + } = this.state; + + store.scheduleStore.scheduleToScheduleEvents = omit(store.scheduleStore.scheduleToScheduleEvents, [scheduleId]); + + this.forceUpdate(); + + await scheduleStore.updateScheduleEvents(scheduleId, withEmpty, with_gap, selectedDate, moment.tz.guess()); + + this.forceUpdate(); + }; + + getReloadScheduleClickHandler = (scheduleId: Schedule['id']) => { + const { store } = this.props; + + const { scheduleStore } = store; + + return async (event: SyntheticEvent) => { + event.stopPropagation(); + + await scheduleStore.reloadIcal(scheduleId); + + scheduleStore.updateItem(scheduleId); + this.updateEventsFor(scheduleId); + }; + }; + + getDeleteScheduleClickHandler = (scheduleId: Schedule['id']) => { + return (event: SyntheticEvent) => { + event.stopPropagation(); + this.setState({ scheduleIdToDelete: scheduleId }); + }; + }; + + getExportScheduleClickHandler = (scheduleId: Schedule['id']) => { + return (event: SyntheticEvent) => { + event.stopPropagation(); + this.setState({ scheduleIdToExport: scheduleId }); + }; + }; + + handleDelete = async () => { + const { scheduleIdToDelete } = this.state; + const { store } = this.props; + + this.setState({ scheduleIdToDelete: undefined }); + + const { scheduleStore } = store; + + await scheduleStore.delete(scheduleIdToDelete); + + this.update(); + }; +} + +interface EventProps { + event: ScheduleEvent; +} + +const Event = ({ event }: EventProps) => { + const dates = getDatesString(event.start, event.end, event.all_day); + + return ( + <> + {!event.is_gap ? ( + +
+ {`L${event.priority_level || '0'}`} +
+ +
+ {!event.is_empty ? ( + event.users.map((user: any, index: number) => ( + + {index ? ', ' : ''} + {user.display_name} + + )) + ) : ( + + + Empty shift + {event.missing_users[0] && ( + + (check if {event.missing_users[0].includes(',') ? 'some of these users -' : 'user -'}{' '} + "{event.missing_users[0]}"{' '} + {event.missing_users[0].includes(',') ? 'are' : 'is'} existing in OnCall or{' '} + {event.missing_users[0].includes(',') ? 'have' : 'has'} Viewer role) + + )} + + )} + {event.source && — source: {event.source}} +
+
+ {dates} +
+
+
+ ) : ( +
+ + Gap! Nobody On-Call... +
+ )} + + ); +}; + +export default withMobXProviderContext(SchedulesPage); From f320afb26904884ebe63f1b03fed1ea367b8a7f9 Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Mon, 7 Nov 2022 15:40:49 -0700 Subject: [PATCH 28/49] Add rule for jinja2.Template to remind common environment use --- engine/tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/engine/tox.ini b/engine/tox.ini index 6544bcfe52..0a721f1e3d 100644 --- a/engine/tox.ini +++ b/engine/tox.ini @@ -3,6 +3,8 @@ max-line-length = 180 extend-ignore = F541, E203 extend-exclude = */migrations/* ban-relative-imports = parents +banned-modules = + jinja2.Template = Use apply_jinja_template instead [pytest] # https://pytest-django.readthedocs.io/en/latest/configuring_django.html#order-of-choosing-settings From 9c550af7218e7ca8f2e7bbd871adde41326cdc57 Mon Sep 17 00:00:00 2001 From: Innokentii Konstantinov Date: Tue, 8 Nov 2022 14:43:22 +0800 Subject: [PATCH 29/49] Support of oncall-gw (#741) * Draft support of oncall-gw * Clean up * Create oncall connector on org create in gcom * Naming fixes * Rework oncall-gateway package. \nMove it from apps. 
* Fix typo --- engine/apps/slack/views.py | 3 + engine/apps/social_auth/pipeline.py | 12 +- .../user_management/models/organization.py | 20 +++ engine/common/oncall_gateway/__init__.py | 6 + .../oncall_gateway/oncall_gateway_client.py | 148 ++++++++++++++++++ engine/common/oncall_gateway/tasks.py | 96 ++++++++++++ engine/common/oncall_gateway/utils.py | 40 +++++ engine/settings/base.py | 6 + 8 files changed, 328 insertions(+), 3 deletions(-) create mode 100644 engine/common/oncall_gateway/__init__.py create mode 100644 engine/common/oncall_gateway/oncall_gateway_client.py create mode 100644 engine/common/oncall_gateway/tasks.py create mode 100644 engine/common/oncall_gateway/utils.py diff --git a/engine/apps/slack/views.py b/engine/apps/slack/views.py index 8d4ca3c1cf..3f312cb24f 100644 --- a/engine/apps/slack/views.py +++ b/engine/apps/slack/views.py @@ -52,6 +52,7 @@ from apps.slack.slack_client.exceptions import SlackAPIException, SlackAPITokenException from apps.slack.tasks import clean_slack_integration_leftovers, unpopulate_slack_user_identities from common.insight_log import ChatOpsEvent, ChatOpsType, write_chatops_insight_log +from common.oncall_gateway import delete_slack_connector_async from .models import SlackActionRecord, SlackMessage, SlackTeamIdentity, SlackUserIdentity @@ -537,6 +538,8 @@ def post(self, request): slack_team_identity = organization.slack_team_identity if slack_team_identity is not None: clean_slack_integration_leftovers.apply_async((organization.pk,)) + if settings.FEATURE_MULTIREGION_ENABLED: + delete_slack_connector_async.apply_async((slack_team_identity.slack_id,)) write_chatops_insight_log( author=request.user, event_name=ChatOpsEvent.WORKSPACE_DISCONNECTED, diff --git a/engine/apps/social_auth/pipeline.py b/engine/apps/social_auth/pipeline.py index a271d0560e..4aaf933954 100644 --- a/engine/apps/social_auth/pipeline.py +++ b/engine/apps/social_auth/pipeline.py @@ -2,7 +2,8 @@ from urllib.parse import urljoin from django.apps 
import apps -from django.http import HttpResponse +from django.conf import settings +from django.http import HttpResponse, JsonResponse from rest_framework import status from social_core.exceptions import AuthForbidden @@ -13,6 +14,7 @@ SLACK_AUTH_WRONG_WORKSPACE_ERROR, ) from common.insight_log import ChatOpsEvent, ChatOpsType, write_chatops_insight_log +from common.oncall_gateway import check_slack_installation_backend, create_slack_connector logger = logging.getLogger(__name__) @@ -93,13 +95,17 @@ def populate_slack_identities(response, backend, user, organization, **kwargs): return HttpResponse(status=status.HTTP_400_BAD_REQUEST) slack_team_id = response["team"]["id"] + if settings.FEATURE_MULTIREGION_ENABLED and not check_slack_installation_backend( + slack_team_id, settings.ONCALL_BACKEND_REGION + ): + return JsonResponse(status=status.HTTP_400_BAD_REQUEST, json={"detail": "error about regions"}) slack_team_identity, is_slack_team_identity_created = SlackTeamIdentity.objects.get_or_create( slack_id=slack_team_id, ) - # update slack oauth fields by data from response slack_team_identity.update_oauth_fields(user, organization, response) - + if settings.FEATURE_MULTIREGION_ENABLED: + create_slack_connector(slack_team_id, settings.ONCALL_BACKEND_REGION) populate_slack_channels_for_team.apply_async((slack_team_identity.pk,)) user.slack_user_identity.update_profile_info() # todo slack: do we need update info for all existing slack users in slack team? 
diff --git a/engine/apps/user_management/models/organization.py b/engine/apps/user_management/models/organization.py index 2561e3cbfb..3f35a22a21 100644 --- a/engine/apps/user_management/models/organization.py +++ b/engine/apps/user_management/models/organization.py @@ -12,6 +12,7 @@ from apps.slack.utils import post_message_to_channel from apps.user_management.subscription_strategy import FreePublicBetaSubscriptionStrategy from common.insight_log import ChatOpsEvent, ChatOpsType, write_chatops_insight_log +from common.oncall_gateway import create_oncall_connector, delete_oncall_connector_async from common.public_primary_keys import generate_public_primary_key, increase_public_primary_key_length logger = logging.getLogger(__name__) @@ -31,7 +32,26 @@ def generate_public_primary_key_for_organization(): return new_public_primary_key +class OrganizationQuerySet(models.QuerySet): + def create(self, **kwargs): + instance = super().create(**kwargs) + if settings.FEATURE_MULTIREGION_ENABLED: + create_oncall_connector(instance.public_primary_key, settings.ONCALL_BACKEND_REGION) + return instance + + def delete(self): + org_id = self.public_primary_key + super().delete(self) + if settings.FEATURE_MULTIREGION_ENABLED: + delete_oncall_connector_async.apply_async( + (org_id), + ) + + class Organization(MaintainableObject): + + objects = OrganizationQuerySet.as_manager() + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.subscription_strategy = self._get_subscription_strategy() diff --git a/engine/common/oncall_gateway/__init__.py b/engine/common/oncall_gateway/__init__.py new file mode 100644 index 0000000000..6b3e652393 --- /dev/null +++ b/engine/common/oncall_gateway/__init__.py @@ -0,0 +1,6 @@ +""" +This package is for interaction with OnCall-Gateway, service to provide multiregional chatops. 
+""" + +from .tasks import delete_oncall_connector_async, delete_slack_connector_async # noqa: F401 +from .utils import check_slack_installation_backend, create_oncall_connector, create_slack_connector # noqa: F401 diff --git a/engine/common/oncall_gateway/oncall_gateway_client.py b/engine/common/oncall_gateway/oncall_gateway_client.py new file mode 100644 index 0000000000..7682c9ceb5 --- /dev/null +++ b/engine/common/oncall_gateway/oncall_gateway_client.py @@ -0,0 +1,148 @@ +import json +from dataclasses import dataclass +from urllib.parse import urljoin + +import requests +from django.conf import settings + + +@dataclass +class OnCallConnector: + """ + OnCallConnector represents connection between oncall org and oncall-gateway + """ + + oncall_org_id: str + backend: str + + +@dataclass +class SlackConnector: + """ + SlackConnector represents connection between slack team with installed oncall app and oncall-gateway + """ + + slack_id: str + backend: str + + +DEFAULT_TIMEOUT = 5 + + +class OnCallGatewayAPIClient: + def __init__(self, url: str, token: str): + self.base_url = url + self.api_base_url = urljoin(self.base_url, "api/v1/") + self.api_token = token + + # OnCall Connector + @property + def _oncall_connectors_url(self) -> str: + return urljoin(self.api_base_url, "oncall_org_connectors") + + def post_oncall_connector( + self, oncall_org_id: str, backend: str + ) -> tuple[OnCallConnector, requests.models.Response]: + d = {"oncall_org_id": oncall_org_id, "backend": backend} + response = self._post(url=self._oncall_connectors_url, json=d) + response_data = response.json() + + return OnCallConnector(oncall_org_id=response_data["oncall_org_id"], backend=response_data["backend"]), response + + def delete_oncall_connector(self, oncall_org_id: str) -> requests.models.Response: + url = urljoin(f"{self._oncall_connectors_url}/", oncall_org_id) + response = self._delete(url=url) + response_data = response.json() + return ( + OnCallConnector( + 
response_data["oncall_org_id"], + response_data["backend"], + ), + response, + ) + + # Slack Connector + @property + def _slack_connectors_url(self) -> str: + return urljoin(self.api_base_url, "slack_team_connectors") + + def post_slack_connector(self, slack_id: str, backend: str) -> tuple[SlackConnector, requests.models.Response]: + d = {"slack_id": slack_id, "backend": backend} + response = self._post(url=self._slack_connectors_url, json=d) + response_data = response.json() + return ( + OnCallConnector( + response_data["oncall_org_id"], + response_data["backend"], + ), + response, + ) + + def get_slack_connector(self, slack_id: str) -> tuple[SlackConnector, requests.models.Response]: + url = urljoin(f"{self._slack_connectors_url}/", slack_id) + response = self._get(url=url) + response_data = response.json() + return ( + SlackConnector( + response_data["slack_id"], + response_data["backend"], + ), + response, + ) + + def delete_slack_connector(self, slack_id: str) -> requests.models.Response: + url = urljoin(f"{self._slack_connectors_url}/", slack_id) + response = self._delete(url=url) + return response + + def _get(self, url, params=None, **kwargs) -> requests.models.Response: + kwargs["params"] = params + response = self._call_api(method=requests.get, url=url, **kwargs) + return response + + def _post(self, url, data=None, json=None, **kwargs) -> requests.models.Response: + kwargs["data"] = data + kwargs["json"] = json + response = self._call_api(method=requests.post, url=url, **kwargs) + return response + + def _delete(self, url, **kwargs) -> requests.models.Response: + response = self._call_api(method=requests.delete, url=url, **kwargs) + return response + + def _call_api(self, method, url, **kwargs) -> requests.models.Response: + kwargs["headers"] = self._headers | kwargs.get("headers", {}) + response = method(url, **kwargs) + self._check_response(response) + return response + + @property + def _headers(self) -> dict: + return { + "User-Agent": 
settings.GRAFANA_COM_USER_AGENT, + "Authorization": f"Bearer {self.api_token}", + "Content-Type": "application/json", + } + + @classmethod + def _check_response(cls, response: requests.models.Response): + if response.status_code not in [200, 201, 202, 204]: + err_msg = cls._get_error_msg_from_response(response) + if 400 <= response.status_code < 500: + print(1) + err_msg = "%s Client Error: %s for url: %s" % (response.status_code, err_msg, response.url) + + elif 500 <= response.status_code < 600: + print(2) + err_msg = "%s Server Error: %s for url: %s" % (response.status_code, err_msg, response.url) + print(err_msg) + raise requests.exceptions.HTTPError(err_msg, response=response) + + @classmethod + def _get_error_msg_from_response(cls, response: requests.models.Response) -> str: + error_msg = "" + try: + error_msg = response.json()["message"] + except (json.JSONDecodeError, KeyError): + error_msg = response.text if response.text else response.reason + return error_msg diff --git a/engine/common/oncall_gateway/tasks.py b/engine/common/oncall_gateway/tasks.py new file mode 100644 index 0000000000..ecc1584137 --- /dev/null +++ b/engine/common/oncall_gateway/tasks.py @@ -0,0 +1,96 @@ +import requests +from celery.utils.log import get_task_logger +from django.conf import settings + +from common.custom_celery_tasks import shared_dedicated_queue_retry_task + +from .oncall_gateway_client import OnCallGatewayAPIClient + +task_logger = get_task_logger(__name__) + + +@shared_dedicated_queue_retry_task( + autoretry_for=(Exception,), + retry_backoff=True, + max_retries=None, +) +def create_oncall_connector_async(oncall_org_id, backend): + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN) + try: + client.post_oncall_connector(oncall_org_id, backend) + except requests.exceptions.HTTPError as http_exc: + # TODO: decide which http codes to retry + print(http_exc.response) + if http_exc.response.status_code == 409: + task_logger.error( 
+ f"Failed to create OnCallConnector oncall_org_id={oncall_org_id} backend={backend} exc={http_exc}" + ) + else: + raise http_exc + except Exception as e: + task_logger.error(f"Failed to create OnCallConnector oncall_org_id={oncall_org_id} backend={backend} exc={e}") + raise e + + +@shared_dedicated_queue_retry_task( + autoretry_for=(Exception,), + retry_backoff=True, + max_retries=None, +) +def delete_oncall_connector_async(oncall_org_id): + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN) + try: + client.delete_slack_connector(oncall_org_id) + except requests.exceptions.HTTPError as http_exc: + if http_exc.response.status_code == 404: + # 404 indicates that the resource was deleted already + return + else: + task_logger.error(f"Failed to delete OnCallConnector oncall_org_id={oncall_org_id} exc={http_exc}") + raise http_exc + except Exception as e: + task_logger.error(f"Failed to delete OnCallConnector oncall_org_id={oncall_org_id} exc={e}") + raise e + + +@shared_dedicated_queue_retry_task( + autoretry_for=(Exception,), + retry_backoff=True, + max_retries=None, +) +def create_slack_connector_async(slack_id, backend): + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN) + try: + client.post_slack_connector(slack_id, backend) + except requests.exceptions.HTTPError as http_exc: + # TODO: decide which http codes to retry + if http_exc.response.status_code == 409: + task_logger.error( + f"Failed to create SlackConnector oncall_org_id={slack_id} backend={backend} exc={http_exc}" + ) + else: + raise http_exc + except Exception as e: + task_logger.error(f"Failed to create SlackConnector slack_id={slack_id} backend={backend} exc={e}") + raise e + + +@shared_dedicated_queue_retry_task( + autoretry_for=(Exception,), + retry_backoff=True, + max_retries=None, +) +def delete_slack_connector_async(slack_id): + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL,
settings.ONCALL_GATEWAY_API_TOKEN) + try: + client.delete_slack_connector(slack_id) + except requests.exceptions.HTTPError as http_exc: + if http_exc.response.status_code == 404: + # 404 indicates that the resource was deleted already + return + else: + task_logger.error(f"Failed to delete OnCallConnector slack_id={slack_id} exc={http_exc}") + raise http_exc + except Exception as e: + task_logger.error(f"Failed to delete OnCallConnector slack_id={slack_id} exc={e}") + raise e diff --git a/engine/common/oncall_gateway/utils.py b/engine/common/oncall_gateway/utils.py new file mode 100644 index 0000000000..28802e150b --- /dev/null +++ b/engine/common/oncall_gateway/utils.py @@ -0,0 +1,40 @@ +import logging + +import requests +from django.conf import settings + +from .oncall_gateway_client import OnCallGatewayAPIClient +from .tasks import create_oncall_connector_async, create_slack_connector_async + +logger = logging.getLogger(__name__) + + +def create_oncall_connector(oncall_org_id: str, backend: str): + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN) + try: + client.post_oncall_connector(oncall_org_id, backend) + except Exception as e: + logger.error(f"Failed to create_oncall_connector oncall_org_id={oncall_org_id} backend={backend} exc={e}") + create_oncall_connector_async.apply_async((oncall_org_id, backend), countdown=2) + + +def check_slack_installation_backend(slack_id: str, backend: str) -> bool: + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN) + try: + slack_connector, _ = client.get_slack_connector(slack_id) + if slack_connector.backend == backend: + return True + else: + return False + except requests.exceptions.HTTPError as http_exc: + if http_exc.response.status_code == 404: + return True + + +def create_slack_connector(slack_id: str, backend: str): + client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN) + try: +
client.post_slack_connector(slack_id, backend) + except Exception as e: + logger.error(f"Failed to create_oncall_connector slack_id={slack_id} backend={backend} exc={e}") + create_slack_connector_async.apply_async((slack_id, backend), countdown=2) diff --git a/engine/settings/base.py b/engine/settings/base.py index b673f7793b..2164ea808f 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -53,6 +53,7 @@ FEATURE_EMAIL_INTEGRATION_ENABLED = getenv_boolean("FEATURE_EMAIL_INTEGRATION_ENABLED", default=True) FEATURE_SLACK_INTEGRATION_ENABLED = getenv_boolean("FEATURE_SLACK_INTEGRATION_ENABLED", default=True) FEATURE_WEB_SCHEDULES_ENABLED = getenv_boolean("FEATURE_WEB_SCHEDULES_ENABLED", default=False) +FEATURE_MULTIREGION_ENABLED = getenv_boolean("FEATURE_MULTIREGION_ENABLED", default=False) GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED = getenv_boolean("GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED", default=True) GRAFANA_CLOUD_NOTIFICATIONS_ENABLED = getenv_boolean("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED", default=True) @@ -73,6 +74,11 @@ # Outgoing webhook settings DANGEROUS_WEBHOOKS_ENABLED = getenv_boolean("DANGEROUS_WEBHOOKS_ENABLED", default=False) +# Multiregion settings +ONCALL_GATEWAY_URL = os.environ.get("ONCALL_GATEWAY_URL") +ONCALL_GATEWAY_API_TOKEN = os.environ.get("ONCALL_GATEWAY_API_TOKEN") +ONCALL_BACKEND_REGION = os.environ.get("ONCALL_BACKEND_REGION") + # Database class DatabaseTypes: From e459c70a41537939015c3018491c7f300ac8328d Mon Sep 17 00:00:00 2001 From: Innokentii Konstantinov Date: Tue, 8 Nov 2022 16:27:36 +0800 Subject: [PATCH 30/49] Fix oncallGatewayClient --- engine/common/oncall_gateway/oncall_gateway_client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/engine/common/oncall_gateway/oncall_gateway_client.py b/engine/common/oncall_gateway/oncall_gateway_client.py index 7682c9ceb5..63d4861363 100644 --- a/engine/common/oncall_gateway/oncall_gateway_client.py +++ 
b/engine/common/oncall_gateway/oncall_gateway_client.py @@ -22,7 +22,7 @@ class SlackConnector: SlackConnector represents connection between slack team with installed oncall app and oncall-gateway """ - slack_id: str + slack_team_id: str backend: str @@ -67,7 +67,7 @@ def _slack_connectors_url(self) -> str: return urljoin(self.api_base_url, "slack_team_connectors") def post_slack_connector(self, slack_id: str, backend: str) -> tuple[SlackConnector, requests.models.Response]: - d = {"slack_id": slack_id, "backend": backend} + d = {"slack_team_id": slack_id, "backend": backend} response = self._post(url=self._slack_connectors_url, json=d) response_data = response.json() return ( @@ -84,7 +84,7 @@ def get_slack_connector(self, slack_id: str) -> tuple[SlackConnector, requests.m response_data = response.json() return ( SlackConnector( - response_data["slack_id"], + response_data["slack_team_id"], response_data["backend"], ), response, From e7641b7539f76525320ccedbc2d806535c6cfcfe Mon Sep 17 00:00:00 2001 From: Ildar Iskhakov Date: Tue, 8 Nov 2022 17:10:26 +0800 Subject: [PATCH 31/49] Fix helm values.yaml, bump helm version (#792) --- helm/oncall/Chart.yaml | 4 ++-- helm/oncall/values.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/helm/oncall/Chart.yaml b/helm/oncall/Chart.yaml index c6c8fc6e69..791590743b 100644 --- a/helm/oncall/Chart.yaml +++ b/helm/oncall/Chart.yaml @@ -8,13 +8,13 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 1.0.9 +version: 1.0.10 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. 
# It is recommended to use it with quotes. -appVersion: "v1.0.50" +appVersion: "v1.0.51" dependencies: - name: cert-manager version: v1.8.0 diff --git a/helm/oncall/values.yaml b/helm/oncall/values.yaml index 3de0e12225..dfc71d388c 100644 --- a/helm/oncall/values.yaml +++ b/helm/oncall/values.yaml @@ -221,7 +221,7 @@ externalPostgresql: # use an existing secret for the database password existingSecret: "" # the key in the secret containing the database password - passwordKey: password + passwordKey: # RabbitMQ is included into this release for the convenience. # It is recommended to host it separately from this release From 131b7e7c5a2080eaddf02b8e1f15e242f034394e Mon Sep 17 00:00:00 2001 From: Vadim Stepanov Date: Tue, 8 Nov 2022 12:00:42 +0000 Subject: [PATCH 32/49] helm: allow empty smtp password, bump chart version (#797) * helm: allow empty smtp password * don't create the secret in case it's empty * helm: set EMAIL_USE_TLS to true by default --- helm/oncall/Chart.yaml | 2 +- helm/oncall/templates/_env.tpl | 3 ++- helm/oncall/templates/secrets.yaml | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/helm/oncall/Chart.yaml b/helm/oncall/Chart.yaml index 791590743b..1c9b491b03 100644 --- a/helm/oncall/Chart.yaml +++ b/helm/oncall/Chart.yaml @@ -8,7 +8,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 1.0.10 +version: 1.0.11 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to diff --git a/helm/oncall/templates/_env.tpl b/helm/oncall/templates/_env.tpl index cdff2fd85e..1620523a2c 100644 --- a/helm/oncall/templates/_env.tpl +++ b/helm/oncall/templates/_env.tpl @@ -364,8 +364,9 @@ rabbitmq-password secretKeyRef: name: {{ include "oncall.fullname" . }}-smtp key: smtp-password + optional: true - name: EMAIL_USE_TLS - value: {{ .Values.oncall.smtp.tls | toString | title | quote }} + value: {{ .Values.oncall.smtp.tls | default true | toString | title | quote }} - name: EMAIL_FROM_ADDRESS value: {{ .Values.oncall.smtp.fromEmail | quote }} {{- else -}} diff --git a/helm/oncall/templates/secrets.yaml b/helm/oncall/templates/secrets.yaml index dfd7cdb257..07fa1351b7 100644 --- a/helm/oncall/templates/secrets.yaml +++ b/helm/oncall/templates/secrets.yaml @@ -41,14 +41,14 @@ data: redis-password: {{ required "externalRedis.password is required if not redis.enabled" .Values.externalRedis.password | b64enc | quote }} {{- end }} --- -{{ if .Values.oncall.smtp.enabled -}} +{{ if and .Values.oncall.smtp.enabled .Values.oncall.smtp.password -}} apiVersion: v1 kind: Secret metadata: name: {{ include "oncall.fullname" . 
}}-smtp type: Opaque data: - smtp-password: {{ required "oncall.smtp.password is required if oncall.smtp.enabled" .Values.oncall.smtp.password | b64enc | quote }} + smtp-password: {{ .Values.oncall.smtp.password | b64enc | quote }} {{- end }} --- {{ if and (not .Values.postgresql.enabled) (eq .Values.database.type "postgresql") (not .Values.externalPostgresql.existingSecret) -}} From 034459c46a278e0b91eaa87d5fc32dfee630af4f Mon Sep 17 00:00:00 2001 From: Maxim Date: Tue, 8 Nov 2022 13:16:36 +0000 Subject: [PATCH 33/49] anyway show rotations form on error, preload user profile to use proper tz --- .../src/containers/Rotation/RotationTutorial.tsx | 2 +- .../src/containers/RotationForm/RotationForm.tsx | 6 +++--- .../src/containers/Rotations/Rotations.tsx | 2 +- grafana-plugin/src/models/user/user.ts | 2 ++ grafana-plugin/src/state/rootBaseStore.ts | 12 ++++++++---- 5 files changed, 15 insertions(+), 9 deletions(-) diff --git a/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx b/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx index 3c1c0475da..e9042ec9d8 100644 --- a/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx +++ b/grafana-plugin/src/containers/Rotation/RotationTutorial.tsx @@ -52,7 +52,7 @@ const RotationTutorial: FC = (props) => { const base = 60 * 60 * 24 * days; return firstShiftOffset / base; - }, [events]); + }, [events, startMoment]); return (
diff --git a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx index da364a4440..3266eceb2a 100644 --- a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx +++ b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx @@ -17,7 +17,7 @@ import { Schedule, Shift } from 'models/schedule/schedule.types'; import { getTzOffsetString } from 'models/timezone/timezone.helpers'; import { Timezone } from 'models/timezone/timezone.types'; import { User } from 'models/user/user.types'; -import { getDateTime, getUTCString } from 'pages/schedule/Schedule.helpers'; +import { getDateTime, getStartOfWeek, getUTCString } from 'pages/schedule/Schedule.helpers'; import { SelectOption } from 'state/types'; import { useStore } from 'state/useStore'; import { getCoords, waitForElement } from 'utils/DOM'; @@ -58,7 +58,7 @@ const RotationForm: FC = observer((props) => { onDelete, layerPriority, shiftId, - shiftMoment = dayjs().startOf('isoWeek'), + shiftMoment = getStartOfWeek(currentTimezone), shiftColor = '#3D71D9', } = props; @@ -195,7 +195,7 @@ const RotationForm: FC = observer((props) => { const updatePreview = () => { store.scheduleStore .updateRotationPreview(scheduleId, shiftId, getFromString(startMoment), false, params) - .then(() => { + .finally(() => { setIsOpen(true); }); }; diff --git a/grafana-plugin/src/containers/Rotations/Rotations.tsx b/grafana-plugin/src/containers/Rotations/Rotations.tsx index 0caedc09fc..3bc4512140 100644 --- a/grafana-plugin/src/containers/Rotations/Rotations.tsx +++ b/grafana-plugin/src/containers/Rotations/Rotations.tsx @@ -187,7 +187,7 @@ class Rotations extends Component {
{ - this.handleAddLayer(nextPriority); + this.handleAddLayer(nextPriority, startMoment); }} > + Add rotations layer diff --git a/grafana-plugin/src/models/user/user.ts b/grafana-plugin/src/models/user/user.ts index 60972d80f9..d790ed8eda 100644 --- a/grafana-plugin/src/models/user/user.ts +++ b/grafana-plugin/src/models/user/user.ts @@ -65,6 +65,8 @@ export class UserStore extends BaseStore { [response.pk]: { ...response, timezone }, }; + this.rootStore.currentTimezone = timezone; + this.currentUserPk = response.pk; } diff --git a/grafana-plugin/src/state/rootBaseStore.ts b/grafana-plugin/src/state/rootBaseStore.ts index d300a8ac04..ff884dd050 100644 --- a/grafana-plugin/src/state/rootBaseStore.ts +++ b/grafana-plugin/src/state/rootBaseStore.ts @@ -125,7 +125,6 @@ export class RootBaseStore { // stores async updateBasicData() { - this.userStore.loadCurrentUser(); this.teamStore.loadCurrentTeam(); this.grafanaTeamStore.updateItems(); this.updateFeatures(); @@ -154,7 +153,14 @@ export class RootBaseStore { } this.backendVersion = get_sync_response.version; this.backendLicense = get_sync_response.license; - this.appLoading = false; + + try { + await this.userStore.loadCurrentUser(); + + this.appLoading = false; + } catch (e) { + this.initializationError = 'OnCall was not able to initialize current user'; + } } handleSyncException(e: any) { @@ -208,8 +214,6 @@ export class RootBaseStore { async setupPlugin(meta: AppPluginMeta) { this.resetStatusToDefault(); - console.log(meta); - if (!meta.jsonData?.onCallApiUrl) { this.pluginIsInitialized = false; return; From 1e60fe299f692b2c7df24388ed2a97938b07df56 Mon Sep 17 00:00:00 2001 From: Maxim Date: Tue, 8 Nov 2022 15:01:31 +0000 Subject: [PATCH 34/49] add with permission control --- .../src/containers/RotationForm/RotationForm.tsx | 4 ++-- .../src/containers/Rotations/Rotations.tsx | 15 +++++++++++---- .../containers/Rotations/ScheduleOverrides.tsx | 10 +++++++--- grafana-plugin/src/models/base_store.ts | 4 ++-- 
grafana-plugin/src/pages/schedule/Schedule.tsx | 10 ++++++++-- grafana-plugin/src/pages/schedules/Schedules.tsx | 8 +++++--- 6 files changed, 35 insertions(+), 16 deletions(-) diff --git a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx index 3266eceb2a..352af9f953 100644 --- a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx +++ b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx @@ -154,7 +154,7 @@ const RotationForm: FC = observer((props) => { rolling_users: userGroups, interval: repeatEveryValue, frequency: repeatEveryPeriod, - by_day: (repeatEveryPeriod === 0 && repeatEveryValue === 1) || repeatEveryPeriod === 1 ? selectedDays : null, + by_day: repeatEveryPeriod === 0 || repeatEveryPeriod === 1 ? selectedDays : null, priority_level: shiftId === 'new' ? layerPriority : shift?.priority_level, }), [ @@ -343,7 +343,7 @@ const RotationForm: FC = observer((props) => { /> - {((repeatEveryPeriod === 0 && repeatEveryValue === 1) || repeatEveryPeriod === 1) && ( + {(repeatEveryPeriod === 0 || repeatEveryPeriod === 1) && ( {
{disabled ? ( - + + + ) : ( {
{ + if (disabled) { + return; + } this.handleAddLayer(nextPriority, startMoment); }} > - + Add rotations layer + + Add rotations layer
)}
diff --git a/grafana-plugin/src/containers/Rotations/ScheduleOverrides.tsx b/grafana-plugin/src/containers/Rotations/ScheduleOverrides.tsx index a0eff2859c..c0cc6a85f6 100644 --- a/grafana-plugin/src/containers/Rotations/ScheduleOverrides.tsx +++ b/grafana-plugin/src/containers/Rotations/ScheduleOverrides.tsx @@ -10,10 +10,12 @@ import Text from 'components/Text/Text'; import TimelineMarks from 'components/TimelineMarks/TimelineMarks'; import Rotation from 'containers/Rotation/Rotation'; import ScheduleOverrideForm from 'containers/RotationForm/ScheduleOverrideForm'; +import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl'; import { getOverrideColor, getOverridesFromStore } from 'models/schedule/schedule.helpers'; import { Schedule, Shift, ShiftEvents } from 'models/schedule/schedule.types'; import { Timezone } from 'models/timezone/timezone.types'; import { WithStoreProps } from 'state/types'; +import { UserAction } from 'state/userAction'; import { withMobXProviderContext } from 'state/withStore'; import { DEFAULT_TRANSITION_TIMEOUT } from './Rotations.config'; @@ -78,9 +80,11 @@ class ScheduleOverrides extends Component - + + +
diff --git a/grafana-plugin/src/models/base_store.ts b/grafana-plugin/src/models/base_store.ts index f20c4d6b68..766cb69826 100644 --- a/grafana-plugin/src/models/base_store.ts +++ b/grafana-plugin/src/models/base_store.ts @@ -3,7 +3,7 @@ import { action } from 'mobx'; import { makeRequest } from 'network'; import { RootStore } from 'state'; -import { openErrorNotification } from 'utils'; +import { openWarningNotification } from 'utils'; export default class BaseStore { protected rootStore: RootStore; @@ -26,7 +26,7 @@ export default class BaseStore { : Object.keys(payload) .map((key) => `${sentenceCase(key)}: ${payload[key]}`) .join('\n'); - openErrorNotification(text); + openWarningNotification(text); } throw error; diff --git a/grafana-plugin/src/pages/schedule/Schedule.tsx b/grafana-plugin/src/pages/schedule/Schedule.tsx index 38287d1332..779d7bdbb5 100644 --- a/grafana-plugin/src/pages/schedule/Schedule.tsx +++ b/grafana-plugin/src/pages/schedule/Schedule.tsx @@ -2,7 +2,7 @@ import React from 'react'; import { AppRootProps } from '@grafana/data'; import { getLocationSrv } from '@grafana/runtime'; -import { Button, HorizontalGroup, VerticalGroup, IconButton, ToolbarButton, Icon, Modal } from '@grafana/ui'; +import { Button, HorizontalGroup, Icon, IconButton, Modal, ToolbarButton, VerticalGroup } from '@grafana/ui'; import cn from 'classnames/bind'; import dayjs from 'dayjs'; import { omit } from 'lodash-es'; @@ -22,11 +22,13 @@ import UsersTimezones from 'containers/UsersTimezones/UsersTimezones'; import { Schedule, ScheduleType, Shift } from 'models/schedule/schedule.types'; import { Timezone } from 'models/timezone/timezone.types'; import { WithStoreProps } from 'state/types'; +import { UserAction } from 'state/userAction'; import { withMobXProviderContext } from 'state/withStore'; import { getStartOfWeek } from './Schedule.helpers'; import styles from './Schedule.module.css'; + const cx = cn.bind(styles); interface SchedulePageProps extends AppRootProps, 
WithStoreProps {} @@ -101,7 +103,11 @@ class SchedulePage extends React.Component const users = store.userStore.getSearchResult().results; const schedule = scheduleStore.items[scheduleId]; - const disabled = schedule?.type !== ScheduleType.API || shiftIdToShowRotationForm || shiftIdToShowOverridesForm; + const disabled = + !store.isUserActionAllowed(UserAction.UpdateSchedules) || + schedule?.type !== ScheduleType.API || + shiftIdToShowRotationForm || + shiftIdToShowOverridesForm; return ( <> diff --git a/grafana-plugin/src/pages/schedules/Schedules.tsx b/grafana-plugin/src/pages/schedules/Schedules.tsx index 1e45e43aca..ca5a373a7f 100644 --- a/grafana-plugin/src/pages/schedules/Schedules.tsx +++ b/grafana-plugin/src/pages/schedules/Schedules.tsx @@ -146,9 +146,11 @@ class SchedulesPage extends React.Component )} - + + +
Date: Mon, 7 Nov 2022 17:38:10 -0300 Subject: [PATCH 35/49] Ensure start date matches by_day selection --- .../schedules/models/custom_on_call_shift.py | 9 ++++ .../tests/test_custom_on_call_shift.py | 49 +++++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/engine/apps/schedules/models/custom_on_call_shift.py b/engine/apps/schedules/models/custom_on_call_shift.py index c285dba9f9..96cc2db2bc 100644 --- a/engine/apps/schedules/models/custom_on_call_shift.py +++ b/engine/apps/schedules/models/custom_on_call_shift.py @@ -121,6 +121,7 @@ class CustomOnCallShift(models.Model): SATURDAY: "SA", SUNDAY: "SU", } + ICAL_WEEKDAY_REVERSE_MAP = {v: k for k, v in ICAL_WEEKDAY_MAP.items()} WEB_WEEKDAY_MAP = { "MO": "Monday", @@ -365,6 +366,14 @@ def convert_to_ical(self, time_zone="UTC", allow_empty_users=False): else: start = self.get_rotation_date(event_ical) + # Make sure we respect the selected days if any when defining start date + if self.frequency is not None and self.by_day: + start_day = CustomOnCallShift.ICAL_WEEKDAY_MAP[start.weekday()] + if start_day not in self.by_day: + expected_start_day = min(CustomOnCallShift.ICAL_WEEKDAY_REVERSE_MAP[d] for d in self.by_day) + delta = (expected_start_day - start.weekday()) % 7 + start = start + timezone.timedelta(days=delta) + if self.frequency == CustomOnCallShift.FREQUENCY_DAILY and self.by_day: result = self._daily_by_day_to_ical(time_zone, start, users_queue) all_rotation_checked = True diff --git a/engine/apps/schedules/tests/test_custom_on_call_shift.py b/engine/apps/schedules/tests/test_custom_on_call_shift.py index 8a746169c9..3da17a6612 100644 --- a/engine/apps/schedules/tests/test_custom_on_call_shift.py +++ b/engine/apps/schedules/tests/test_custom_on_call_shift.py @@ -386,6 +386,55 @@ def test_rolling_users_event_daily_by_day( assert len(users_on_call) == 0 +@pytest.mark.django_db +def test_rolling_users_event_daily_by_day_off_start(make_organization_and_user, make_on_call_shift, make_schedule): + 
organization, user_1 = make_organization_and_user() + + schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb) + now = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) + current_week_monday = now - timezone.timedelta(days=now.weekday()) + + # WE, FR + weekdays = [2, 4] + by_day = [CustomOnCallShift.ICAL_WEEKDAY_MAP[day] for day in weekdays] + data = { + "priority_level": 1, + "start": current_week_monday, + "rotation_start": current_week_monday, + "duration": timezone.timedelta(seconds=10800), + "frequency": CustomOnCallShift.FREQUENCY_DAILY, + "interval": 1, + "by_day": by_day, + "schedule": schedule, + } + rolling_users = [[user_1]] + on_call_shift = make_on_call_shift( + organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data + ) + on_call_shift.add_rolling_users(rolling_users) + + date = current_week_monday + timezone.timedelta(minutes=5) + + user_1_on_call_dates = [date + timezone.timedelta(days=2), date + timezone.timedelta(days=4)] + nobody_on_call_dates = [ + date, # MO + date + timezone.timedelta(days=1), # TU + date + timezone.timedelta(days=3), # TH + date + timezone.timedelta(days=5), # SA + date + timezone.timedelta(days=6), # SU + date + timezone.timedelta(days=7), # MO + ] + + for dt in user_1_on_call_dates: + users_on_call = list_users_to_notify_from_ical(schedule, dt) + assert len(users_on_call) == 1 + assert user_1 in users_on_call + + for dt in nobody_on_call_dates: + users_on_call = list_users_to_notify_from_ical(schedule, dt) + assert len(users_on_call) == 0 + + @pytest.mark.django_db def test_rolling_users_event_with_interval_daily_by_day( make_organization_and_user, make_user_for_organization, make_on_call_shift, make_schedule From cd39b67ef19386308790d8bba9c06b405ddf0bb7 Mon Sep 17 00:00:00 2001 From: Vadim Stepanov Date: Tue, 8 Nov 2022 15:41:12 +0000 Subject: [PATCH 36/49] live settings: treat empty string as empty value (#805) --- engine/apps/base/utils.py | 2 +- 
grafana-plugin/src/pages/livesettings/LiveSettings.helpers.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/engine/apps/base/utils.py b/engine/apps/base/utils.py index f8ad633f1a..8dc1f5faa1 100644 --- a/engine/apps/base/utils.py +++ b/engine/apps/base/utils.py @@ -38,7 +38,7 @@ def __init__(self, live_setting): def get_error(self): check_fn_name = f"_check_{self.live_setting.name.lower()}" - if self.live_setting.value is None: + if self.live_setting.value in (None, ""): return "Empty" # skip validation if there's no handler for it diff --git a/grafana-plugin/src/pages/livesettings/LiveSettings.helpers.ts b/grafana-plugin/src/pages/livesettings/LiveSettings.helpers.ts index d7edb1d007..d910a2c089 100644 --- a/grafana-plugin/src/pages/livesettings/LiveSettings.helpers.ts +++ b/grafana-plugin/src/pages/livesettings/LiveSettings.helpers.ts @@ -1,7 +1,7 @@ import { NULL_VALUE } from './LiveSettings.config'; export function normalizeValue(value: string) { - if (value === null) { + if (value === null || value === '') { return NULL_VALUE; } From 1002d94787d8453a6c6ee59d7d911a7e99b6344e Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 8 Nov 2022 15:40:21 -0700 Subject: [PATCH 37/49] Add content type header, remove host --- engine/apps/user_management/middlewares.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/engine/apps/user_management/middlewares.py b/engine/apps/user_management/middlewares.py index ff6aab569a..c23a401c39 100644 --- a/engine/apps/user_management/middlewares.py +++ b/engine/apps/user_management/middlewares.py @@ -29,6 +29,9 @@ def process_exception(self, request, exception): headers = dict( (regex.sub("", header), value) for (header, value) in request.META.items() if header.startswith("HTTP_") ) + headers.pop("HOST") + if request.META["CONTENT_TYPE"]: + headers["CONTENT_TYPE"] = request.META["CONTENT_TYPE"] response = self.make_request(request.method, url, headers, request.body) return HttpResponse(response.content, 
status=response.status_code) From 6b86c4ceffcb6a0f74037f7c2de29c7b78ade2dd Mon Sep 17 00:00:00 2001 From: Michael Derynck Date: Tue, 8 Nov 2022 15:46:38 -0700 Subject: [PATCH 38/49] Avoid key error --- engine/apps/user_management/middlewares.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/engine/apps/user_management/middlewares.py b/engine/apps/user_management/middlewares.py index c23a401c39..30d2bba7a9 100644 --- a/engine/apps/user_management/middlewares.py +++ b/engine/apps/user_management/middlewares.py @@ -29,7 +29,7 @@ def process_exception(self, request, exception): headers = dict( (regex.sub("", header), value) for (header, value) in request.META.items() if header.startswith("HTTP_") ) - headers.pop("HOST") + headers.pop("HOST", None) if request.META["CONTENT_TYPE"]: headers["CONTENT_TYPE"] = request.META["CONTENT_TYPE"] From 1177e44cc7efb15ad0439915321ed112138f24cb Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 9 Nov 2022 07:21:33 +0100 Subject: [PATCH 39/49] enterprise dev changes + few other small changes (#802) * support enterprise development in docker * fix flaky mysql healthcheck command I was getting the mysql_to_create_grafana_db and oncall_db_migration prematurely starting up this commit changes the healthcheck used here to be the same as what is used in docker-compose-mysql-rabbitmq.yml * upgrade docker-compose config files to 3.9 3.8 does not actually support the "long form" version of depends_on see here for more info https://stackoverflow.com/a/54249757 https://docs.docker.com/compose/compose-file/compose-file-v3/#depends_on * add make init command and update documentation * cleanup gitignore files --- .gitignore | 37 +------------------------------ Makefile | 5 +++-- dev/README.md | 6 ++++- docker-compose-developer.yml | 15 ++++++++++--- docker-compose-mysql-rabbitmq.yml | 2 +- docker-compose.yml | 2 +- engine/.gitignore | 5 +++++ engine/Dockerfile | 5 +++-- grafana-plugin/.gitignore | 23 +++---------------- 9 
files changed, 34 insertions(+), 66 deletions(-) create mode 100644 engine/.gitignore diff --git a/.gitignore b/.gitignore index 320b022a09..0e47d8afb4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,43 +1,8 @@ -# Backend -*/db.sqlite3 -engine/*.db *.pyc venv .python-version + .vscode -dump.rdb .idea -jupiter_playbooks/* -engine/reports/*.csv -engine/jupiter_playbooks/* - -# Frontend dependencies -node_modules -/.pnp -.pnp.js - -# testing -/coverage - -# production -/build - -# misc .DS_Store -.swp .env - -npm-debug.log* -yarn-debug.log* -yarn-error.log* - -scripts/gcom.token -scripts/gcom_grafana.token -scripts/gcom_raintank.token - -engine/extensions/ - -grafana-plugin/frontend_enterprise - -uwsgi-local.ini -celerybeat-schedule diff --git a/Makefile b/Makefile index c2746ea54f..fb08b79af5 100644 --- a/Makefile +++ b/Makefile @@ -61,6 +61,9 @@ ifeq ($(DB),$(SQLITE_PROFILE)) fi endif + $(call run_docker_compose_command,up --remove-orphans -d) + +init: # if the oncall UI is to be run in docker we should do an initial build of the frontend code # this makes sure that it will be available when the grafana container starts up without the need to # restart the grafana container initially @@ -68,8 +71,6 @@ ifeq ($(findstring $(UI_PROFILE),$(COMPOSE_PROFILES)),$(UI_PROFILE)) cd grafana-plugin && yarn install && yarn build:dev endif - $(call run_docker_compose_command,up --remove-orphans -d) - stop: $(call run_docker_compose_command,down) diff --git a/dev/README.md b/dev/README.md index 512f3cf112..eb7aed700c 100644 --- a/dev/README.md +++ b/dev/README.md @@ -22,7 +22,7 @@ Related: [How to develop integrations](/engine/config_integrations/README.md) By default everything runs inside Docker. These options can be modified via the [`COMPOSE_PROFILES`](#compose_profiles) environment variable. 1. Firstly, ensure that you have `docker` [installed](https://docs.docker.com/get-docker/) and running on your machine. 
**NOTE**: the `docker-compose-developer.yml` file uses some syntax/features that are only supported by Docker Compose v2. For insturctions on how to enable this (if you haven't already done so), see [here](https://www.docker.com/blog/announcing-compose-v2-general-availability/). -2. Run `make start`. By default this will run everything in Docker, using SQLite as the database and Redis as the message broker/cache. See [Running in Docker](#running-in-docker) below for more details on how to swap out/disable which components are run in Docker. +2. Run `make init start`. By default this will run everything in Docker, using SQLite as the database and Redis as the message broker/cache. See [Running in Docker](#running-in-docker) below for more details on how to swap out/disable which components are run in Docker. 3. Open Grafana in a browser [here](http://localhost:3000/plugins/grafana-oncall-app) (login: `oncall`, password: `oncall`). 4. You should now see the OnCall plugin configuration page. Fill out the configuration options as follows: @@ -33,6 +33,8 @@ By default everything runs inside Docker. These options can be modified via the 5. Enjoy! Check our [OSS docs](https://grafana.com/docs/grafana-cloud/oncall/open-source/) if you want to set up Slack, Telegram, Twilio or SMS/calls through Grafana Cloud. 6. (Optional) Install `pre-commit` hooks by running `make install-precommit-hook` +**Note**: on subsequent startups you can simply run `make start`, this is a bit faster because it skips the frontend build step. + ### `COMPOSE_PROFILES` This configuration option represents a comma-separated list of [`docker-compose` profiles](https://docs.docker.com/compose/profiles/). It allows you to swap-out, or disable, certain components in Docker. @@ -85,6 +87,8 @@ By default everything runs inside Docker. If you would like to run the backend s See [`COMPOSE_PROFILES`](#compose_profiles) for more information on what this option is and how to configure it. 
```bash +make init # build the frontend plugin code then run make start +make start # start all of the docker containers make stop # stop all of the docker containers make restart # restart all docker containers diff --git a/docker-compose-developer.yml b/docker-compose-developer.yml index 6b0d2ab378..0cc79491eb 100644 --- a/docker-compose-developer.yml +++ b/docker-compose-developer.yml @@ -1,15 +1,17 @@ -version: "3.8" +version: "3.9" x-labels: &oncall-labels - "com.grafana.oncall.env=dev" x-oncall-build: &oncall-build-args context: ./engine - target: dev + target: ${ONCALL_IMAGE_TARGET:-dev} labels: *oncall-labels x-oncall-volumes: &oncall-volumes - ./engine:/etc/app + # https://stackoverflow.com/a/60456034 + - ${ENTERPRISE_ENGINE:-/dev/null}:/etc/app/extensions/engine_enterprise - ./engine/oncall.db:/var/lib/oncall/oncall.db x-env-files: &oncall-env-files @@ -30,6 +32,8 @@ services: volumes: - ./grafana-plugin:/etc/app - /etc/app/node_modules + # https://stackoverflow.com/a/60456034 + - ${ENTERPRISE_FRONTEND:-/dev/null}:/etc/app/frontend_enterprise profiles: - oncall_ui @@ -170,7 +174,7 @@ services: memory: 500m cpus: "0.5" healthcheck: - test: ["CMD", "mysqladmin", "ping", "-h", "localhost"] + test: "mysql -uroot -pempty oncall_local_dev -e 'select 1'" timeout: 20s retries: 10 volumes: @@ -252,6 +256,11 @@ services: volumes: - grafanadata_dev:/var/lib/grafana - ./grafana-plugin:/var/lib/grafana/plugins/grafana-plugin + depends_on: + postgres: + condition: service_healthy + mysql: + condition: service_healthy profiles: - grafana diff --git a/docker-compose-mysql-rabbitmq.yml b/docker-compose-mysql-rabbitmq.yml index fdcd213cc4..9537c2f265 100644 --- a/docker-compose-mysql-rabbitmq.yml +++ b/docker-compose-mysql-rabbitmq.yml @@ -1,4 +1,4 @@ -version: "3.8" +version: "3.9" x-environment: &oncall-environment BASE_URL: $DOMAIN diff --git a/docker-compose.yml b/docker-compose.yml index e713499853..1557dafc75 100644 --- a/docker-compose.yml +++ b/docker-compose.yml 
@@ -1,4 +1,4 @@ -version: "3.8" +version: "3.9" x-environment: &oncall-environment DATABASE_TYPE: sqlite3 diff --git a/engine/.gitignore b/engine/.gitignore new file mode 100644 index 0000000000..1193cff735 --- /dev/null +++ b/engine/.gitignore @@ -0,0 +1,5 @@ +requirements-enterprise.txt +extensions/ +uwsgi-local.ini +celerybeat-schedule +*.db diff --git a/engine/Dockerfile b/engine/Dockerfile index 6c7531143c..0367b53ab3 100644 --- a/engine/Dockerfile +++ b/engine/Dockerfile @@ -16,10 +16,11 @@ RUN DJANGO_SETTINGS_MODULE=settings.prod_without_db DATABASE_TYPE=sqlite3 DATABA RUN chown -R 1000:2000 /var/lib/oncall FROM base AS dev - -# these are needed for the django dbshell command RUN apk add sqlite mysql-client postgresql-client +FROM dev AS dev-enterprise +RUN pip install -r requirements-enterprise.txt + FROM base AS prod # This is required for prometheus_client to sync between uwsgi workers diff --git a/grafana-plugin/.gitignore b/grafana-plugin/.gitignore index cc1a03f405..5a3710439b 100644 --- a/grafana-plugin/.gitignore +++ b/grafana-plugin/.gitignore @@ -1,33 +1,16 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
- -# dependencies /node_modules -/.pnp -.pnp.js +.eslintcache # testing /coverage # production /dist -/ci - -# misc -.idea -.DS_Store -.env.local -.env.development.local -.env.test.local -.env.production.local - -grafana-plugin.yml - -# cache -.eslintcache npm-debug.log* yarn-debug.log* yarn-error.log* # This file is generated -grafana-plugin.yml \ No newline at end of file +grafana-plugin.yml +frontend_enterprise From eca469048f5e4a0f032b51e1ab05e50121657ea6 Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 9 Nov 2022 09:50:51 +0100 Subject: [PATCH 40/49] export MessagingBackends interface (#781) --- grafana-plugin/src/models/user/user.types.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/grafana-plugin/src/models/user/user.types.ts b/grafana-plugin/src/models/user/user.types.ts index 4d48e3d8f7..4c7b6fb896 100644 --- a/grafana-plugin/src/models/user/user.types.ts +++ b/grafana-plugin/src/models/user/user.types.ts @@ -8,6 +8,10 @@ export enum UserRole { VIEWER, } +export interface MessagingBackends { + [key: string]: any; +} + export interface User { pk: string; slack_login: string; @@ -26,9 +30,7 @@ export interface User { telegram_nick_name: string; telegram_chat_id: number; // TODO check if string }; - messaging_backends: { - [key: string]: any; - }; + messaging_backends: MessagingBackends; notification_chain_verbal: { default: string; important: string; From 5d3e98867f681464e702301637c2eaaf44464e7d Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 9 Nov 2022 11:13:38 +0100 Subject: [PATCH 41/49] specify "prod" as the docker build target locally, docker build works as expected. When not specifying a build target, it builds the last target specified in the Dockerfile (in this case "prod"). On GitHub actions this works properly as well. However, there seems to be something about the version of docker used on Drone that causes it to build all of the stages (and hence failing on enterprise-dev). 
Let's instead just be explicit about which build target to use for both drone and GitHub actions. --- .drone.yml | 14 ++++++++------ .github/workflows/ci.yml | 1 + 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.drone.yml b/.drone.yml index f7e1288f86..c553d0b86e 100644 --- a/.drone.yml +++ b/.drone.yml @@ -104,6 +104,7 @@ steps: repo: us.gcr.io/kubernetes-dev/oncall dockerfile: engine/Dockerfile context: engine/ + target: prod config: from_secret: gcr_admin depends_on: @@ -117,6 +118,7 @@ steps: repo: grafana/oncall dockerfile: engine/Dockerfile context: engine/ + target: prod password: from_secret: docker_password username: @@ -146,9 +148,9 @@ services: trigger: event: include: - - tag - - push - - pull_request + - tag + - push + - pull_request ref: include: - refs/heads/main @@ -196,7 +198,7 @@ steps: GRAFANA_API_KEY: from_secret: gcom_plugin_publisher_api_key commands: - - "curl -f -s -H \"Authorization: Bearer $${GRAFANA_API_KEY}\" -d \"download[any][url]=https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip\" -d \"download[any][md5]=$$(curl -sL https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip | md5sum | cut -d' ' -f1)\" -d url=https://github.com/grafana/oncall/grafana-plugin https://grafana.com/api/plugins" + - 'curl -f -s -H "Authorization: Bearer $${GRAFANA_API_KEY}" -d "download[any][url]=https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip" -d "download[any][md5]=$$(curl -sL https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip | md5sum | cut -d'' '' -f1)" -d url=https://github.com/grafana/oncall/grafana-plugin https://grafana.com/api/plugins' depends_on: - sign and package plugin @@ -229,6 +231,7 @@ steps: repo: grafana/oncall tags: ${DRONE_TAG}-amd64-linux dockerfile: engine/Dockerfile + target: prod context: engine/ password: from_secret: docker_password @@ -266,6 +269,7 
@@ steps: repo: grafana/oncall tags: ${DRONE_TAG}-arm64-linux dockerfile: engine/Dockerfile + target: prod context: engine/ password: from_secret: docker_password @@ -396,5 +400,3 @@ name: drone_token --- kind: signature hmac: 8a060649c132677ba1b5693b5ac6c846c02f9a5bb645fe990b26a7ea42a0fb66 - -... diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f816a2e5a..a8eda99fab 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -152,5 +152,6 @@ jobs: context: ./engine file: ./engine/Dockerfile push: false + target: prod - name: Image digest run: echo ${{ steps.docker_build.outputs.digest }} From 82b74e115c2de8ce8f7b57989073ab2f8e938d0d Mon Sep 17 00:00:00 2001 From: Matvey Kukuy Date: Wed, 9 Nov 2022 12:48:15 +0200 Subject: [PATCH 42/49] Update README.md Added youtube video link --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ebdbfc5e9e..b97cd25048 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,7 @@ See [Grafana docs](https://grafana.com/docs/grafana/latest/administration/plugin - _Migration from the PagerDuty_ - [Migrator](https://github.com/grafana/oncall/tree/dev/tools/pagerduty-migrator) - _Documentation_ - [Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/) +- _Overview Webinar_ - [YouTube](https://www.youtube.com/watch?v=7uSe1pulgs8) - _How To Add Integration_ - [How to Add Integration](https://github.com/grafana/oncall/tree/dev/engine/config_integrations/README.md) - _Blog Post_ - [Announcing Grafana OnCall, the easiest way to do on-call management](https://grafana.com/blog/2021/11/09/announcing-grafana-oncall/) - _Presentation_ - [Deep dive into the Grafana, Prometheus, and Alertmanager stack for alerting and on-call management](https://grafana.com/go/observabilitycon/2021/alerting/?pg=blog) From a27a55b36d2ca77229e10ece7b1c6cd64149c07d Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 9 Nov 2022 11:55:58 +0100 Subject: [PATCH 43/49] force the plugins/docker 
drone image to use buildkit (#811) specify DOCKER_BUILDKIT=1 env var, this will force it to use buildkit which is what supports skipping build stages that are not required for the final image --- .drone.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/.drone.yml b/.drone.yml index c553d0b86e..5bcf9b2fa2 100644 --- a/.drone.yml +++ b/.drone.yml @@ -100,6 +100,10 @@ steps: - name: Build and Push Engine Docker Image Backend to GCR image: plugins/docker + environment: + # force docker to use buildkit feature, this will skip build stages that aren't required in the final image (ie. dev & dev-enterprise) + # https://github.com/docker/cli/issues/1134#issuecomment-406449342 + DOCKER_BUILDKIT: 1 settings: repo: us.gcr.io/kubernetes-dev/oncall dockerfile: engine/Dockerfile @@ -114,6 +118,10 @@ steps: - name: Build and Push Engine Docker Image Backend to Dockerhub image: plugins/docker + environment: + # force docker to use buildkit feature, this will skip build stages that aren't required in the final image (ie. dev & dev-enterprise) + # https://github.com/docker/cli/issues/1134#issuecomment-406449342 + DOCKER_BUILDKIT: 1 settings: repo: grafana/oncall dockerfile: engine/Dockerfile @@ -227,6 +235,10 @@ steps: - name: build and push docker image image: plugins/docker + environment: + # force docker to use buildkit feature, this will skip build stages that aren't required in the final image (ie. dev & dev-enterprise) + # https://github.com/docker/cli/issues/1134#issuecomment-406449342 + DOCKER_BUILDKIT: 1 settings: repo: grafana/oncall tags: ${DRONE_TAG}-amd64-linux @@ -265,6 +277,10 @@ steps: - name: build and push docker image image: plugins/docker + environment: + # force docker to use buildkit feature, this will skip build stages that aren't required in the final image (ie. 
dev & dev-enterprise) + # https://github.com/docker/cli/issues/1134#issuecomment-406449342 + DOCKER_BUILDKIT: 1 settings: repo: grafana/oncall tags: ${DRONE_TAG}-arm64-linux From fd4877408a20471d3aef32812e8aff15127f29bc Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 9 Nov 2022 13:53:59 +0100 Subject: [PATCH 44/49] remove grafana_plugin_management django app (#812) * remove grafana_plugin_management django app it seems to be no longer used or referenced. In addition apps.api.serializers.organization.PluginOrganizationSerializer was only referenced from within grafana_plugin_management and is thereby safe to remove. --- CHANGELOG.md | 1 + engine/apps/api/serializers/organization.py | 19 ------ .../grafana_plugin_management/__init__.py | 0 engine/apps/grafana_plugin_management/urls.py | 13 ---- .../views/__init__.py | 1 - .../views/plugin_installations.py | 64 ------------------- engine/engine/urls.py | 1 - engine/settings/base.py | 1 - 8 files changed, 1 insertion(+), 99 deletions(-) delete mode 100644 engine/apps/grafana_plugin_management/__init__.py delete mode 100644 engine/apps/grafana_plugin_management/urls.py delete mode 100644 engine/apps/grafana_plugin_management/views/__init__.py delete mode 100644 engine/apps/grafana_plugin_management/views/plugin_installations.py diff --git a/CHANGELOG.md b/CHANGELOG.md index efd7a181ba..a9eb4636fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ ## v1.0.52 (TBD) - Allow use of API keys as alternative to account auth token for Twilio +- Remove `grafana_plugin_management` Django app ## v1.0.51 (2022-11-05) diff --git a/engine/apps/api/serializers/organization.py b/engine/apps/api/serializers/organization.py index 30241a453b..79c6a90a72 100644 --- a/engine/apps/api/serializers/organization.py +++ b/engine/apps/api/serializers/organization.py @@ -162,22 +162,3 @@ class FastOrganizationSerializer(serializers.ModelSerializer): class Meta: model = Organization fields = ["pk", "name"] - - -class 
PluginOrganizationSerializer(serializers.ModelSerializer): - pk = serializers.CharField(read_only=True, source="public_primary_key") - grafana_token = serializers.CharField(write_only=True, source="api_token") - - class Meta: - model = Organization - fields = [ - "pk", - "stack_id", - "stack_slug", - "grafana_url", - "org_id", - "org_slug", - "org_title", - "region_slug", - "grafana_token", - ] diff --git a/engine/apps/grafana_plugin_management/__init__.py b/engine/apps/grafana_plugin_management/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/engine/apps/grafana_plugin_management/urls.py b/engine/apps/grafana_plugin_management/urls.py deleted file mode 100644 index 91ea363bf2..0000000000 --- a/engine/apps/grafana_plugin_management/urls.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.urls import include, path - -from apps.grafana_plugin_management.views import PluginInstallationsView -from common.api_helpers.optional_slash_router import OptionalSlashRouter - -app_name = "grafana-plugin-management" - -router = OptionalSlashRouter() -router.register(r"plugin_installations", PluginInstallationsView, basename="plugin_installations") - -urlpatterns = [ - path("", include(router.urls)), -] diff --git a/engine/apps/grafana_plugin_management/views/__init__.py b/engine/apps/grafana_plugin_management/views/__init__.py deleted file mode 100644 index ae0b494091..0000000000 --- a/engine/apps/grafana_plugin_management/views/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .plugin_installations import PluginInstallationsView # noqa: F401 diff --git a/engine/apps/grafana_plugin_management/views/plugin_installations.py b/engine/apps/grafana_plugin_management/views/plugin_installations.py deleted file mode 100644 index 3268aa88a6..0000000000 --- a/engine/apps/grafana_plugin_management/views/plugin_installations.py +++ /dev/null @@ -1,64 +0,0 @@ -from rest_framework import status -from rest_framework.authentication import BasicAuthentication, 
SessionAuthentication -from rest_framework.decorators import action -from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin -from rest_framework.response import Response -from rest_framework.viewsets import GenericViewSet - -from apps.api.permissions import IsStaff -from apps.api.serializers.organization import PluginOrganizationSerializer -from apps.grafana_plugin.helpers.client import GrafanaAPIClient -from apps.user_management.models import Organization -from apps.user_management.sync import sync_organization -from common.api_helpers.mixins import PublicPrimaryKeyMixin - - -class PluginInstallationsView( - PublicPrimaryKeyMixin, - CreateModelMixin, - RetrieveModelMixin, - ListModelMixin, - GenericViewSet, -): - authentication_classes = [BasicAuthentication, SessionAuthentication] - permission_classes = (IsStaff,) - - model = Organization - serializer_class = PluginOrganizationSerializer - - def get_queryset(self): - return Organization.objects.all() - - def create(self, request, *args, **kwargs): - serializer = self.get_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - organization = serializer.save() - sync_organization(organization) - return Response(data=organization.provision_plugin(), status=status.HTTP_201_CREATED) - - @action(methods=["post"], detail=True) - def revoke_and_reissue(self, request, pk): - organization = self.get_object() - serializer = self.get_serializer(organization, data=request.data) - serializer.is_valid(raise_exception=True) - serializer.save() - return Response(data=organization.provision_plugin()) - - @action(methods=["post"], detail=True) - def revoke(self, request, pk): - organization = self.get_object() - organization.revoke_plugin() - return Response(data={"details": "Plugin token revoked"}) - - @action(methods=["get"], detail=True) - def status(self, request, pk): - organization = self.get_object() - client = GrafanaAPIClient(api_url=organization.grafana_url, 
api_token=organization.api_token) - _, grafana_status = client.check_token() - return Response(data=grafana_status) - - @action(methods=["post"], detail=True) - def sync_organization(self, request, pk): - organization = self.get_object() - sync_organization(organization) - return Response(data={"details": "Sync organization complete"}) diff --git a/engine/engine/urls.py b/engine/engine/urls.py index aeedbd4eea..7a86b57f35 100644 --- a/engine/engine/urls.py +++ b/engine/engine/urls.py @@ -32,7 +32,6 @@ path("api/internal/v1/", include("apps.api.urls", namespace="api-internal")), path("api/internal/v1/", include("social_django.urls", namespace="social")), path("api/internal/v1/plugin/", include("apps.grafana_plugin.urls", namespace="grafana-plugin")), - path("api/internal/v1/", include("apps.grafana_plugin_management.urls", namespace="grafana-plugin-management")), path("api/internal/v1/", include("apps.social_auth.urls", namespace="social_auth")), path("integrations/v1/", include("apps.integrations.urls", namespace="integrations")), path("twilioapp/", include("apps.twilioapp.urls")), diff --git a/engine/settings/base.py b/engine/settings/base.py index 93bd11ed99..0dbdf92ba7 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -208,7 +208,6 @@ class DatabaseTypes: "apps.auth_token", "apps.public_api", "apps.grafana_plugin", - "apps.grafana_plugin_management", "corsheaders", "debug_toolbar", "social_django", From cae4dc2f115038e5ba10af7a05f9f85af82069c2 Mon Sep 17 00:00:00 2001 From: Maxim Date: Wed, 9 Nov 2022 14:24:01 +0000 Subject: [PATCH 45/49] fix schedules deletion, remove new schedules feature flag support --- grafana-plugin/src/GrafanaPluginRootPage.tsx | 1 - grafana-plugin/src/pages/schedule/Schedule.tsx | 2 +- grafana-plugin/src/pages/schedules/Schedules.tsx | 6 ++---- grafana-plugin/src/utils/hooks.tsx | 6 +----- 4 files changed, 4 insertions(+), 11 deletions(-) diff --git a/grafana-plugin/src/GrafanaPluginRootPage.tsx 
b/grafana-plugin/src/GrafanaPluginRootPage.tsx index ed69df654d..3642b46758 100644 --- a/grafana-plugin/src/GrafanaPluginRootPage.tsx +++ b/grafana-plugin/src/GrafanaPluginRootPage.tsx @@ -137,7 +137,6 @@ export const Root = observer((props: AppRootProps) => { grafanaUser: window.grafanaBootData.user, enableLiveSettings: store.hasFeature(AppFeature.LiveSettings), enableCloudPage: store.hasFeature(AppFeature.CloudConnection), - enableNewSchedulesPage: store.hasFeature(AppFeature.WebSchedules), backendLicense, }), [meta, pathWithoutLeadingSlash, page, store.features, backendLicense] diff --git a/grafana-plugin/src/pages/schedule/Schedule.tsx b/grafana-plugin/src/pages/schedule/Schedule.tsx index 779d7bdbb5..7950fc56b1 100644 --- a/grafana-plugin/src/pages/schedule/Schedule.tsx +++ b/grafana-plugin/src/pages/schedule/Schedule.tsx @@ -447,7 +447,7 @@ class SchedulePage extends React.Component } = this.props; store.scheduleStore.delete(scheduleId).then(() => { - getLocationSrv().update({ query: { page: 'schedules-new' } }); + getLocationSrv().update({ query: { page: 'schedules' } }); }); }; } diff --git a/grafana-plugin/src/pages/schedules/Schedules.tsx b/grafana-plugin/src/pages/schedules/Schedules.tsx index ca5a373a7f..111a041e56 100644 --- a/grafana-plugin/src/pages/schedules/Schedules.tsx +++ b/grafana-plugin/src/pages/schedules/Schedules.tsx @@ -1,4 +1,4 @@ -import React, { SyntheticEvent } from 'react'; +import React from 'react'; import { getLocationSrv } from '@grafana/runtime'; import { Button, HorizontalGroup, IconButton, LoadingPlaceholder, VerticalGroup } from '@grafana/ui'; @@ -379,9 +379,7 @@ class SchedulesPage extends React.Component { - event.stopPropagation(); - + return () => { scheduleStore.delete(id).then(this.update); }; }; diff --git a/grafana-plugin/src/utils/hooks.tsx b/grafana-plugin/src/utils/hooks.tsx index 7c4adc74e2..a19967673c 100644 --- a/grafana-plugin/src/utils/hooks.tsx +++ b/grafana-plugin/src/utils/hooks.tsx @@ -17,7 +17,6 @@ type 
Args = { }; enableLiveSettings: boolean; enableCloudPage: boolean; - enableNewSchedulesPage: boolean; backendLicense: string; }; @@ -34,7 +33,6 @@ export function useNavModel({ grafanaUser, enableLiveSettings, enableCloudPage, - enableNewSchedulesPage, backendLicense, }: Args) { return useMemo(() => { @@ -50,8 +48,7 @@ export function useNavModel({ hideFromTabs || (role === 'Admin' && grafanaUser.orgRole !== role) || (id === 'live-settings' && !enableLiveSettings) || - (id === 'cloud' && !enableCloudPage) || - (id === 'schedules-new' && !enableNewSchedulesPage), + (id === 'cloud' && !enableCloudPage), }); if (page === id) { @@ -84,7 +81,6 @@ export function useNavModel({ enableLiveSettings, enableCloudPage, backendLicense, - enableNewSchedulesPage, grafanaUser.orgRole, ]); } From 925c3f4cc436be35ff9a7e0102181ccd53ec2a7a Mon Sep 17 00:00:00 2001 From: Joey Orlando Date: Wed, 9 Nov 2022 15:42:03 +0100 Subject: [PATCH 46/49] add port to run-backend-server make command (#815) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index fb08b79af5..52aaceaa5c 100644 --- a/Makefile +++ b/Makefile @@ -128,7 +128,7 @@ backend-migrate: $(call backend_command,python manage.py migrate) run-backend-server: - $(call backend_command,python manage.py runserver) + $(call backend_command,python manage.py runserver 0.0.0.0:8080) run-backend-celery: $(call backend_command,python manage.py start_celery) From d2243ba09b623c20dfd9cd7fe7b850db8c556bfc Mon Sep 17 00:00:00 2001 From: Vadim Stepanov Date: Wed, 9 Nov 2022 15:43:12 +0000 Subject: [PATCH 47/49] Add Makefile command for rebuilding images (#817) --- Makefile | 6 ++++++ dev/README.md | 2 ++ 2 files changed, 8 insertions(+) diff --git a/Makefile b/Makefile index 52aaceaa5c..c2ad98d1ca 100644 --- a/Makefile +++ b/Makefile @@ -77,6 +77,9 @@ stop: restart: $(call run_docker_compose_command,restart) +build: + $(call run_docker_compose_command,build) + cleanup: stop docker system prune 
--filter label="$(DOCKER_COMPOSE_DEV_LABEL)" --all --volumes @@ -112,6 +115,9 @@ shell: dbshell: $(call run_engine_docker_command,python manage.py dbshell) +exec-engine: + docker exec -it oncall_engine bash + # The below commands are useful for running backend services outside of docker define backend_command export `grep -v '^#' $(DEV_ENV_FILE) | xargs -0` && \ diff --git a/dev/README.md b/dev/README.md index eb7aed700c..adeca62e4e 100644 --- a/dev/README.md +++ b/dev/README.md @@ -91,6 +91,7 @@ make init # build the frontend plugin code then run make start make start # start all of the docker containers make stop # stop all of the docker containers make restart # restart all docker containers +make build # rebuild images (e.g. when changing requirements.txt) # this will remove all of the images, containers, volumes, and networks # associated with your local OnCall developer setup @@ -101,6 +102,7 @@ make start-celery-beat # start celery beat make purge-queues # purge celery queues make shell # starts an OnCall engine Django shell make dbshell # opens a DB shell +make exec-engine # exec into engine container's bash make test # run backend tests # run both frontend and backend linters From 1802eb711bc18c619bff1344e6b6c744c08bde0b Mon Sep 17 00:00:00 2001 From: Matias Bordese Date: Wed, 9 Nov 2022 14:53:35 -0300 Subject: [PATCH 48/49] Update .drone.yml signature --- .drone.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.drone.yml b/.drone.yml index 5bcf9b2fa2..9b689d3e5e 100644 --- a/.drone.yml +++ b/.drone.yml @@ -415,4 +415,6 @@ kind: secret name: drone_token --- kind: signature -hmac: 8a060649c132677ba1b5693b5ac6c846c02f9a5bb645fe990b26a7ea42a0fb66 +hmac: f77d17560f910f1a99ab8230674dc25c226d2b3c73cb90e63e53fb8ba760d57a + +... 
From 4dc8adee1f81abc296676a7089da16873262e0cc Mon Sep 17 00:00:00 2001 From: Matias Bordese Date: Wed, 9 Nov 2022 15:05:55 -0300 Subject: [PATCH 49/49] Update CHANGELOG.md --- CHANGELOG.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a9eb4636fb..f5c8b4f2a3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,11 @@ # Change Log -## v1.0.52 (TBD) +## v1.0.52 (2022-11-09) - Allow use of API keys as alternative to account auth token for Twilio - Remove `grafana_plugin_management` Django app +- Enable new schedules UI +- Bug fixes ## v1.0.51 (2022-11-05)