Skip to content

Commit

Permalink
Merge pull request #858 from grafana/dev
Browse files Browse the repository at this point in the history
Release v1.1.1
  • Loading branch information
Konstantinov-Innokentii authored Nov 16, 2022
2 parents b7fd452 + f5fd3dd commit 2c384ef
Show file tree
Hide file tree
Showing 96 changed files with 2,206 additions and 2,256 deletions.
21 changes: 16 additions & 5 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ DEV_ENV_FILE = $(DEV_ENV_DIR)/.env.dev
DEV_ENV_EXAMPLE_FILE = $(DEV_ENV_FILE).example

ENGINE_DIR = ./engine
REQUIREMENTS_TXT = $(ENGINE_DIR)/requirements.txt
REQUIREMENTS_ENTERPRISE_TXT = $(ENGINE_DIR)/requirements-enterprise.txt
SQLITE_DB_FILE = $(ENGINE_DIR)/oncall.db

# -n flag only copies DEV_ENV_EXAMPLE_FILE-> DEV_ENV_FILE if it doesn't already exist
Expand Down Expand Up @@ -45,12 +47,18 @@ else
BROKER_TYPE=$(REDIS_PROFILE)
endif

define run_engine_docker_command
DB=$(DB) BROKER_TYPE=$(BROKER_TYPE) docker-compose -f $(DOCKER_COMPOSE_FILE) run --rm oncall_engine_commands $(1)
endef
# SQLITE_DB_FILE is set to properly mount the sqlite db file
DOCKER_COMPOSE_ENV_VARS := COMPOSE_PROFILES=$(COMPOSE_PROFILES) DB=$(DB) BROKER_TYPE=$(BROKER_TYPE)
ifeq ($(DB),$(SQLITE_PROFILE))
DOCKER_COMPOSE_ENV_VARS += SQLITE_DB_FILE=$(SQLITE_DB_FILE)
endif

define run_docker_compose_command
COMPOSE_PROFILES=$(COMPOSE_PROFILES) DB=$(DB) BROKER_TYPE=$(BROKER_TYPE) docker-compose -f $(DOCKER_COMPOSE_FILE) $(1)
$(DOCKER_COMPOSE_ENV_VARS) docker compose -f $(DOCKER_COMPOSE_FILE) $(1)
endef

define run_engine_docker_command
$(call run_docker_compose_command,run --rm oncall_engine_commands $(1))
endef

# touch SQLITE_DB_FILE if it does not exist and DB is equal to SQLITE_PROFILE
Expand Down Expand Up @@ -128,7 +136,10 @@ endef

backend-bootstrap:
pip install -U pip wheel
cd engine && pip install -r requirements.txt
pip install -r $(REQUIREMENTS_TXT)
@if [ -f $(REQUIREMENTS_ENTERPRISE_TXT) ]; then \
pip install -r $(REQUIREMENTS_ENTERPRISE_TXT); \
fi

backend-migrate:
$(call backend_command,python manage.py migrate)
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ See [Grafana docs](https://grafana.com/docs/grafana/latest/administration/plugin

## Further Reading

- _Migration from the PagerDuty_ - [Migrator](https://github.com/grafana/oncall/tree/dev/tools/pagerduty-migrator)
- _Migration from PagerDuty_ - [Migrator](https://github.com/grafana/oncall/tree/dev/tools/pagerduty-migrator)
- _Documentation_ - [Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/)
- _Overview Webinar_ - [YouTube](https://www.youtube.com/watch?v=7uSe1pulgs8)
- _How To Add Integration_ - [How to Add Integration](https://github.com/grafana/oncall/tree/dev/engine/config_integrations/README.md)
Expand Down
28 changes: 26 additions & 2 deletions dev/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
- [ld: library not found for -lssl](#ld-library-not-found-for--lssl)
- [Could not build wheels for cryptography which use PEP 517 and cannot be installed directly](#could-not-build-wheels-for-cryptography-which-use-pep-517-and-cannot-be-installed-directly)
- [django.db.utils.OperationalError: (1366, "Incorrect string value ...")](#djangodbutilsoperationalerror-1366-incorrect-string-value)
- [/bin/sh: line 0: cd: grafana-plugin: No such file or directory](#binsh-line-0-cd-grafana-plugin-no-such-file-or-directory)
- [IDE Specific Instructions](#ide-specific-instructions)
- [PyCharm](#pycharm-professional-edition)

Expand All @@ -21,7 +22,7 @@ Related: [How to develop integrations](/engine/config_integrations/README.md)

By default everything runs inside Docker. These options can be modified via the [`COMPOSE_PROFILES`](#compose_profiles) environment variable.

1. Firstly, ensure that you have `docker` [installed](https://docs.docker.com/get-docker/) and running on your machine. **NOTE**: the `docker-compose-developer.yml` file uses some syntax/features that are only supported by Docker Compose v2. For insturctions on how to enable this (if you haven't already done so), see [here](https://www.docker.com/blog/announcing-compose-v2-general-availability/).
1. Firstly, ensure that you have `docker` [installed](https://docs.docker.com/get-docker/) and running on your machine. **NOTE**: the `docker-compose-developer.yml` file uses some syntax/features that are only supported by Docker Compose v2. For instructions on how to enable this (if you haven't already done so), see [here](https://www.docker.com/blog/announcing-compose-v2-general-availability/).
2. Run `make init start`. By default this will run everything in Docker, using SQLite as the database and Redis as the message broker/cache. See [Running in Docker](#running-in-docker) below for more details on how to swap out/disable which components are run in Docker.
3. Open Grafana in a browser [here](http://localhost:3000/plugins/grafana-oncall-app) (login: `oncall`, password: `oncall`).
4. You should now see the OnCall plugin configuration page. Fill out the configuration options as follows:
Expand All @@ -41,7 +42,7 @@ This configuration option represents a comma-separated list of [`docker-compose`

This option can be configured in two ways:

1. Setting a `COMPOSE_PROFILE` environment variable in `.env.dev`. This allows you to avoid having to set `COMPOSE_PROFILE` for each `make` command you execute afterwards.
1. Setting a `COMPOSE_PROFILES` environment variable in `dev/.env.dev`. This allows you to avoid having to set `COMPOSE_PROFILES` for each `make` command you execute afterwards.
2. Passing in a `COMPOSE_PROFILES` argument when running `make` commands. For example:

```bash
Expand Down Expand Up @@ -191,6 +192,29 @@ django.db.utils.OperationalError: (1366, "Incorrect string value: '\\xF0\\x9F\\x

Recreate the database with the correct encoding.

### /bin/sh: line 0: cd: grafana-plugin: No such file or directory

**Problem:**

When running `make init`:

```
/bin/sh: line 0: cd: grafana-plugin: No such file or directory
make: *** [init] Error 1
```

This arises when the environment variable [`CDPATH`](https://www.theunixschool.com/2012/04/what-is-cdpath.html) is set _and_ when the current path (`.`) is not explicitly part of `CDPATH`.

**Solution:**

Either make `.` part of `CDPATH` in your .rc file setup, or temporarily override the variable when running `make` commands:

```
$ CDPATH="." make init
# Setting CDPATH to empty seems to also work - only tested on zsh, YMMV
$ CDPATH="" make init
```

## IDE Specific Instructions

### PyCharm
Expand Down
4 changes: 2 additions & 2 deletions docker-compose-developer.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ x-oncall-volumes: &oncall-volumes
- ./engine:/etc/app
# https://stackoverflow.com/a/60456034
- ${ENTERPRISE_ENGINE:-/dev/null}:/etc/app/extensions/engine_enterprise
- ./engine/oncall.db:/var/lib/oncall/oncall.db
- ${SQLITE_DB_FILE:-/dev/null}:/var/lib/oncall/oncall.db

x-env-files: &oncall-env-files
- ./dev/.env.dev
Expand Down Expand Up @@ -235,7 +235,7 @@ services:
grafana:
container_name: grafana
labels: *oncall-labels
image: "grafana/grafana:${GRAFANA_VERSION:-main}"
image: "grafana/grafana:${GRAFANA_VERSION:-latest}"
restart: always
environment:
GF_SECURITY_ADMIN_USER: oncall
Expand Down
2 changes: 1 addition & 1 deletion engine/.gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
requirements-enterprise.txt
requirements-enterprise*.txt
extensions/
uwsgi-local.ini
celerybeat-schedule
Expand Down
2 changes: 1 addition & 1 deletion engine/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ FROM base AS dev
RUN apk add sqlite mysql-client postgresql-client

FROM dev AS dev-enterprise
RUN pip install -r requirements-enterprise.txt
RUN pip install -r requirements-enterprise-docker.txt

FROM base AS prod

Expand Down
8 changes: 5 additions & 3 deletions engine/apps/alerts/models/channel_filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,17 +159,19 @@ def insight_logs_serialized(self):
"order": self.order,
"slack_notification_enabled": self.notify_in_slack,
"telegram_notification_enabled": self.notify_in_telegram,
# TODO: use names instead of pks, it's needed to rework messaging backends for that
}
# TODO: use names instead of pks, it's needed to rework messaging backends for that
if self.slack_channel_id:
if self.slack_channel_id:
SlackChannel = apps.get_model("slack", "SlackChannel")
sti = self.alert_receive_channel.organization.slack_team_identity
slack_channel = SlackChannel.objects.filter(
slack_team_identity=sti, slack_id=self.slack_channel_id
).first()
result["slack_channel"] = slack_channel.name
if slack_channel is not None:
# Case when slack channel was deleted, but channel filter still has its id
result["slack_channel"] = slack_channel.name
# TODO: use names instead of pks for telegram and other notifications backends.
# It's needed to rework messaging backends for that
if self.telegram_channel:
result["telegram_channel"] = self.telegram_channel.public_primary_key
if self.escalation_chain:
Expand Down
14 changes: 9 additions & 5 deletions engine/apps/public_api/serializers/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,15 +163,16 @@ class Meta:
def create(self, validated_data):
validated_data = self._correct_validated_data(validated_data)
manual_order = validated_data.pop("manual_order")
if not manual_order:
if manual_order:
self._validate_manual_order(validated_data.get("order", None))
instance = super().create(validated_data)
else:
order = validated_data.pop("order", None)
alert_receive_channel_id = validated_data.get("alert_receive_channel")
# validate 'order' value before creation
self._validate_order(order, {"alert_receive_channel_id": alert_receive_channel_id, "is_default": False})
instance = super().create(validated_data)
self._change_position(order, instance)
else:
instance = super().create(validated_data)

return instance

Expand Down Expand Up @@ -206,10 +207,13 @@ def update(self, instance, validated_data):
validated_data = self._correct_validated_data(validated_data)

manual_order = validated_data.pop("manual_order")
if not manual_order:
if manual_order:
self._validate_manual_order(validated_data.get("order", None))
else:
order = validated_data.pop("order", None)
self._validate_order(
order, {"alert_receive_channel_id": instance.alert_receive_channel_id, "is_default": False}
order,
{"alert_receive_channel_id": instance.alert_receive_channel_id, "is_default": instance.is_default},
)
self._change_position(order, instance)

Expand Down
30 changes: 30 additions & 0 deletions engine/apps/public_api/tests/test_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -381,3 +381,33 @@ def test_update_route_with_messaging_backend(
assert new_channel_filter.notify_in_slack == data_to_update["slack"]["enabled"]
assert new_channel_filter.notify_in_telegram == data_to_update["telegram"]["enabled"]
assert new_channel_filter.notification_backends == {TestOnlyBackend.backend_id: {"channel": None, "enabled": True}}


@pytest.mark.django_db
def test_update_route_with_manual_ordering(
    make_organization_and_user_with_token,
    make_alert_receive_channel,
    make_channel_filter,
):
    """Updating a route with manual_order=True must reject out-of-range position values with HTTP 400."""
    organization, _, token = make_organization_and_user_with_token()
    alert_receive_channel = make_alert_receive_channel(organization)
    channel_filter = make_channel_filter(
        alert_receive_channel,
        is_default=False,
    )

    client = APIClient()

    url = reverse("api-public:routes-detail", kwargs={"pk": channel_filter.public_primary_key})

    # Test a negative value. Note that for "manual_order"=False, -1 is a valid option
    # (it will move the route to the bottom).
    data_to_update = {"position": -1, "manual_order": True}

    response = client.put(url, format="json", HTTP_AUTHORIZATION=token, data=data_to_update)
    assert response.status_code == status.HTTP_400_BAD_REQUEST

    # Test a value bigger than a PositiveIntegerField can hold.
    data_to_update = {"position": 9223372036854775807, "manual_order": True}

    response = client.put(url, format="json", HTTP_AUTHORIZATION=token, data=data_to_update)
    assert response.status_code == status.HTTP_400_BAD_REQUEST
3 changes: 2 additions & 1 deletion engine/apps/public_api/views/slack_channels.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ def get_queryset(self):
channel_name = self.request.query_params.get("channel_name", None)

queryset = SlackChannel.objects.filter(
slack_team_identity__organizations=self.request.auth.organization
slack_team_identity__organizations=self.request.auth.organization,
is_archived=False,
).distinct()

if channel_name:
Expand Down
1 change: 0 additions & 1 deletion engine/apps/slack/scenarios/distribute_alerts.py
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,6 @@ def process_signal(self, alert):
def _send_first_alert(self, alert, channel_id):
attachments = alert.group.render_slack_attachments()
blocks = alert.group.render_slack_blocks()

self.publish_slack_messages(
slack_team_identity=self.slack_team_identity,
alert_group=alert.group,
Expand Down
4 changes: 4 additions & 0 deletions engine/apps/slack/scenarios/slack_channel.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from django.utils import timezone

from apps.slack.scenarios import scenario_step
from apps.slack.tasks import clean_slack_channel_leftovers


class SlackChannelCreatedOrRenamedEventStep(scenario_step.ScenarioStep):
Expand Down Expand Up @@ -53,6 +54,8 @@ def process_scenario(self, slack_user_identity, slack_team_identity, payload, ac
slack_id=slack_id,
slack_team_identity=slack_team_identity,
).delete()
# even if the channel is deleted, run the task to clean possible leftovers
clean_slack_channel_leftovers.apply_async((slack_team_identity.id, slack_id))


class SlackChannelArchivedEventStep(scenario_step.ScenarioStep):
Expand All @@ -75,6 +78,7 @@ def process_scenario(self, slack_user_identity, slack_team_identity, payload, ac
slack_id=slack_id,
slack_team_identity=slack_team_identity,
).update(is_archived=True)
clean_slack_channel_leftovers.apply_async((slack_team_identity.id, slack_id))


class SlackChannelUnArchivedEventStep(scenario_step.ScenarioStep):
Expand Down
32 changes: 32 additions & 0 deletions engine/apps/slack/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -771,3 +771,35 @@ def clean_slack_integration_leftovers(organization_id, *args, **kwargs):
OnCallSchedule.objects.filter(organization_id=organization_id).update(channel=None)
logger.info(f"Cleaned OnCallSchedule slack_channel_id for organization {organization_id}")
logger.info(f"Finish clean slack leftovers for organization {organization_id}")


@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=10)
def clean_slack_channel_leftovers(slack_team_identity_id, slack_channel_id):
    """
    Remove references to a Slack channel after it has been archived or deleted in Slack.

    For every organization attached to the given Slack team identity this task:
    - clears ``general_log_channel_id`` when it pointed at the removed channel;
    - clears ``slack_channel_id`` on every ChannelFilter that referenced it.

    Args:
        slack_team_identity_id: pk of the SlackTeamIdentity whose organizations are cleaned.
        slack_channel_id: Slack id of the archived/deleted channel.
    """
    SlackTeamIdentity = apps.get_model("slack", "SlackTeamIdentity")
    ChannelFilter = apps.get_model("alerts", "ChannelFilter")
    Organization = apps.get_model("user_management", "Organization")

    try:
        sti = SlackTeamIdentity.objects.get(id=slack_team_identity_id)
    except SlackTeamIdentity.DoesNotExist:
        # Nothing to clean if the team identity is gone; log and bail out
        # instead of retrying (the id will never become valid).
        logger.info(
            f"Failed to clean_slack_channel_leftovers slack_channel_id={slack_channel_id} slack_team_identity_id={slack_team_identity_id} : Invalid slack_team_identity_id"
        )
        return

    orgs_to_clean_general_log_channel_id = []
    for org in sti.organizations.all():
        if org.general_log_channel_id == slack_channel_id:
            logger.info(
                f"Set general_log_channel_id to None for org_id={org.id} slack_channel_id={slack_channel_id} since slack_channel is archived or deleted"
            )
            org.general_log_channel_id = None
            orgs_to_clean_general_log_channel_id.append(org)
        # Detach the channel from all routes of this organization, regardless of
        # whether it was also the org's default Slack channel.
        ChannelFilter.objects.filter(alert_receive_channel__organization=org, slack_channel_id=slack_channel_id).update(
            slack_channel_id=None
        )

    # Single bulk write instead of per-org saves; batch_size keeps each query bounded.
    Organization.objects.bulk_update(orgs_to_clean_general_log_channel_id, ["general_log_channel_id"], batch_size=5000)
17 changes: 11 additions & 6 deletions engine/apps/twilioapp/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,17 @@ class AllowOnlyTwilio(BasePermission):
def has_permission(self, request, view):
# https://www.twilio.com/docs/usage/tutorials/how-to-secure-your-django-project-by-validating-incoming-twilio-requests
# https://www.django-rest-framework.org/api-guide/permissions/
validator = RequestValidator(live_settings.TWILIO_AUTH_TOKEN)
location = create_engine_url(request.get_full_path())
request_valid = validator.validate(
request.build_absolute_uri(location=location), request.POST, request.META.get("HTTP_X_TWILIO_SIGNATURE", "")
)
return request_valid
if live_settings.TWILIO_AUTH_TOKEN:
validator = RequestValidator(live_settings.TWILIO_AUTH_TOKEN)
location = create_engine_url(request.get_full_path())
request_valid = validator.validate(
request.build_absolute_uri(location=location),
request.POST,
request.META.get("HTTP_X_TWILIO_SIGNATURE", ""),
)
return request_valid
else:
return live_settings.TWILIO_ACCOUNT_SID == request.data["AccountSid"]


class HealthCheckView(APIView):
Expand Down
12 changes: 12 additions & 0 deletions engine/common/api_helpers/mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,18 @@ def _validate_order(self, order, filter_kwargs):
if order > max_order:
raise BadRequest(detail="Invalid value for position field")

def _validate_manual_order(self, order):
"""
For manual ordering validate just that order is valid PositiveIntegrer.
User of manual ordering is responsible for correct ordering.
However, manual ordering not intended for use somewhere, except terraform provider.
"""

# https://docs.djangoproject.com/en/4.1/ref/models/fields/#positiveintegerfield
MAX_POSITIVE_INTEGER = 2147483647
if order is not None and order < 0 or order > MAX_POSITIVE_INTEGER:
raise BadRequest(detail="Invalid value for position field")


class PublicPrimaryKeyMixin:
def get_object(self):
Expand Down
2 changes: 1 addition & 1 deletion grafana-plugin/.eslintrc.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ module.exports = {
plugins: ['rulesdir', 'import'],
settings: {
'import/internal-regex':
'^assets|^components|^containers|^declare|^icons|^img|^interceptors|^models|^network|^pages|^services|^state|^utils',
'^assets|^components|^containers|^declare|^icons|^img|^interceptors|^models|^network|^pages|^services|^state|^utils|^plugin',
},
rules: {
eqeqeq: 'warn',
Expand Down
1 change: 1 addition & 0 deletions grafana-plugin/jest.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,6 @@ module.exports = {
'^jest$': '<rootDir>/src/jest',
'^.+\\.(css|scss)$': '<rootDir>/src/jest/styleMock.ts',
'^lodash-es$': 'lodash',
"^.+\\.svg$": "<rootDir>/src/jest/svgTransform.ts"
},
};
10 changes: 5 additions & 5 deletions grafana-plugin/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -54,12 +54,12 @@
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.18.6",
"@babel/preset-typescript": "^7.18.6",
"@grafana/data": "9.1.1",
"@grafana/data": "^9.2.4",
"@grafana/eslint-config": "^5.0.0",
"@grafana/runtime": "9.1.1",
"@grafana/toolkit": "9.1.1",
"@grafana/ui": "9.1.1",
"@jest/globals": "27.5.1",
"@grafana/runtime": "^9.2.4",
"@grafana/toolkit": "^9.2.4",
"@grafana/ui": "^9.2.4",
"@jest/globals": "^27.5.1",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "12",
"@types/dompurify": "^2.3.4",
Expand Down
Loading

0 comments on commit 2c384ef

Please sign in to comment.