Skip to content

Commit

Permalink
bump uwsgi to 2.0.26 + Python to 3.12.3 (#4495)
Browse files Browse the repository at this point in the history
# What this PR does

- bumps `uwsgi` to latest version (`2.0.26`), which unblocks us from
bumping Python to 3.12
- bumps Python to 3.12.3
- refactors the Snyk GitHub Actions workflow to use the composable
actions for installing frontend and backend dependencies
- fixes several `AttributeError`s in our tests that went from a warning
to an error in Python 3.12 (see
python/cpython#100690)

# Which issue(s) this PR closes

Closes #4358
Closes #4387
  • Loading branch information
joeyorlando committed Jun 10, 2024
1 parent 4d3e254 commit 49d20f1
Show file tree
Hide file tree
Showing 36 changed files with 204 additions and 176 deletions.
1 change: 1 addition & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
*.whl filter=lfs diff=lfs merge=lfs -text
2 changes: 1 addition & 1 deletion .github/actions/setup-python/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ runs:
id: setup-python
uses: actions/setup-python@v5.1.0
with:
python-version: "3.11.4"
python-version: "3.12.3"
cache: "pip"
cache-dependency-path: ${{ inputs.python-requirements-paths }}
- name: Install Python dependencies
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/on-pull-requests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,7 @@ jobs:
linting-and-tests:
name: Linting and tests
uses: ./.github/workflows/linting-and-tests.yml

snyk-security-scan:
name: Snyk security scan
uses: ./.github/workflows/snyk-security-scan.yml
5 changes: 5 additions & 0 deletions .github/workflows/on-release-published.yml
Original file line number Diff line number Diff line change
Expand Up @@ -73,11 +73,16 @@ jobs:
type=raw,value=${{ github.ref_name }}
type=raw,value=latest
snyk-security-scan:
name: Snyk security scan
uses: ./.github/workflows/snyk-security-scan.yml

merge-helm-release-pr:
name: Merge Helm release PR
needs:
- build-sign-and-publish-plugin-to-gcom
- build-engine-docker-image-and-publish-to-dockerhub
- snyk-security-scan
runs-on: ubuntu-latest
# These permissions are needed to assume roles from Github's OIDC.
# https://github.com/grafana/shared-workflows/tree/main/actions/get-vault-secrets
Expand Down
20 changes: 20 additions & 0 deletions .github/workflows/snyk-security-scan.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: Snyk security scan

on:
  workflow_call:

jobs:
  # NOTE(review): job id was misspelled "synk-security-scan" — fixed to "snyk".
  # If anything references the old job id (e.g. branch-protection required
  # checks or `needs:` lists keyed on job id), update those references too.
  snyk-security-scan:
    name: Snyk security scan
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: ./.github/actions/setup-python
      - name: Install frontend dependencies
        uses: ./.github/actions/install-frontend-dependencies
      - name: Run Snyk
        # Best-effort scan: a Snyk failure should not block the calling workflow.
        continue-on-error: true
        # Assumes the `snyk` CLI is already on the runner or installed by one of
        # the composite actions above — TODO confirm (the deleted snyk.yml may
        # have installed it via the official Snyk action).
        run: snyk monitor --all-projects --severity-threshold=high
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
39 changes: 0 additions & 39 deletions .github/workflows/snyk.yml

This file was deleted.

6 changes: 5 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -237,6 +237,10 @@ _backend-debug-disable: ## disable Django's debug mode and Silk profiling
backend-debug-enable: _backend-debug-enable stop start
backend-debug-disable: _backend-debug-disable stop start

# Regenerate the engine's locked requirements files from their .in sources.
# NOTE(review): the previous version wrapped each command in $(shell ...).
# Inside a recipe, $(shell ...) runs the command via make's shell *function*
# when the recipe line is expanded, discards its exit status, and then hands
# the command's stdout to the shell to execute as a command line — errors are
# silently swallowed. Plain tab-prefixed recipe lines are the correct form.
# Consider also declaring this target .PHONY alongside the file's other
# command targets — TODO confirm the file's .PHONY convention (not visible here).
pip-compile-locked-dependencies: ## compile engine requirements.txt files
	cd engine && uv pip compile requirements.in -o requirements.txt
	cd engine && uv pip compile requirements-dev.in -o requirements-dev.txt

# The below commands are useful for running backend services outside of docker
define backend_command
export `grep -v '^#' $(DEV_ENV_FILE) | xargs -0` && \
Expand All @@ -247,7 +251,7 @@ define backend_command
endef

backend-bootstrap:
python3.11 -m venv $(VENV_DIR)
python3.12 -m venv $(VENV_DIR)
$(VENV_DIR)/bin/pip install -U pip wheel uv
$(VENV_DIR)/bin/uv pip sync $(REQUIREMENTS_TXT) $(REQUIREMENTS_DEV_TXT)
@if [ -f $(REQUIREMENTS_ENTERPRISE_TXT) ]; then \
Expand Down
2 changes: 1 addition & 1 deletion dev/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,7 @@ See the `django-silk` documentation [here](https://github.com/jazzband/django-si
By default everything runs inside Docker. If you would like to run the backend services outside of Docker
(for integrating w/ PyCharm for example), follow these instructions:

1. Make sure you have Python 3.11 installed.
1. Make sure you have Python 3.12 installed.
2. `postgres` is a dependency on some of our Python dependencies (notably `psycopg2`
([docs](https://www.psycopg.org/docs/install.html#prerequisites))). Please visit
[here](https://www.postgresql.org/download/) for installation instructions.
Expand Down
2 changes: 1 addition & 1 deletion dev/scripts/generate-fake-data/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ capable of generating the following objects:

## Prerequisites

1. Create/activate a Python 3.12 virtual environment
1. Create/active a Python 3.12 virtual environment
2. `pip install -r requirements.txt`
3. Must have a local version of Grafana and OnCall up and running
4. Generate an API key inside of Grafana OnCall
Expand Down
2 changes: 0 additions & 2 deletions docker-compose-developer.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
version: "3.9"

x-labels: &oncall-labels
- "com.grafana.oncall.env=dev"

Expand Down
2 changes: 0 additions & 2 deletions docker-compose-mysql-rabbitmq.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
version: "3.9"

x-environment: &oncall-environment
BASE_URL: $DOMAIN
SECRET_KEY: $SECRET_KEY
Expand Down
2 changes: 0 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
version: "3.9"

x-environment: &oncall-environment
DATABASE_TYPE: sqlite3
BROKER_TYPE: redis
Expand Down
8 changes: 4 additions & 4 deletions engine/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM python:3.11.4-alpine3.18 AS base
FROM python:3.12.3-alpine3.18 AS base
ARG TARGETPLATFORM

# Create a group and user to run an app
Expand All @@ -18,13 +18,13 @@ RUN apk add bash \

WORKDIR /etc/app
COPY ./requirements.txt ./
COPY ./grpcio-1.57.0-cp311-cp311-linux_aarch64.whl ./
COPY ./grpcio-1.64.1-cp312-cp312-linux_aarch64.whl ./

# grpcio is not available for arm64 on pypi, so we need to install it from a local wheel
# this can be removed once https://github.com/grpc/grpc/issues/34998 is resolved
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
pip install grpcio-1.57.0-cp311-cp311-linux_aarch64.whl \
&& rm grpcio-1.57.0-cp311-cp311-linux_aarch64.whl; \
pip install grpcio-1.64.1-cp312-cp312-linux_aarch64.whl \
&& rm grpcio-1.64.1-cp312-cp312-linux_aarch64.whl; \
fi

RUN pip install uv
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ def get_alerting_config_for_datasource(cls, client: "GrafanaAPIClient", datasour
if config is None:
logger.warning(
f"GrafanaAlertingSyncManager: Got config None in get_alerting_config_for_datasource "
f"for is_grafana_datasource {datasource_uid==cls.GRAFANA_ALERTING_DATASOURCE}, "
f"for is_grafana_datasource {datasource_uid == cls.GRAFANA_ALERTING_DATASOURCE}, "
f"response: {response_info}"
)
return
Expand Down Expand Up @@ -232,7 +232,7 @@ def update_alerting_config_for_datasource(
if response is None:
logger.warning(
f"GrafanaAlertingSyncManager: Failed to update contact point (POST) for is_grafana_datasource "
f"{datasource_uid==cls.GRAFANA_ALERTING_DATASOURCE}; response: {response_info}"
f"{datasource_uid == cls.GRAFANA_ALERTING_DATASOURCE}; response: {response_info}"
)
if response_info.get("status_code") == status.HTTP_400_BAD_REQUEST:
logger.warning(f"GrafanaAlertingSyncManager: Config: {config}, Updated config: {updated_config}")
Expand Down
50 changes: 33 additions & 17 deletions engine/apps/alerts/tests/test_paging.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from unittest.mock import patch
from unittest.mock import call, patch

import pytest
from django.utils import timezone
Expand Down Expand Up @@ -78,7 +78,7 @@ def test_direct_paging_user(make_organization, make_user_for_organization, djang
assert len(callbacks) == 3
# notifications sent
for u, important in ((user, False), (other_user, True)):
assert notify_task.apply_async.called_with(
notify_task.apply_async.assert_any_call(
(u.pk, ag.pk), {"important": important, "notify_even_acknowledged": True, "notify_anyway": True}
)
expected_info = {"user": u.public_primary_key, "important": important}
Expand Down Expand Up @@ -158,16 +158,21 @@ def test_direct_paging_no_team_and_no_users(make_organization, make_user_for_org

@pytest.mark.django_db
def test_direct_paging_reusing_alert_group(
make_organization, make_user_for_organization, make_alert_receive_channel, make_alert_group
make_organization,
make_user_for_organization,
make_alert_receive_channel,
make_alert_group,
django_capture_on_commit_callbacks,
):
organization = make_organization()
user = make_user_for_organization(organization)
from_user = make_user_for_organization(organization)
alert_receive_channel = make_alert_receive_channel(organization=organization)
alert_group = make_alert_group(alert_receive_channel=alert_receive_channel)

with patch("apps.alerts.paging.notify_user_task") as notify_task:
direct_paging(organization, from_user, "Fire!", users=[(user, False)], alert_group=alert_group)
with django_capture_on_commit_callbacks(execute=True):
with patch("apps.alerts.paging.notify_user_task") as notify_task:
direct_paging(organization, from_user, "Fire!", users=[(user, False)], alert_group=alert_group)

# no new alert group is created
alert_groups = AlertGroup.objects.all()
Expand All @@ -176,8 +181,8 @@ def test_direct_paging_reusing_alert_group(

# notifications sent
ag = alert_groups.get()
assert notify_task.apply_async.called_with(
(user.pk, ag.pk), {"important": False, "notify_even_acknowledged": True, "notify_anyway": True}
notify_task.apply_async.assert_has_calls(
[call((user.pk, ag.pk), {"important": False, "notify_even_acknowledged": True, "notify_anyway": True})]
)


Expand Down Expand Up @@ -229,28 +234,39 @@ def test_unpage_user_ok(make_organization, make_user_for_organization, make_aler


@pytest.mark.django_db
def test_direct_paging_always_create_group(make_organization, make_user_for_organization):
def test_direct_paging_always_create_group(
make_organization,
make_user_for_organization,
django_capture_on_commit_callbacks,
):
organization = make_organization()
user = make_user_for_organization(organization)
from_user = make_user_for_organization(organization)
msg = "Help!"
users = [(user, False)]

with patch("apps.alerts.paging.notify_user_task") as notify_task:
# although calling twice with same params, there should be 2 alert groups
direct_paging(organization, from_user, msg, users=users)
direct_paging(organization, from_user, msg, users=users)
with django_capture_on_commit_callbacks(execute=True):
with patch("apps.alerts.paging.notify_user_task") as notify_task:
# although calling twice with same params, there should be 2 alert groups
direct_paging(organization, from_user, msg, users=users)
direct_paging(organization, from_user, msg, users=users)

# alert group created
alert_groups = AlertGroup.objects.all()
assert alert_groups.count() == 2

# notifications sent
assert notify_task.apply_async.called_with(
(user.pk, alert_groups[0].pk), {"important": False, "notify_even_acknowledged": True, "notify_anyway": True}
)
assert notify_task.apply_async.called_with(
(user.pk, alert_groups[1].pk), {"important": False, "notify_even_acknowledged": True, "notify_anyway": True}
notify_task.apply_async.assert_has_calls(
[
call(
(user.pk, alert_groups[0].pk),
{"important": False, "notify_even_acknowledged": True, "notify_anyway": True},
),
call(
(user.pk, alert_groups[1].pk),
{"important": False, "notify_even_acknowledged": True, "notify_anyway": True},
),
]
)


Expand Down
2 changes: 1 addition & 1 deletion engine/apps/grafana_plugin/tests/test_install.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def test_it_triggers_an_organization_sync_and_saves_the_grafana_token(
response = client.post(reverse("grafana-plugin:install"), format="json", **auth_headers)

assert response.status_code == status.HTTP_204_NO_CONTENT
assert mocked_sync_organization.called_once_with(organization)
mocked_sync_organization.assert_called_once_with(organization)

# make sure api token is saved on the org
organization.refresh_from_db()
Expand Down
26 changes: 13 additions & 13 deletions engine/apps/grafana_plugin/tests/test_self_hosted_install.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,8 @@ def test_it_properly_handles_errors_from_the_grafana_api(
url = reverse("grafana-plugin:self-hosted-install")
response = client.post(url, format="json", **make_self_hosted_install_header(GRAFANA_TOKEN))

assert mocked_grafana_api_client.called_once_with(api_url=GRAFANA_API_URL, api_token=GRAFANA_TOKEN)
assert mocked_grafana_api_client.return_value.check_token.called_once_with()
mocked_grafana_api_client.assert_called_once_with(api_url=GRAFANA_API_URL, api_token=GRAFANA_TOKEN)
mocked_grafana_api_client.return_value.check_token.assert_called_once_with()

assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.data["error"] == expected_error_msg
Expand Down Expand Up @@ -106,13 +106,13 @@ def test_if_organization_exists_it_is_updated(
url = reverse("grafana-plugin:self-hosted-install")
response = client.post(url, format="json", **make_self_hosted_install_header(GRAFANA_TOKEN))

assert mocked_grafana_api_client.called_once_with(api_url=GRAFANA_API_URL, api_token=GRAFANA_TOKEN)
assert mocked_grafana_api_client.return_value.check_token.called_once_with()
assert mocked_grafana_api_client.return_value.is_rbac_enabled_for_organization.called_once_with()
mocked_grafana_api_client.assert_called_once_with(api_url=GRAFANA_API_URL, api_token=GRAFANA_TOKEN)
mocked_grafana_api_client.return_value.check_token.assert_called_once_with()
mocked_grafana_api_client.return_value.is_rbac_enabled_for_organization.assert_called_once_with()

assert mocked_sync_organization.called_once_with(organization)
assert mocked_provision_plugin.called_once_with()
assert mocked_revoke_plugin.called_once_with()
mocked_sync_organization.assert_called_once_with(organization)
mocked_provision_plugin.assert_called_once_with()
mocked_revoke_plugin.assert_called_once_with()

assert response.status_code == status.HTTP_201_CREATED
assert response.data == {"error": None, **provision_plugin_response}
Expand Down Expand Up @@ -151,12 +151,12 @@ def test_if_organization_does_not_exist_it_is_created(

organization = Organization.objects.filter(stack_id=STACK_ID, org_id=ORG_ID).first()

assert mocked_grafana_api_client.called_once_with(api_url=GRAFANA_API_URL, api_token=GRAFANA_TOKEN)
assert mocked_grafana_api_client.return_value.check_token.called_once_with()
assert mocked_grafana_api_client.return_value.is_rbac_enabled_for_organization.called_once_with()
mocked_grafana_api_client.assert_called_once_with(api_url=GRAFANA_API_URL, api_token=GRAFANA_TOKEN)
mocked_grafana_api_client.return_value.check_token.assert_called_once_with()
mocked_grafana_api_client.return_value.is_rbac_enabled_for_organization.assert_called_once_with()

assert mocked_sync_organization.called_once_with(organization)
assert mocked_provision_plugin.called_once_with()
mocked_sync_organization.assert_called_once_with(organization)
mocked_provision_plugin.assert_called_once_with()
assert not mocked_revoke_plugin.called

assert response.status_code == status.HTTP_201_CREATED
Expand Down
12 changes: 10 additions & 2 deletions engine/apps/integrations/tests/test_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from django.db import OperationalError
from django.urls import reverse
from django.utils import timezone
from pytest_django.plugin import _DatabaseBlocker
from pytest_django.plugin import DjangoDbBlocker
from rest_framework import status
from rest_framework.test import APIClient

Expand All @@ -21,9 +21,17 @@
INTEGRATION_TYPES = sorted(AlertReceiveChannel.INTEGRATION_TYPES)


class DatabaseBlocker(_DatabaseBlocker):
class DatabaseBlocker(DjangoDbBlocker):
"""Customize pytest_django db blocker to raise OperationalError exception."""

def __init__(self, *args, **kwargs):
"""
Override the constructor to get around this:
https://github.com/pytest-dev/pytest-django/blob/v4.8.0/pytest_django/plugin.py#L778-L782
"""
self._history = []
self._real_ensure_connection = None

def _blocking_wrapper(*args, **kwargs):
__tracebackhide__ = True
__tracebackhide__ # Silence pyflakes
Expand Down
2 changes: 1 addition & 1 deletion engine/apps/oss_installation/cloud_heartbeat.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def setup_heartbeat_integration(name=None):
}
)
else:
setup_heartbeat_integration(f"{name} { random.randint(1, 1024)}")
setup_heartbeat_integration(f"{name} {random.randint(1, 1024)}")
except requests.Timeout:
logger.warning("Unable to create cloud heartbeat integration. Request timeout.")
except requests.exceptions.RequestException as e:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def test_notify_by_provider_call_limits_warning(
phone_backend = PhoneBackend()
phone_backend._notify_by_provider_call(user, "some_message")

assert mock_add_call_limit_warning.called_once_with(2, "some_message")
mock_add_call_limit_warning.assert_called_once_with(2, "some_message")


@pytest.mark.django_db
Expand Down
Loading

0 comments on commit 49d20f1

Please sign in to comment.