revert: "feat(plugin-server): Support preserving distinct ID locality on overflow rerouting (#20945)" #34400

revert: "feat(plugin-server): Support preserving distinct ID locality on overflow rerouting (#20945)"

revert: "feat(plugin-server): Support preserving distinct ID locality on overflow rerouting (#20945)" #34400

name: Plugin Server CI
on:
pull_request:
push:
branches:
- master
env:
OBJECT_STORAGE_ENABLED: true
OBJECT_STORAGE_ENDPOINT: 'http://localhost:19000'
OBJECT_STORAGE_ACCESS_KEY_ID: 'object_storage_root_user'
OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password'
OBJECT_STORAGE_SESSION_RECORDING_FOLDER: 'session_recordings'
OBJECT_STORAGE_BUCKET: 'posthog'
# set the max buffer size small enough that the functional tests behave the same in CI as when running locally
SESSION_RECORDING_MAX_BUFFER_SIZE_KB: 1024
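# localhost:19000 is assumed to be the MinIO container from docker-compose.dev.yml,
# standing in for S3 so session recording uploads can be exercised in CI.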
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
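# Runs for the same PR branch share a concurrency group, so a new push cancels any in-flight
# run; pushes to master fall back to the unique run_id and are never cancelled.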
jobs:
# Job to decide whether we should run plugin server CI
# See https://github.com/dorny/paths-filter#conditional-execution for more details
changes:
runs-on: depot-ubuntu-latest-4
timeout-minutes: 5
if: github.repository == 'PostHog/posthog'
name: Determine need to run plugin server checks
outputs:
plugin-server: ${{ steps.filter.outputs.plugin-server }}
steps:
# For pull requests it's not necessary to check out the code, but this
# workflow also runs on master, so we need the checkout
- uses: actions/checkout@v3
- uses: dorny/paths-filter@v2
id: filter
with:
filters: |
plugin-server:
- .github/workflows/ci-plugin-server.yml
- 'plugin-server/**'
- 'posthog/clickhouse/migrations/**'
- 'ee/migrations/**'
- 'ee/management/commands/setup_test_environment.py'
- 'posthog/migrations/**'
- 'posthog/plugins/**'
- 'docker*.yml'
- '*Dockerfile'
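# Downstream jobs consume this output as needs.changes.outputs.plugin-server in their `if:`
# conditions: at the job level for code-quality and functional-tests, per step for tests.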
code-quality:
name: Code quality
needs: changes
if: needs.changes.outputs.plugin-server == 'true'
runs-on: depot-ubuntu-latest-4
defaults:
run:
working-directory: 'plugin-server'
steps:
- uses: actions/checkout@v3
- name: Install pnpm
uses: pnpm/action-setup@v2
with:
version: 8.x.x
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 18.12.1
cache: pnpm
- name: Install package.json dependencies with pnpm
run: pnpm install --frozen-lockfile
- name: Check formatting with prettier
run: pnpm prettier:check
- name: Lint with ESLint
run: pnpm lint
tests:
name: Plugin Server Tests (${{matrix.shard}})
needs: changes
runs-on: depot-ubuntu-latest-4
strategy:
fail-fast: false
matrix:
shard: [1/3, 2/3, 3/3]
env:
REDIS_URL: 'redis://localhost'
CLICKHOUSE_HOST: 'localhost'
CLICKHOUSE_DATABASE: 'posthog_test'
KAFKA_HOSTS: 'kafka:9092'
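# Unlike code-quality, this job has no job-level `if:`; each step checks
# needs.changes.outputs.plugin-server instead, presumably so the sharded jobs still report
# a status for required checks even when the plugin server is untouched.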
steps:
- name: Code check out
if: needs.changes.outputs.plugin-server == 'true'
uses: actions/checkout@v3
- name: Stop/Start stack with Docker Compose
if: needs.changes.outputs.plugin-server == 'true'
run: |
docker compose -f docker-compose.dev.yml down
docker compose -f docker-compose.dev.yml up -d
- name: Add Kafka to /etc/hosts
if: needs.changes.outputs.plugin-server == 'true'
run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts
- name: Set up Python
if: needs.changes.outputs.plugin-server == 'true'
uses: actions/setup-python@v5
with:
python-version: 3.10.10
cache: 'pip'
cache-dependency-path: '**/requirements*.txt'
token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
- uses: syphar/restore-virtualenv@v1
if: needs.changes.outputs.plugin-server == 'true'
id: cache-backend-tests
with:
custom_cache_key_element: v1-
- name: Install SAML (python3-saml) dependencies
if: needs.changes.outputs.plugin-server == 'true'
run: |
sudo apt-get update
sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
- name: Install python dependencies
if: needs.changes.outputs.plugin-server == 'true' && steps.cache-backend-tests.outputs.cache-hit != 'true'
run: |
python -m pip install -r requirements-dev.txt
python -m pip install -r requirements.txt
- name: Install pnpm
if: needs.changes.outputs.plugin-server == 'true'
uses: pnpm/action-setup@v2
with:
version: 8.x.x
- name: Set up Node.js
if: needs.changes.outputs.plugin-server == 'true'
uses: actions/setup-node@v4
with:
node-version: 18.12.1
cache: pnpm
cache-dependency-path: plugin-server/pnpm-lock.yaml
- name: Install package.json dependencies with pnpm
if: needs.changes.outputs.plugin-server == 'true'
run: cd plugin-server && pnpm i
- name: Wait for ClickHouse & Kafka
if: needs.changes.outputs.plugin-server == 'true'
run: bin/check_kafka_clickhouse_up
- name: Set up databases
if: needs.changes.outputs.plugin-server == 'true'
env:
TEST: 'true'
SECRET_KEY: 'abcdef' # unsafe - for testing only
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
run: cd plugin-server && pnpm setup:test
- name: Test with Jest
if: needs.changes.outputs.plugin-server == 'true'
env:
# Below DB name has `test_` prepended, as that's how Django (run above) creates the test DB
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/test_posthog'
REDIS_URL: 'redis://localhost'
NODE_OPTIONS: '--max_old_space_size=4096'
run: cd plugin-server && pnpm test -- --runInBand --forceExit tests/ --shard=${{matrix.shard}}
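# A sketch for reproducing a single shard locally, assuming the docker-compose.dev.yml stack
# is up and the test databases have been created with `pnpm setup:test`:
#   cd plugin-server && pnpm test -- --runInBand --forceExit tests/ --shard=1/3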
functional-tests:
name: Functional tests (POE=${{matrix.POE_EMBRACE_JOIN_FOR_TEAMS}})
needs: changes
if: needs.changes.outputs.plugin-server == 'true'
runs-on: depot-ubuntu-latest-4
strategy:
fail-fast: false
matrix:
POE_EMBRACE_JOIN_FOR_TEAMS: ['', '*']
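# '' is assumed to leave persons-on-events joining disabled, while '*' enables it for all
# teams, so the functional suite runs once per ingestion mode.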
env:
REDIS_URL: 'redis://localhost'
CLICKHOUSE_HOST: 'localhost'
CLICKHOUSE_DATABASE: 'posthog_test'
KAFKA_HOSTS: 'kafka:9092'
DATABASE_URL: 'postgres://posthog:posthog@localhost:5432/posthog'
RELOAD_PLUGIN_JITTER_MAX_MS: 0
POE_EMBRACE_JOIN_FOR_TEAMS: ${{matrix.POE_EMBRACE_JOIN_FOR_TEAMS}}
steps:
- name: Code check out
uses: actions/checkout@v3
- name: Stop/Start stack with Docker Compose
run: |
docker compose -f docker-compose.dev.yml down
docker compose -f docker-compose.dev.yml up -d
- name: Add Kafka to /etc/hosts
run: echo "127.0.0.1 kafka" | sudo tee -a /etc/hosts
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.10.10
cache: 'pip'
cache-dependency-path: '**/requirements*.txt'
token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }}
- uses: syphar/restore-virtualenv@v1
id: cache-backend-tests
with:
custom_cache_key_element: v1-
- name: Install SAML (python3-saml) dependencies
run: |
sudo apt-get update
sudo apt-get install libxml2-dev libxmlsec1-dev libxmlsec1-openssl
if: steps.cache-backend-tests.outputs.cache-hit != 'true'
- name: Install python dependencies
if: steps.cache-backend-tests.outputs.cache-hit != 'true'
run: |
python -m pip install -r requirements-dev.txt
python -m pip install -r requirements.txt
- name: Install pnpm
uses: pnpm/action-setup@v2
with:
version: 8.x.x
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 18.12.1
cache: pnpm
cache-dependency-path: plugin-server/pnpm-lock.yaml
- name: Install package.json dependencies with pnpm
run: |
cd plugin-server
pnpm install --frozen-lockfile
pnpm build
- name: Wait for ClickHouse & Kafka
run: bin/check_kafka_clickhouse_up
- name: Set up databases
env:
DEBUG: 'true'
SECRET_KEY: 'abcdef' # unsafe - for testing only
run: |
./manage.py migrate
./manage.py migrate_clickhouse
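# Unlike the unit tests, these migrations target the regular `posthog` database (see
# DATABASE_URL above) rather than a Django-managed `test_` database.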
- name: Run functional tests
run: |
cd plugin-server
./bin/ci_functional_tests.sh
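# ci_functional_tests.sh is assumed to start the built plugin server against the compose
# stack and run the functional suite, writing coverage to plugin-server/coverage for the
# upload step below.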
- name: Upload coverage report
uses: actions/upload-artifact@v3
if: always()
with:
name: functional-coverage
if-no-files-found: warn
retention-days: 1
path: 'plugin-server/coverage'