fix: Pin integration test dependencies in main #434

Merged · 5 commits · Apr 25, 2024
106 changes: 66 additions & 40 deletions charms/kfp-api/tests/integration/test_charm.py
@@ -16,8 +16,32 @@
 APP_NAME = "kfp-api"
 METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())

+KFP_DB = "kfp-db"
+MARIADB_CHANNEL = "latest/edge"
+MARIADB_CONFIG = {"database": "mlpipeline"}
+MARIADB_CHARM = "charmed-osm-mariadb-k8s"
+MARIADB_TRUST = True
+KFP_VIZ = "kfp-viz"
+KFP_VIZ_CHANNEL = "latest/edge"
+KFP_VIZ_TRUST = True
+MINIO_CHANNEL = "latest/edge"
+MINIO = "minio"
+MINIO_TRUST = True
 MINIO_CONFIG = {"access-key": "minio", "secret-key": "minio-secret-key"}
 KFP_DB_CONFIG = {"database": "mlpipeline"}
+MYSQL = "mysql-k8s"
+MYSQL_CHANNEL = "8.0/stable"
+MYSQL_CONFIG = {"profile": "testing"}
+MYSQL_TRUST = True
+PROMETHEUS_K8S = "prometheus-k8s"
+PROMETHEUS_K8S_CHANNEL = "latest/stable"
+PROMETHEUS_K8S_TRUST = True
+GRAFANA_K8S = "grafana-k8s"
+GRAFANA_K8S_CHANNEL = "latest/stable"
+GRAFANA_K8S_TRUST = True
+PROMETHEUS_SCRAPE_K8S = "prometheus-scrape-config-k8s"
+PROMETHEUS_SCRAPE_K8S_CHANNEL = "latest/stable"
+PROMETHEUS_SCRAPE_CONFIG = {"scrape_interval": "30s"}
+PROMETHEUS_SCRAPE_TRUST = True


 class TestCharm:
@@ -43,27 +67,29 @@ async def test_build_and_deploy(self, ops_test: OpsTest):
         # 1) The team has accepted and started using mysql-k8s more extensively
         # 2) The repository level integration tests use mysql-k8s only
         await ops_test.model.deploy(
-            entity_url="charmed-osm-mariadb-k8s",
-            application_name="kfp-db",
-            config=KFP_DB_CONFIG,
-            channel="latest/stable",
-            trust=True,
+            entity_url=MARIADB_CHARM,
+            application_name=KFP_DB,
+            config=MARIADB_CONFIG,
+            channel=MARIADB_CHANNEL,
+            trust=MARIADB_TRUST,
         )
         await ops_test.model.deploy(
-            entity_url="minio", config=MINIO_CONFIG, channel="ckf-1.7/stable", trust=True
+            entity_url=MINIO, config=MINIO_CONFIG, channel=MINIO_CHANNEL, trust=MINIO_TRUST
+        )
+        await ops_test.model.deploy(
+            entity_url=KFP_VIZ, channel=KFP_VIZ_CHANNEL, trust=KFP_VIZ_TRUST
         )
-        await ops_test.model.deploy(entity_url="kfp-viz", channel="2.0/stable", trust=True)

         # FIXME: This assertion belongs to unit tests
         # test no database relation, charm should be in blocked state
         # assert ops_test.model.applications[APP_NAME].units[0].workload_status == "blocked"

-        await ops_test.model.add_relation(f"{APP_NAME}:mysql", "kfp-db:mysql")
-        await ops_test.model.add_relation(f"{APP_NAME}:object-storage", "minio:object-storage")
-        await ops_test.model.add_relation(f"{APP_NAME}:kfp-viz", "kfp-viz:kfp-viz")
+        await ops_test.model.add_relation(f"{APP_NAME}:mysql", f"{KFP_DB}:mysql")
+        await ops_test.model.add_relation(f"{APP_NAME}:object-storage", f"{MINIO}:object-storage")
+        await ops_test.model.add_relation(f"{APP_NAME}:kfp-viz", f"{KFP_VIZ}:kfp-viz")

         await ops_test.model.wait_for_idle(
-            apps=[APP_NAME, "kfp-viz", "kfp-db", "minio"],
+            apps=[APP_NAME, KFP_VIZ, KFP_DB, MINIO],
             status="active",
             raise_on_blocked=False,
             raise_on_error=False,
@@ -77,16 +103,14 @@ async def test_build_and_deploy(self, ops_test: OpsTest):
     async def test_relational_db_relation_with_mysql_relation(self, ops_test: OpsTest):
         """Test failure of addition of relational-db relation with mysql relation present."""
         # deploy mysql-k8s charm
-        # We should use `8.0/stable` once changes for
-        # https://github.com/canonical/mysql-k8s-operator/issues/337 are published there.
         await ops_test.model.deploy(
-            "mysql-k8s",
-            channel="8.0/edge",
-            config={"profile": "testing"},
-            trust=True,
+            MYSQL,
+            channel=MYSQL_CHANNEL,
+            config=MYSQL_CONFIG,
+            trust=MYSQL_TRUST,
         )
         await ops_test.model.wait_for_idle(
-            apps=["mysql-k8s"],
+            apps=[MYSQL],
             status="active",
             raise_on_blocked=True,
             timeout=90 * 30,
@@ -95,7 +119,7 @@ async def test_relational_db_relation_with_mysql_relation(self, ops_test: OpsTest):

         # add relational-db relation which should put charm into blocked state,
         # because at this point mysql relation is already established
-        await ops_test.model.relate(f"{APP_NAME}:relational-db", "mysql-k8s:database")
+        await ops_test.model.relate(f"{APP_NAME}:relational-db", f"{MYSQL}:database")

         # verify that charm goes into blocked state
         await ops_test.model.wait_for_idle(
@@ -109,7 +133,7 @@ async def test_relational_db_relation_with_mysql_relation(self, ops_test: OpsTest):
         assert ops_test.model.applications[APP_NAME].units[0].workload_status == "blocked"

         # remove just added relational-db relation
-        await ops_test.juju("remove-relation", f"{APP_NAME}:relational-db", "mysql-k8s:database")
+        await ops_test.juju("remove-relation", f"{APP_NAME}:relational-db", f"{MYSQL}:database")

         # FIXME: this test case belongs in unit tests as it is asserting the status of the
         # unit under a certain condition, we don't actually need the presence of any deployed
@@ -130,7 +154,7 @@ async def test_relational_db_relation_with_mysql_k8s(self, ops_test: OpsTest):

         # remove existing mysql relation which should put charm into blocked state,
         # because there will be no database relations
-        await ops_test.juju("remove-relation", f"{APP_NAME}:mysql", "kfp-db:mysql")
+        await ops_test.juju("remove-relation", f"{APP_NAME}:mysql", f"{KFP_DB}:mysql")

         # verify that charm goes into blocked state
         await ops_test.model.wait_for_idle(
@@ -143,7 +167,7 @@ async def test_relational_db_relation_with_mysql_k8s(self, ops_test: OpsTest):
         assert ops_test.model.applications[APP_NAME].units[0].workload_status == "blocked"

         # add relational-db relation which should put charm into active state
-        await ops_test.model.relate(f"{APP_NAME}:relational-db", "mysql-k8s:database")
+        await ops_test.model.relate(f"{APP_NAME}:relational-db", f"{MYSQL}:database")

         # verify that charm goes into active state
         await ops_test.model.wait_for_idle(
@@ -164,7 +188,7 @@ async def test_msql_relation_with_relational_db_relation(self, ops_test: OpsTest):

         # add mysql relation which should put charm into blocked state,
         # because at this point relational-db relation is already established
-        await ops_test.model.relate(f"{APP_NAME}:mysql", "kfp-db:mysql")
+        await ops_test.model.relate(f"{APP_NAME}:mysql", f"{KFP_DB}:mysql")

         # verify that charm goes into blocked state
         await ops_test.model.wait_for_idle(
@@ -177,31 +201,33 @@ async def test_msql_relation_with_relational_db_relation(self, ops_test: OpsTest):
         assert ops_test.model.applications[APP_NAME].units[0].workload_status == "blocked"

         # remove redundant relation
-        await ops_test.juju("remove-relation", f"{APP_NAME}:mysql", "kfp-db:mysql")
+        await ops_test.juju("remove-relation", f"{APP_NAME}:mysql", f"{KFP_DB}:mysql")

     async def test_prometheus_grafana_integration(self, ops_test: OpsTest):
         """Deploy prometheus, grafana and required relations, then test the metrics."""
-        prometheus = "prometheus-k8s"
-        grafana = "grafana-k8s"
-        prometheus_scrape = "prometheus-scrape-config-k8s"
-        scrape_config = {"scrape_interval": "30s"}

         # Deploy and relate prometheus
-        await ops_test.model.deploy(prometheus, channel="latest/stable", trust=True)
-        await ops_test.model.deploy(grafana, channel="latest/stable", trust=True)
         await ops_test.model.deploy(
-            prometheus_scrape, channel="latest/stable", config=scrape_config, trust=True
+            PROMETHEUS_K8S, channel=PROMETHEUS_K8S_CHANNEL, trust=PROMETHEUS_K8S_TRUST
+        )
+        await ops_test.model.deploy(
+            GRAFANA_K8S, channel=GRAFANA_K8S_CHANNEL, trust=GRAFANA_K8S_TRUST
+        )
+        await ops_test.model.deploy(
+            PROMETHEUS_SCRAPE_K8S,
+            channel=PROMETHEUS_SCRAPE_K8S_CHANNEL,
+            config=PROMETHEUS_SCRAPE_CONFIG,
+            trust=PROMETHEUS_SCRAPE_TRUST,
         )

-        await ops_test.model.add_relation(APP_NAME, prometheus_scrape)
+        await ops_test.model.add_relation(APP_NAME, PROMETHEUS_SCRAPE_K8S)
         await ops_test.model.add_relation(
-            f"{prometheus}:grafana-dashboard", f"{grafana}:grafana-dashboard"
+            f"{PROMETHEUS_K8S}:grafana-dashboard", f"{GRAFANA_K8S}:grafana-dashboard"
         )
         await ops_test.model.add_relation(
-            f"{APP_NAME}:grafana-dashboard", f"{grafana}:grafana-dashboard"
+            f"{APP_NAME}:grafana-dashboard", f"{GRAFANA_K8S}:grafana-dashboard"
         )
         await ops_test.model.add_relation(
-            f"{prometheus}:metrics-endpoint", f"{prometheus_scrape}:metrics-endpoint"
+            f"{PROMETHEUS_K8S}:metrics-endpoint", f"{PROMETHEUS_SCRAPE_K8S}:metrics-endpoint"
         )

         # prometheus-k8s needs a significant amount of time to deploy in GH runners,
@@ -216,9 +242,9 @@ async def test_prometheus_grafana_integration(self, ops_test: OpsTest):
         )

         status = await ops_test.model.get_status()
-        prometheus_unit_ip = status["applications"][prometheus]["units"][f"{prometheus}/0"][
-            "address"
-        ]
+        prometheus_unit_ip = status["applications"][PROMETHEUS_K8S]["units"][
+            f"{PROMETHEUS_K8S}/0"
+        ]["address"]
         logger.info(f"Prometheus available at http://{prometheus_unit_ip}:9090")

         for attempt in self.retry_for_5_attempts:
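The body of the `retry_for_5_attempts` loop is collapsed in the diff view above. For orientation, here is a minimal sketch of the kind of assertion such a loop typically wraps. The helper below is hypothetical (not code from this PR) and assumes only the standard Prometheus HTTP API on port 9090, as the `logger.info` line above suggests:

```python
# Hypothetical sketch, not part of this PR: check that Prometheus has
# successfully scraped the kfp-api target at least once.
import requests


def kfp_api_target_is_up(prometheus_unit_ip: str) -> bool:
    response = requests.get(
        f"http://{prometheus_unit_ip}:9090/api/v1/query",
        params={"query": 'up{juju_application="kfp-api"}'},
        timeout=10,
    )
    response.raise_for_status()
    results = response.json()["data"]["result"]
    # An empty result vector means the target is not registered yet;
    # a sample value of "1" means the last scrape succeeded.
    return bool(results) and results[0]["value"][1] == "1"
```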
45 changes: 31 additions & 14 deletions charms/kfp-persistence/tests/integration/test_charm.py
@@ -13,8 +13,21 @@
 APP_NAME = "kfp-persistence"
 METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())

+KFP_API = "kfp-api"
+KFP_API_CHANNEL = "latest/edge"
+KFP_API_TRUST = True
+KFP_DB = "kfp-db"
+KFP_DB_CHANNEL = "8.0/stable"
+KFP_DB_CONFIG = {"profile": "testing"}
+KFP_DB_ENTITY = "mysql-k8s"
+KFP_DB_TRUST = True
+KFP_VIZ = "kfp-viz"
+KFP_VIZ_CHANNEL = "latest/edge"
+KFP_VIZ_TRUST = True
+MINIO_CHANNEL = "latest/edge"
+MINIO = "minio"
+MINIO_TRUST = True
 MINIO_CONFIG = {"access-key": "minio", "secret-key": "minio-secret-key"}
-KFP_DB_CONFIG = {"database": "mlpipeline"}


 class TestCharm:
@@ -37,35 +50,39 @@ async def test_build_and_deploy(self, ops_test: OpsTest):
         )

         await ops_test.model.deploy(
-            entity_url="mysql-k8s",
-            application_name="kfp-db",
-            config={"profile": "testing"},
-            channel="8.0/edge",
-            trust=True,
+            entity_url=KFP_DB_ENTITY,
+            application_name=KFP_DB,
+            config=KFP_DB_CONFIG,
+            channel=KFP_DB_CHANNEL,
+            trust=KFP_DB_TRUST,
         )

         await ops_test.model.deploy(
-            entity_url="minio", config=MINIO_CONFIG, channel="ckf-1.7/stable", trust=True
+            entity_url=MINIO, config=MINIO_CONFIG, channel=MINIO_CHANNEL, trust=MINIO_TRUST
+        )
+        await ops_test.model.deploy(
+            entity_url=KFP_VIZ, channel=KFP_VIZ_CHANNEL, trust=KFP_VIZ_TRUST
         )
-        await ops_test.model.deploy(entity_url="kfp-viz", channel="2.0/stable", trust=True)

         # deploy kfp-api which needs to be related to this charm
-        await ops_test.model.deploy(entity_url="kfp-api", channel="2.0/stable", trust=True)
+        await ops_test.model.deploy(
+            entity_url=KFP_API, channel=KFP_API_CHANNEL, trust=KFP_API_TRUST
+        )

-        await ops_test.model.add_relation("kfp-api:relational-db", "kfp-db:database")
-        await ops_test.model.add_relation("kfp-api:object-storage", "minio:object-storage")
-        await ops_test.model.add_relation("kfp-api:kfp-viz", "kfp-viz:kfp-viz")
+        await ops_test.model.add_relation(f"{KFP_API}:relational-db", f"{KFP_DB}:database")
+        await ops_test.model.add_relation(f"{KFP_API}:object-storage", f"{MINIO}:object-storage")
+        await ops_test.model.add_relation(f"{KFP_API}:kfp-viz", f"{KFP_VIZ}:kfp-viz")

         await ops_test.model.wait_for_idle(
-            apps=["kfp-api", "kfp-db"],
+            apps=[KFP_API, KFP_DB],
             status="active",
             raise_on_blocked=False,
             raise_on_error=False,
             timeout=90 * 30,
             idle_period=30,
         )

-        await ops_test.model.add_relation(f"{APP_NAME}:kfp-api", "kfp-api:kfp-api")
+        await ops_test.model.add_relation(f"{APP_NAME}:kfp-api", f"{KFP_API}:kfp-api")

         await ops_test.model.wait_for_idle(
             apps=[APP_NAME],
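The same pattern repeats in every file this PR touches: the charm name, channel, config, and trust flag for each test dependency are pinned once as module-level constants, so bumping a channel is a one-line edit. If the duplicated deploy calls ever grew tiresome, the constants would also feed a small helper naturally. A sketch of one possible refactor, assuming python-libjuju's `Model.deploy` signature as used in the diffs; the `deploy_pinned` helper is hypothetical and not part of this PR:

```python
# Hypothetical helper, not in this PR: deploy one pinned dependency per call.
from typing import Optional

from juju.model import Model


async def deploy_pinned(
    model: Model,
    charm: str,
    channel: str,
    trust: bool = False,
    config: Optional[dict] = None,
    app_name: Optional[str] = None,
) -> None:
    # Every pin comes from the module-level constants, mirroring the diffs above.
    await model.deploy(
        entity_url=charm,
        application_name=app_name or charm,
        channel=channel,
        config=config,
        trust=trust,
    )
```

With the kfp-persistence constants above, a call would look like `await deploy_pinned(ops_test.model, MINIO, MINIO_CHANNEL, MINIO_TRUST, config=MINIO_CONFIG)`.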
45 changes: 28 additions & 17 deletions charms/kfp-profile-controller/tests/integration/test_charm.py
@@ -21,15 +21,26 @@
 METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())
 CHARM_NAME = METADATA["name"]

-MINIO_APP_NAME = "minio"
-MINIO_CONFIG = {"access-key": "minio", "secret-key": "minio-secret-key"}
 CUSTOM_FRONTEND_IMAGE = "gcr.io/ml-pipeline/frontend:latest"
 CUSTOM_VISUALISATION_IMAGE = "gcr.io/ml-pipeline/visualization-server:latest"

 PodDefault = create_namespaced_resource(
     group="kubeflow.org", version="v1alpha1", kind="PodDefault", plural="poddefaults"
 )

+ADMISSION_WEBHOOK_CHANNEL = "latest/edge"
+ADMISSION_WEBHOOK = "admission-webhook"
+ADMISSION_WEBHOOK_TRUST = True
+METACONTROLLER_CHANNEL = "latest/edge"
+METACONTROLLER = "metacontroller-operator"
+METACONTROLLER_TRUST = True
+MINIO_CHANNEL = "latest/edge"
+MINIO = "minio"
+MINIO_CONFIG = {"access-key": "minio", "secret-key": "minio-secret-key"}
+KUBEFLOW_PROFILES_CHANNEL = "latest/edge"
+KUBEFLOW_PROFILES = "kubeflow-profiles"
+KUBEFLOW_PROFILES_TRUST = True


 @pytest.mark.abort_on_fail
 async def test_build_and_deploy(ops_test: OpsTest):
@@ -40,38 +51,38 @@ async def test_build_and_deploy(ops_test: OpsTest):
     resources = {"oci-image": image_path}

     # Deploy the admission webhook to apply the PodDefault CRD required by the charm workload
-    await ops_test.model.deploy(entity_url="admission-webhook", channel="latest/edge", trust=True)
+    await ops_test.model.deploy(
+        entity_url=ADMISSION_WEBHOOK,
+        channel=ADMISSION_WEBHOOK_CHANNEL,
+        trust=ADMISSION_WEBHOOK_TRUST,
+    )
     # TODO: The webhook charm must be active before the metacontroller is deployed, due to the bug
     # described here: https://github.com/canonical/metacontroller-operator/issues/86
     # Drop this wait_for_idle once the above issue is closed
-    await ops_test.model.wait_for_idle(apps=["admission-webhook"], status="active")
+    await ops_test.model.wait_for_idle(apps=[ADMISSION_WEBHOOK], status="active")

     await ops_test.model.deploy(
-        entity_url="metacontroller-operator",
-        # TODO: Revert once metacontroller stable supports k8s 1.22
-        channel="latest/edge",
-        # Remove this config option after the metacontroller-operator is updated to v3
-        config={"metacontroller-image": "docker.io/metacontrollerio/metacontroller:v3.0.0"},
-        trust=True,
+        entity_url=METACONTROLLER,
+        channel=METACONTROLLER_CHANNEL,
+        trust=METACONTROLLER_TRUST,
     )

     await ops_test.model.deploy(
         built_charm_path, application_name=CHARM_NAME, resources=resources, trust=True
     )

     # Deploy required relations
-    await ops_test.model.deploy(entity_url=MINIO_APP_NAME, config=MINIO_CONFIG)
+    await ops_test.model.deploy(entity_url=MINIO, config=MINIO_CONFIG)
     await ops_test.model.add_relation(
         f"{CHARM_NAME}:object-storage",
-        f"{MINIO_APP_NAME}:object-storage",
+        f"{MINIO}:object-storage",
     )

     # Deploy charms responsible for CRDs creation
     await ops_test.model.deploy(
-        entity_url="kubeflow-profiles",
-        # TODO: Revert once kubeflow-profiles stable supports k8s 1.22
-        channel="latest/edge",
-        trust=True,
+        entity_url=KUBEFLOW_PROFILES,
+        channel=KUBEFLOW_PROFILES_CHANNEL,
+        trust=KUBEFLOW_PROFILES_TRUST,
     )

     # Wait for everything to deploy
@@ -200,7 +211,7 @@ async def test_model_resources(ops_test: OpsTest):
     Verifies that the secret was created, decoded secret-key matches the minio config value,
     and that the pods are running.
     """
-    minio_config = await ops_test.model.applications[MINIO_APP_NAME].get_config()
+    minio_config = await ops_test.model.applications[MINIO].get_config()

     await assert_minio_secret(
         access_key=minio_config["access-key"]["value"],
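Aside: the `PodDefault` name defined near the top of this file via `create_namespaced_resource` is a lightkube generic resource class for the CRD that the admission-webhook charm applies. For readers unfamiliar with the pattern, a minimal usage sketch follows; the namespace is made up for illustration, and this is not code from the PR:

```python
# Hypothetical usage sketch of the PodDefault generic resource.
from lightkube import Client
from lightkube.generic_resource import create_namespaced_resource

PodDefault = create_namespaced_resource(
    group="kubeflow.org", version="v1alpha1", kind="PodDefault", plural="poddefaults"
)

client = Client()
# Listing fails unless the PodDefault CRD has been applied first, which is why
# the test deploys admission-webhook before this charm's workload starts.
for pod_default in client.list(PodDefault, namespace="kubeflow-user-example-com"):
    print(pod_default.metadata.name)
```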
1 change: 1 addition & 0 deletions tests/README.md
@@ -11,6 +11,7 @@ This directory has the following structure:
 ├── README.md
 └── integration
     ├── bundles
+    |   ├── kfp_1.8_stable_install.yaml.j2
     │   ├── kfp_1.7_stable_install.yaml.j2
     │   └── kfp_latest_edge.yaml.j2
     ├── conftest.py
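The bundle definitions listed here are Jinja2 templates (`.yaml.j2`) that the integration test harness renders before deployment. A minimal sketch of rendering one by hand; whether this particular template takes any variables is not shown in this diff, so the bare `render()` call is an assumption:

```python
# Hypothetical sketch: render a bundle template to plain YAML for `juju deploy`.
from pathlib import Path

from jinja2 import Template

template_path = Path("tests/integration/bundles/kfp_1.8_stable_install.yaml.j2")
bundle = Template(template_path.read_text()).render()  # pass template vars here if needed
Path("kfp_1.8_stable_install.yaml").write_text(bundle)
```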