Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Run integration test with a single group #152

Merged
merged 4 commits into from
Sep 5, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions src/databricks/labs/ucx/providers/groups_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,12 @@ def __init__(self):
def add(self, group: MigrationGroupInfo):
self.groups.append(group)

def is_in_scope(self, attr: str, group: Group) -> bool:
    """Return True if *group* matches the ``attr`` side (e.g. ``workspace``,
    ``account`` or ``backup``) of any tracked migration group, compared by id."""
    target_id = group.id
    return any(getattr(info, attr).id == target_id for info in self.groups)

def get_by_workspace_group_name(self, workspace_group_name: str) -> MigrationGroupInfo | None:
found = [g for g in self.groups if g.workspace.display_name == workspace_group_name]
if len(found) == 0:
Expand Down
41 changes: 31 additions & 10 deletions src/databricks/labs/ucx/providers/mixins/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from typing import BinaryIO

import pytest
from databricks.sdk import WorkspaceClient
from databricks.sdk import AccountClient, WorkspaceClient
from databricks.sdk.core import DatabricksError
from databricks.sdk.service import compute, iam, jobs, pipelines, workspace

Expand All @@ -31,10 +31,8 @@ def inner(**kwargs):
_LOG.debug(f"removing {name} fixture: {x}")
remove(x)
except DatabricksError as e:
if e.error_code in ("RESOURCE_DOES_NOT_EXIST",):
_LOG.debug(f"ignoring error while {name} {x} teardown: {e}")
continue
raise e
# TODO: fix on the databricks-labs-pytester level
_LOG.debug(f"ignoring error while {name} {x} teardown: {e}")


@pytest.fixture
Expand All @@ -55,6 +53,13 @@ def ws() -> WorkspaceClient:
return WorkspaceClient()


@pytest.fixture(scope="session")
def acc() -> AccountClient:
    """Session-wide account-level client.

    Credentials come from Databricks Unified Auth environment variables; see
    https://databricks-sdk-py.readthedocs.io/en/latest/authentication.html
    """
    client = AccountClient()
    return client


@pytest.fixture
def make_secret_scope(ws, make_random):
def create(**kwargs):
Expand Down Expand Up @@ -125,19 +130,35 @@ def _scim_values(ids: list[str]) -> list[iam.ComplexValue]:
return [iam.ComplexValue(value=x) for x in ids]


@pytest.fixture
def make_group(ws, make_random):
def _make_group(name, interface, make_random):
    """Shared factory for SCIM group fixtures.

    ``interface`` is either ``ws.groups`` (workspace-level) or ``acc.groups``
    (account-level), so one create/delete implementation serves both fixture
    flavors. Yields via ``factory`` so created groups are deleted on teardown.

    Note: the diff paste interleaved a stale old-version line
    (``return ws.groups.create(**kwargs)``) before the entitlements handling;
    it is removed here so entitlements are applied and ``interface`` is used.
    """

    def create(
        *,
        members: list[str] | None = None,
        roles: list[str] | None = None,
        entitlements: list[str] | None = None,
        display_name: str | None = None,
        **kwargs,
    ):
        # Random display name keeps concurrent test runs from colliding.
        kwargs["display_name"] = f"sdk-{make_random(4)}" if display_name is None else display_name
        if members is not None:
            kwargs["members"] = _scim_values(members)
        if roles is not None:
            kwargs["roles"] = _scim_values(roles)
        if entitlements is not None:
            kwargs["entitlements"] = _scim_values(entitlements)
        return interface.create(**kwargs)

    yield from factory(name, create, lambda item: interface.delete(item.id))


@pytest.fixture
def make_group(ws, make_random):
    """Workspace-group fixture: yields a factory that creates groups through the
    workspace SCIM API and deletes them on teardown.

    A stale old-version line (``yield from factory("workspace group", ...)``)
    left over from the diff paste after the function is dropped here.
    """
    yield from _make_group("workspace group", ws.groups, make_random)

@pytest.fixture
def make_acc_group(acc, make_random):
    """Account-group fixture: yields a factory backed by the account-level SCIM API."""
    group_factory = _make_group("account group", acc.groups, make_random)
    yield from group_factory


@pytest.fixture
Expand Down
88 changes: 33 additions & 55 deletions tests/integration/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,10 @@
import logging
import os
import random
import uuid
from functools import partial

import databricks.sdk.core
import pytest
from _pytest.fixtures import SubRequest
from databricks.sdk import AccountClient, WorkspaceClient
from databricks.sdk.core import Config, DatabricksError
from databricks.sdk.service.compute import (
Expand Down Expand Up @@ -46,8 +44,6 @@
EnvironmentInfo,
InstanceProfile,
WorkspaceObjects,
_cleanup_groups,
_create_groups,
_get_basic_job_cluster,
_get_basic_task,
_set_random_permissions,
Expand Down Expand Up @@ -220,48 +216,30 @@ def test_table_fixture(make_table):
logger.info(f'Created new view in new schema: {make_table(view=True, ctas="SELECT 2+2 AS four")}')


@pytest.fixture(scope="session")
def env(ws: WorkspaceClient, acc: AccountClient, request: SubRequest) -> EnvironmentInfo:
# prepare environment
test_uid = f"{UCX_TESTING_PREFIX}_{str(uuid.uuid4())[:8]}"
logger.debug(f"Creating environment with uid {test_uid}")
groups = _create_groups(ws, acc, test_uid, NUM_TEST_GROUPS, Threader)

def post_cleanup():
print("\n")
logger.debug("Cleaning up the environment")
logger.debug("Deleting test groups")
cleanups = [partial(_cleanup_groups, ws, acc, g) for g in groups]

def error_silencer(func):
def _wrapped(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception as e:
logger.warning(f"Cannot delete temp group, skipping it. Original exception {e}")

return _wrapped

silent_delete = error_silencer(ws.groups.delete)

temp_cleanups = [
# TODO: this is too heavy for SCIM API, refactor to ID lookup
partial(silent_delete, g.id)
for g in ws.groups.list(filter=f"displayName sw 'db-temp-{test_uid}'")
]
new_ws_groups_cleanups = [
partial(silent_delete, g.id) for g in ws.groups.list(filter=f"displayName sw '{test_uid}'")
]
@pytest.fixture
def user_pool(ws):
    """Pre-provisioned test users (display name starting with 'test-user-')
    from which group members are drawn."""
    users = ws.users.list(
        filter="displayName sw 'test-user-'",
        attributes="id, userName, displayName",
    )
    return list(users)

all_cleanups = cleanups + temp_cleanups + new_ws_groups_cleanups
Threader(all_cleanups).run()
logger.debug(f"Finished cleanup for the environment {test_uid}")

request.addfinalizer(post_cleanup)
yield EnvironmentInfo(test_uid=test_uid, groups=groups)
@pytest.fixture
def make_ucx_group(make_random, make_group, make_acc_group, user_pool):
    """Factory producing a (workspace group, account group) pair that share the
    same random display name and the same member list drawn from ``user_pool``."""

    def inner():
        display_name = f"ucx_{make_random(4)}"
        member_count = random.randint(1, 40)
        # random.choices samples WITH replacement, so members may repeat —
        # behavior preserved as-is.
        members = [user.id for user in random.choices(user_pool, k=member_count)]
        group_in_workspace = make_group(
            display_name=display_name, members=members, entitlements=["allow-cluster-create"]
        )
        group_in_account = make_acc_group(display_name=display_name, members=members)
        return group_in_workspace, group_in_account

    return inner

@pytest.fixture
def env(make_ucx_group, make_random) -> EnvironmentInfo:
    """Minimal test environment: a unique test uid plus one UCX group pair.

    Function-scoped so each test gets fresh groups; group deletion is handled
    by the underlying group fixtures' teardown. A stale duplicated decorator
    (``@pytest.fixture(scope="session")``) left over from the old version of
    the diff is removed — stacking both decorators would wrap the fixture
    twice and break it.
    """
    test_uid = f"ucx_{make_random(4)}"
    yield EnvironmentInfo(test_uid=test_uid, groups=[make_ucx_group()])


@pytest.fixture
def instance_profiles(env: EnvironmentInfo, ws: WorkspaceClient) -> list[InstanceProfile]:
logger.debug("Adding test instance profiles")
profiles: list[InstanceProfile] = []
Expand Down Expand Up @@ -295,7 +273,7 @@ def instance_profiles(env: EnvironmentInfo, ws: WorkspaceClient) -> list[Instanc
logger.debug("Test instance profiles deleted")


@pytest.fixture(scope="session")
@pytest.fixture
def instance_pools(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreateInstancePoolResponse]:
logger.debug("Creating test instance pools")

Expand All @@ -320,7 +298,7 @@ def instance_pools(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreateInst
Threader(executables).run()


@pytest.fixture(scope="session")
@pytest.fixture
def pipelines(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePipelineResponse]:
logger.debug("Creating test DLT pipelines")

Expand Down Expand Up @@ -350,7 +328,7 @@ def pipelines(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePipelineR
Threader(executables).run()


@pytest.fixture(scope="session")
@pytest.fixture
def jobs(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreateResponse]:
logger.debug("Creating test jobs")

Expand All @@ -377,7 +355,7 @@ def jobs(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreateResponse]:
Threader(executables).run()


@pytest.fixture(scope="session")
@pytest.fixture
def cluster_policies(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePolicyResponse]:
logger.debug("Creating test cluster policies")

Expand Down Expand Up @@ -412,7 +390,7 @@ def cluster_policies(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePo
Threader(executables).run()


@pytest.fixture(scope="session")
@pytest.fixture
def clusters(env: EnvironmentInfo, ws: WorkspaceClient) -> list[ClusterDetails]:
logger.debug("Creating test clusters")

Expand Down Expand Up @@ -447,7 +425,7 @@ def clusters(env: EnvironmentInfo, ws: WorkspaceClient) -> list[ClusterDetails]:
logger.debug("Test clusters deleted")


@pytest.fixture(scope="session")
@pytest.fixture
def experiments(ws: WorkspaceClient, env: EnvironmentInfo) -> list[CreateExperimentResponse]:
logger.debug("Creating test experiments")

Expand Down Expand Up @@ -480,7 +458,7 @@ def experiments(ws: WorkspaceClient, env: EnvironmentInfo) -> list[CreateExperim
logger.debug("Test experiments deleted")


@pytest.fixture(scope="session")
@pytest.fixture
def models(ws: WorkspaceClient, env: EnvironmentInfo) -> list[ModelDatabricks]:
logger.debug("Creating models")

Expand Down Expand Up @@ -513,7 +491,7 @@ def models(ws: WorkspaceClient, env: EnvironmentInfo) -> list[ModelDatabricks]:
logger.debug("Test models deleted")


@pytest.fixture(scope="session")
@pytest.fixture
def warehouses(ws: WorkspaceClient, env: EnvironmentInfo) -> list[GetWarehouseResponse]:
logger.debug("Creating warehouses")

Expand Down Expand Up @@ -548,13 +526,13 @@ def warehouses(ws: WorkspaceClient, env: EnvironmentInfo) -> list[GetWarehouseRe
logger.debug("Test warehouses deleted")


@pytest.fixture(scope="session")
@pytest.fixture
def tokens(ws: WorkspaceClient, env: EnvironmentInfo) -> list[AccessControlRequest]:
logger.debug("Adding token-level permissions to groups")

token_permissions = [
AccessControlRequest(group_name=ws_group.display_name, permission_level=PermissionLevel.CAN_USE)
for ws_group, _ in random.sample(env.groups, k=NUM_TEST_TOKENS)
for ws_group, _ in random.sample(env.groups, k=min(len(env.groups), NUM_TEST_TOKENS))
]

ws.permissions.update(
Expand All @@ -566,7 +544,7 @@ def tokens(ws: WorkspaceClient, env: EnvironmentInfo) -> list[AccessControlReque
yield token_permissions


@pytest.fixture(scope="session")
@pytest.fixture
def secret_scopes(ws: WorkspaceClient, env: EnvironmentInfo) -> list[SecretScope]:
logger.debug("Creating test secret scopes")

Expand All @@ -587,7 +565,7 @@ def secret_scopes(ws: WorkspaceClient, env: EnvironmentInfo) -> list[SecretScope
Threader(executables).run()


@pytest.fixture(scope="session")
@pytest.fixture
def workspace_objects(ws: WorkspaceClient, env: EnvironmentInfo) -> WorkspaceObjects:
logger.info(f"Creating test workspace objects under /{env.test_uid}")
ws.workspace.mkdirs(f"/{env.test_uid}")
Expand Down Expand Up @@ -642,7 +620,7 @@ def workspace_objects(ws: WorkspaceClient, env: EnvironmentInfo) -> WorkspaceObj
logger.debug("Test workspace objects deleted")


@pytest.fixture(scope="session")
@pytest.fixture
def verifiable_objects(
clusters,
instance_pools,
Expand Down
35 changes: 11 additions & 24 deletions tests/integration/test_e2e.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,21 +181,8 @@ def test_e2e(
toolkit = GroupMigrationToolkit(config)
toolkit.prepare_environment()

logger.debug("Verifying that the groups were created")

assert len(ws.groups.list(filter=f"displayName sw '{config.groups.backup_group_prefix}{env.test_uid}'")) == len(
toolkit.group_manager.migration_groups_provider.groups
)

assert len(ws.groups.list(filter=f"displayName sw '{env.test_uid}'")) == len(
toolkit.group_manager.migration_groups_provider.groups
)

assert len(toolkit.group_manager._list_account_level_groups(filter=f"displayName sw '{env.test_uid}'")) == len(
toolkit.group_manager.migration_groups_provider.groups
)

for _info in toolkit.group_manager.migration_groups_provider.groups:
group_migration_state = toolkit.group_manager.migration_groups_provider
for _info in group_migration_state.groups:
_ws = ws.groups.get(id=_info.workspace.id)
_backup = ws.groups.get(id=_info.backup.id)
_ws_members = sorted([m.value for m in _ws.members])
Expand All @@ -216,28 +203,28 @@ def test_e2e(
for _objects, id_attribute, request_object_type in verifiable_objects:
_verify_group_permissions(_objects, id_attribute, request_object_type, ws, toolkit, "backup")

_verify_roles_and_entitlements(toolkit.group_manager.migration_groups_provider, ws, "backup")
_verify_roles_and_entitlements(group_migration_state, ws, "backup")

toolkit.replace_workspace_groups_with_account_groups()

new_groups = list(ws.groups.list(filter=f"displayName sw '{env.test_uid}'", attributes="displayName,meta"))
assert len(new_groups) == len(toolkit.group_manager.migration_groups_provider.groups)
new_groups = [
_ for _ in ws.groups.list(attributes="displayName,meta") if group_migration_state.is_in_scope("account", _)
]
assert len(new_groups) == len(group_migration_state.groups)
assert all(g.meta.resource_type == "Group" for g in new_groups)

toolkit.apply_permissions_to_account_groups()

for _objects, id_attribute, request_object_type in verifiable_objects:
_verify_group_permissions(_objects, id_attribute, request_object_type, ws, toolkit, "account")

_verify_roles_and_entitlements(toolkit.group_manager.migration_groups_provider, ws, "account")
_verify_roles_and_entitlements(group_migration_state, ws, "account")

toolkit.delete_backup_groups()

backup_groups = list(
ws.groups.list(
filter=f"displayName sw '{config.groups.backup_group_prefix}{env.test_uid}'", attributes="displayName,meta"
)
)
backup_groups = [
_ for _ in ws.groups.list(attributes="displayName,meta") if group_migration_state.is_in_scope("backup", _)
]
assert len(backup_groups) == 0

toolkit.cleanup_inventory_table()
Loading