use explicit --mount with types of mounts rather than --volume flags (#23982)

The --volume flag is an older style of specifying mounts used by docker;
the newer and more explicit alternative is --mount, where you have to
specify the type, source and destination as key/value pairs.

This is more explicit and avoids guesswork when volumes are mounted
(for example, on WSL2 a volume name could apparently be guessed wrongly
as a path). The change explicitly states which of the mounts are bind
mounts and which are volume mounts.
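
To illustrate the syntax difference only (the image name and host paths
below are made up, not the actual Breeze mounts):

    # Old style: one positional string; docker has to guess whether the
    # source is a host path or a named volume.
    docker run -v /sources/airflow/airflow:/opt/airflow/airflow:cached some-image

    # New style: the kind of mount is spelled out as key/value pairs.
    docker run --mount type=bind,src=/sources/airflow/airflow,dst=/opt/airflow/airflow some-image
    docker run --mount type=volume,src=mypy-cache-volume,dst=/opt/airflow/.mypy_cache some-image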

Another nice side effect of this change is that when the source of a
bind mount is missing, docker does not silently create a directory with
the missing name; it fails instead. Previously, missing files led to
directories being created in their place (for example .bash_aliases and
similar), which then required extra cleanup. Instead of accounting for
those files being created, we now simply skip a mount if its file or
folder does not exist.
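
A rough sketch of that difference in behaviour, with a hypothetical path
standing in for the real mounts:

    # -v: if /sources/airflow/.bash_aliases does not exist on the host,
    # docker silently creates it as a directory before starting the container.
    docker run -v /sources/airflow/.bash_aliases:/root/.bash_aliases:cached some-image

    # --mount type=bind: a missing source makes docker fail fast with an
    # error along the lines of "bind source path does not exist", which is
    # why the new code checks the path exists and otherwise skips the mount.
    docker run --mount type=bind,src=/sources/airflow/.bash_aliases,dst=/root/.bash_aliases some-image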

(cherry picked from commit 4936764)
potiuk authored and ephraimbuddy committed May 30, 2022
1 parent 312b7f3 commit 8d522e1
Showing 6 changed files with 56 additions and 56 deletions.
2 changes: 0 additions & 2 deletions breeze-legacy
@@ -2595,8 +2595,6 @@ breeze::check_and_save_all_params

initialization::make_constants_read_only

sanity_checks::sanitize_mounted_files

breeze::prepare_command_files

breeze::run_build_command
85 changes: 47 additions & 38 deletions dev/breeze/src/airflow_breeze/utils/docker_command_utils.py
@@ -65,36 +65,36 @@
)

NECESSARY_HOST_VOLUMES = [
"/.bash_aliases:/root/.bash_aliases:cached",
"/.bash_history:/root/.bash_history:cached",
"/.coveragerc:/opt/airflow/.coveragerc:cached",
"/.dockerignore:/opt/airflow/.dockerignore:cached",
"/.flake8:/opt/airflow/.flake8:cached",
"/.github:/opt/airflow/.github:cached",
"/.inputrc:/root/.inputrc:cached",
"/.rat-excludes:/opt/airflow/.rat-excludes:cached",
"/RELEASE_NOTES.rst:/opt/airflow/RELEASE_NOTES.rst:cached",
"/LICENSE:/opt/airflow/LICENSE:cached",
"/MANIFEST.in:/opt/airflow/MANIFEST.in:cached",
"/NOTICE:/opt/airflow/NOTICE:cached",
"/airflow:/opt/airflow/airflow:cached",
"/provider_packages:/opt/airflow/provider_packages:cached",
"/dags:/opt/airflow/dags:cached",
"/dev:/opt/airflow/dev:cached",
"/docs:/opt/airflow/docs:cached",
"/hooks:/opt/airflow/hooks:cached",
"/logs:/root/airflow/logs:cached",
"/pyproject.toml:/opt/airflow/pyproject.toml:cached",
"/pytest.ini:/opt/airflow/pytest.ini:cached",
"/scripts:/opt/airflow/scripts:cached",
"/scripts/docker/entrypoint_ci.sh:/entrypoint:cached",
"/setup.cfg:/opt/airflow/setup.cfg:cached",
"/setup.py:/opt/airflow/setup.py:cached",
"/tests:/opt/airflow/tests:cached",
"/kubernetes_tests:/opt/airflow/kubernetes_tests:cached",
"/docker_tests:/opt/airflow/docker_tests:cached",
"/chart:/opt/airflow/chart:cached",
"/metastore_browser:/opt/airflow/metastore_browser:cached",
(".bash_aliases", "/root/.bash_aliases"),
(".bash_history", "/root/.bash_history"),
(".coveragerc", "/opt/airflow/.coveragerc"),
(".dockerignore", "/opt/airflow/.dockerignore"),
(".flake8", "/opt/airflow/.flake8"),
(".github", "/opt/airflow/.github"),
(".inputrc", "/root/.inputrc"),
(".rat-excludes", "/opt/airflow/.rat-excludes"),
("RELEASE_NOTES.rst", "/opt/airflow/RELEASE_NOTES.rst"),
("LICENSE", "/opt/airflow/LICENSE"),
("MANIFEST.in", "/opt/airflow/MANIFEST.in"),
("NOTICE", "/opt/airflow/NOTICE"),
("airflow", "/opt/airflow/airflow"),
("provider_packages", "/opt/airflow/provider_packages"),
("dags", "/opt/airflow/dags"),
("dev", "/opt/airflow/dev"),
("docs", "/opt/airflow/docs"),
("hooks", "/opt/airflow/hooks"),
("logs", "/root/airflow/logs"),
("pyproject.toml", "/opt/airflow/pyproject.toml"),
("pytest.ini", "/opt/airflow/pytest.ini"),
("scripts", "/opt/airflow/scripts"),
("scripts/docker/entrypoint_ci.sh", "/entrypoint"),
("setup.cfg", "/opt/airflow/setup.cfg"),
("setup.py", "/opt/airflow/setup.py"),
("tests", "/opt/airflow/tests"),
("kubernetes_tests", "/opt/airflow/kubernetes_tests"),
("docker_tests", "/opt/airflow/docker_tests"),
("chart", "/opt/airflow/chart"),
("metastore_browser", "/opt/airflow/metastore_browser"),
]


@@ -117,17 +117,26 @@ def get_extra_docker_flags(mount_sources: str) -> List[str]:
"""
extra_docker_flags = []
if mount_sources == MOUNT_ALL:
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}:/opt/airflow/:cached"])
extra_docker_flags.extend(["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT},dst=/opt/airflow/"])
elif mount_sources == MOUNT_SELECTED:
for flag in NECESSARY_HOST_VOLUMES:
extra_docker_flags.extend(["-v", str(AIRFLOW_SOURCES_ROOT) + flag])
extra_docker_flags.extend(['-v', "docker-compose_mypy-cache-volume:/opt/airflow/.mypy_cache/"])
for (src, dst) in NECESSARY_HOST_VOLUMES:
if (AIRFLOW_SOURCES_ROOT / src).exists():
extra_docker_flags.extend(
["--mount", f'type=bind,src={AIRFLOW_SOURCES_ROOT / src},dst={dst}']
)
extra_docker_flags.extend(
['--mount', "type=volume,src=docker-compose_mypy-cache-volume,dst=/opt/airflow/.mypy_cache"]
)
else: # none
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT / 'empty'}:/opt/airflow/airflow"])
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/files:/files"])
extra_docker_flags.extend(["-v", f"{AIRFLOW_SOURCES_ROOT}/dist:/dist"])
extra_docker_flags.extend(
["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'empty'},dst=/opt/airflow/airflow"]
)
extra_docker_flags.extend(["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'files'},dst=/files"])
extra_docker_flags.extend(["--mount", f"type=bind,src={AIRFLOW_SOURCES_ROOT / 'dist'},dst=/dist"])
extra_docker_flags.extend(["--rm"])
extra_docker_flags.extend(["--env-file", f"{AIRFLOW_SOURCES_ROOT}/scripts/ci/docker-compose/_docker.env"])
extra_docker_flags.extend(
["--env-file", f"{AIRFLOW_SOURCES_ROOT / 'scripts' / 'ci' / 'docker-compose' / '_docker.env' }"]
)
return extra_docker_flags


4 changes: 3 additions & 1 deletion dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -237,10 +237,11 @@ def find_airflow_sources_root_to_operate_on() -> Path:
return airflow_sources


AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on()
AIRFLOW_SOURCES_ROOT = find_airflow_sources_root_to_operate_on().resolve()
BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / '.build'
FILES_DIR = AIRFLOW_SOURCES_ROOT / 'files'
MSSQL_DATA_VOLUME = AIRFLOW_SOURCES_ROOT / 'tmp_mssql_volume'
KUBE_DIR = AIRFLOW_SOURCES_ROOT / ".kube"
LOGS_DIR = AIRFLOW_SOURCES_ROOT / 'logs'
DIST_DIR = AIRFLOW_SOURCES_ROOT / 'dist'
SCRIPTS_CI_DIR = AIRFLOW_SOURCES_ROOT / 'scripts' / 'ci'
@@ -258,6 +259,7 @@ def create_directories() -> None:
BUILD_CACHE_DIR.mkdir(parents=True, exist_ok=True)
FILES_DIR.mkdir(parents=True, exist_ok=True)
MSSQL_DATA_VOLUME.mkdir(parents=True, exist_ok=True)
KUBE_DIR.mkdir(parents=True, exist_ok=True)
LOGS_DIR.mkdir(parents=True, exist_ok=True)
DIST_DIR.mkdir(parents=True, exist_ok=True)
OUTPUT_LOG.mkdir(parents=True, exist_ok=True)
2 changes: 1 addition & 1 deletion dev/breeze/tests/test_commands.py
@@ -32,7 +32,7 @@ def test_get_extra_docker_flags_all():
def test_get_extra_docker_flags_selected():
flags = get_extra_docker_flags(MOUNT_SELECTED)
assert "empty" not in "".join(flags)
assert len(flags) > 60
assert len(flags) > 40


def test_get_extra_docker_flags_none():
5 changes: 5 additions & 0 deletions scripts/ci/libraries/_initialization.sh
@@ -43,6 +43,10 @@ function initialization::create_directories() {
export BUILD_CACHE_DIR="${AIRFLOW_SOURCES}/.build"
readonly BUILD_CACHE_DIR

# Directory where Kind credential information is kept
export KUBE_CACHE_DIR="${AIRFLOW_SOURCES}/.kube"
readonly KUBE_CACHE_DIR

# In case of tmpfs backend for docker, mssql fails because TMPFS does not support
# O_DIRECT parameter for direct writing to the filesystem
# https://github.com/microsoft/mssql-docker/issues/13
@@ -55,6 +59,7 @@ function initialization::create_directories() {
mkdir -p "${BUILD_CACHE_DIR}" >/dev/null
mkdir -p "${FILES_DIR}" >/dev/null
mkdir -p "${MSSQL_DATA_VOLUME}" >/dev/null
mkdir -p "${KUBE_CACHE_DIR}" >/dev/null
# MSSQL 2019 runs with non-root user by default so we have to make the volumes world-writeable
# This is a bit scary and we could get by making it group-writeable but the group would have
# to be set to "root" (GID=0) for the volume to work and this cannot be accomplished without sudo
14 changes: 0 additions & 14 deletions scripts/ci/libraries/_sanity_checks.sh
@@ -26,19 +26,6 @@ function sanity_checks::sanitize_file() {
touch "${1}"
}

# Those files are mounted into container when run locally
# .bash_history is preserved and you can modify .bash_aliases and .inputrc
# according to your liking
function sanity_checks::sanitize_mounted_files() {
sanity_checks::sanitize_file "${AIRFLOW_SOURCES}/.bash_history"
sanity_checks::sanitize_file "${AIRFLOW_SOURCES}/.bash_aliases"
sanity_checks::sanitize_file "${AIRFLOW_SOURCES}/.inputrc"

# When KinD cluster is created, the folder keeps authentication information
# across sessions
mkdir -p "${AIRFLOW_SOURCES}/.kube" >/dev/null 2>&1
}

#
# Creates cache directory where we will keep temporary files needed for the docker build
#
@@ -150,5 +137,4 @@ function sanity_checks::basic_sanity_checks() {
initialization::set_default_python_version_if_empty
sanity_checks::go_to_airflow_sources
sanity_checks::check_if_coreutils_installed
sanity_checks::sanitize_mounted_files
}
