Make c9s variant contain c9s content only, no OCP content
This is a first stab at #799, aimed at the c9s variant to start. In this model, the base (container and disk) images we build in the pipeline do not contain any OCP-specific details. The compose is made up purely of RPMs coming directly out of the c9s pungi composes. Let's go over the details in bullet form:

1. To emphasize the binding to c9s composes, we change the versioning scheme: the version string is now *exactly* the same version as the pungi compose from which we've built (well, we do add a `.N` field because we want to be able to rebuild multiple times on top of the same base pungi compose). It's almost as if our builds were part of the c9s pungi composes directly. (And maybe one day they will be...) This is implemented using a `versionary` script that queries compose info (see the sketch at the end of this message).

2. We no longer include `packages-openshift.yaml`: this has all the OCP stuff that we want to do in a layered build instead.

3. We no longer completely rewrite `/etc/os-release`. The host *is* image-mode CentOS Stream and e.g. `ID` will now say `centos`. However, we do still inject the `VARIANT` and `VARIANT_ID` fields to note that it's of the CoreOS kind. We should probably match FCOS here and properly add a CoreOS variant in the `centos-release` package.

4. Tests which have to do with the OpenShift layer now have the required tag `openshift`. This means they'll no longer run in the default set of kola tests. When building the derived image, we will run just those tests using `kola run --tag openshift --oscontainer ...`.

Note that to make this work, OCP itself still needs to actually have that derived image containing the OCP bits. For now, we will build this in the pipelines (as a separate artifact that we push to the repos), but the eventual goal is to split that out of the pipeline and have it be built more like the rest of OCP (using Prow/OSBS/Konflux).

Note also that we don't currently build the c9s variant in the pipelines, but this is long overdue IMO.
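As a minimal sketch of the new versioning scheme: the version string is just the pungi compose ID plus an iteration suffix. The regex below is the one `versionary` uses; the compose ID shown is hypothetical, chosen only to match the pungi naming pattern.

import re

# "<pungi compose ID>.<iteration>" -- e.g. a rebuild on top of compose "9-20240115.0"
VERSION_RE = re.compile(r'^([0-9]+-[0-9]{8}\.[0-9]+)\.([0-9]+)$')

m = VERSION_RE.match('9-20240115.0.2')  # hypothetical build version
assert m is not None
composeid, iteration = m.group(1), int(m.group(2))
print(composeid)  # 9-20240115.0 -> the pungi compose we built from
print(iteration)  # 2            -> iteration on top of that same compose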
Showing 5 changed files with 157 additions and 92 deletions.
#!/usr/bin/python3 -u

# This script requires the yum repos used by our compose to be direct output
# from pungi composes.

import configparser
import dnf
import json
import os
import re
import requests
import subprocess
import sys
import yaml

from urllib.parse import urlparse, urlunparse


def main():
    manifest_path = get_manifest_path()
    manifest = get_flattened_manifest(manifest_path)
    base = get_dnf_base('src/config')
    repoids = manifest['repos']

    # get base compose URL, verifying at the same time that all the repos point
    # into the same pungi compose
    compose_url = None
    for id in repoids:
        repo_compose_url = get_compose_url(base.repos[id])
        if compose_url is None:
            compose_url = repo_compose_url
        else:
            if compose_url != repo_compose_url:
                raise Exception(f"inconsistent compose URLs detected: {compose_url} vs {repo_compose_url}")

    config = configparser.ConfigParser()
    config.read_string(requests.get(compose_url + '/.composeinfo').text)

    composeid = config['product']['name'].removeprefix(config['product']['short'] + '-')
    eprint(f"composeid: {composeid}")

    next_iteration = get_next_iteration(composeid)

    # XXX: we should verify before leaving that the cached rpmmd repos we have
    # are from the compose we found to rule out race conditions between `cosa
    # fetch` and `requests.get()` above (e.g. compare the sha256 of repomd.xml)

    print(f"{composeid}.{next_iteration}")


def get_compose_url(repo):
    assert len(repo.baseurl) == 1
    url = urlparse(repo.baseurl[0])
    path = url.path.rstrip('/')
    components = path.split('/')
    assert components[-1] == 'os'
    # go up three levels since the structure is e.g. .../compose/<variant>/
    # <arch>/os and we want to get to .../compose
    return urlunparse(url._replace(path='/'.join(components[:-3])))


def get_dnf_base(basedir):
    base = dnf.Base()
    base.conf.reposdir = basedir
    base.read_all_repos()
    return base


def setup_repos(base, treefile):
    for repo in base.repos.values():
        repo.disable()

    eprint("Enabled repos:")
    for repo in treefile['repos']:
        base.repos[repo].enable()
        eprint(f"- {repo}")


def get_next_iteration(composeid):
    try:
        with open('builds/builds.json') as f:
            builds = json.load(f)
    except FileNotFoundError:
        builds = {'builds': []}

    if len(builds['builds']) == 0:
        eprint("n: 0 (no previous builds)")
        return 0

    last_buildid = builds['builds'][0]['id']
    last_version = parse_version(last_buildid)
    if not last_version:
        eprint(f"n: 0 (previous version {last_buildid} does not match scheme)")
        return 0

    last_composeid, last_iteration = last_version

    if composeid != last_composeid:
        eprint(f"n: 0 (previous version {last_buildid} compose ID does not match)")
        return 0

    n = last_iteration + 1
    eprint(f"n: {n} (incremented from previous version {last_buildid})")
    return n


def parse_version(version):
    m = re.match(r'^([0-9]+-[0-9]{8}\.[0-9]+)\.([0-9]+)$', version)
    if m is None:
        return None
    composeid, iteration = m.groups()
    return tuple((composeid, int(iteration)))


def get_manifest_path():
    if os.path.exists('src/config.json'):
        config_json = json.load(open('src/config.json'))
        variant = config_json['coreos-assembler.config-variant']
        return f'src/config/manifest-{variant}.yaml'
    return 'src/config/manifest.yaml'


def get_flattened_manifest(fn):
    return yaml.safe_load(
        subprocess.check_output(['rpm-ostree', 'compose', 'tree',
                                 '--print-only', fn]))


def eprint(*args):
    print(*args, file=sys.stderr)


if __name__ == "__main__":
    sys.exit(main())
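A note on runtime behavior, as a rough usage sketch (the compose ID here is hypothetical): all diagnostics, such as the detected compose ID and the chosen iteration, go to stderr via eprint(), while only the final version string is printed to stdout, so a caller can capture stdout directly. For example, with a compose ID of 9-20240115.0 and a previous build versioned 9-20240115.0.0, the script would log `n: 1 (incremented from previous version 9-20240115.0.0)` to stderr and print `9-20240115.0.1` to stdout.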