diff --git a/MANIFEST.in b/MANIFEST.in
index f0ed3bdcf..07608d8c7 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -22,6 +22,7 @@
 exclude contrib/opentimelineio_contrib/adapters/Makefile
 exclude Makefile
 exclude */.DS_Store
 exclude .clang-format
+exclude OTIO_VERSION.json
 global-exclude *.pyc
 prune maintainers
diff --git a/Makefile b/Makefile
index b64fd167c..6559e5b42 100644
--- a/Makefile
+++ b/Makefile
@@ -33,6 +33,10 @@
 CLANG_FORMAT_PROG := $(shell command -v clang-format 2> /dev/null)
 # AUTOPEP8_PROG := $(shell command -v autopep8 2> /dev/null)
 TEST_ARGS=
+GIT = git
+GITSTATUS := $(shell git diff-index --quiet HEAD . 1>&2 2> /dev/null; echo $$?)
+
+
 ifeq ($(VERBOSE), 1)
 	TEST_ARGS:=-v
 endif
@@ -187,6 +191,7 @@ version-map:
 	@python src/py-opentimelineio/opentimelineio/console/autogen_version_map.py -i src/opentimelineio/CORE_VERSION_MAP.last.cpp --dryrun
 
 version-map-update:
+	@echo "updating the CORE_VERSION_MAP..."
 	@python src/py-opentimelineio/opentimelineio/console/autogen_version_map.py -i src/opentimelineio/CORE_VERSION_MAP.last.cpp -o src/opentimelineio/CORE_VERSION_MAP.cpp
 
 # generate documentation in html
@@ -198,3 +203,95 @@ doc-cpp:
 	@cd doxygen ; doxygen config/dox_config ; cd ..
 	@echo "wrote doxygen output to: doxygen/output/html/index.html"
 
+# release related targets
+confirm-release-intent:
+ifndef OTIO_DO_RELEASE
+	$(error \
+		"If you are sure you want to perform a release, set OTIO_DO_RELEASE=1")
+endif
+	@echo "Starting release process..."
+
+check-git-status:
+ifneq ($(GITSTATUS), 0)
+	$(error \
+		"Git repository is dirty, cannot create release. Run 'git status' \
+		for more info")
+endif
+	@echo "Git status is clean, ready to proceed with release."
+
+verify-license:
+	@echo "Verifying licenses in files..."
+	@python maintainers/verify_license.py -s .
+
+fix-license:
+	@python maintainers/verify_license.py -s . -f
+
+freeze-ci-versions:
+	@echo "freezing CI versions..."
+	@python maintainers/freeze_ci_versions.py -f
+
+unfreeze-ci-versions:
+	@echo "unfreezing CI versions..."
+	@python maintainers/freeze_ci_versions.py -u
+
+# needs to happen _before_ version-map-update so that version in
+# CORE_VERSION_MAP does not have the .dev1 suffix at release time
+remove-dev-suffix:
+	@echo "Removing .dev1 suffix"
+	@python maintainers/remove_dev_suffix.py -r
+
+check-github-token:
+ifndef OTIO_RELEASE_GITHUB_TOKEN
+	$(error \
+		OTIO_RELEASE_GITHUB_TOKEN is not set, unable to update contributors)
+endif
+
+update-contributors: check-github-token
+	@echo "Updating CONTRIBUTORS.md..."
+	@python maintainers/fetch_contributors.py \
+		--repo AcademySoftwareFoundation/OpenTimelineIO \
+		--token $(OTIO_RELEASE_GITHUB_TOKEN)
+
+dev-python-install:
+	@python setup.py install
+
+# make target for preparing a release candidate
+release: \
+	confirm-release-intent \
+	check-git-status \
+	check-github-token \
+	verify-license \
+	freeze-ci-versions \
+	remove-dev-suffix \
+	format \
+	dev-python-install \
+	version-map-update \
+	test-core \
+	update-contributors
+	@echo "Release is ready. Commit, push and open a PR!"
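+# NOTE: the release target only prepares the working copy (frozen CI images,
+# no .dev1 suffix, regenerated CORE_VERSION_MAP.cpp and CONTRIBUTORS.md);
+# committing and pushing the result is left to the maintainer.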
+
+# targets for creating a new version (after making a release, to start the next
+# development cycle)
+bump-otio-minor-version:
+	@python maintainers/bump_version_number.py -i minor
+
+shuffle-core-version-map:
+	@cp -f src/opentimelineio/CORE_VERSION_MAP.cpp \
+		src/opentimelineio/CORE_VERSION_MAP.last.cpp
+	@echo "set the current version map as the next one"
+
+add-dev-suffix:
+	@echo "Adding .dev1 suffix"
+	@python maintainers/remove_dev_suffix.py -a
+
+# make target for starting a new version (after a release is completed)
+start-dev-new-minor-version: \
+	check-git-status \
+	unfreeze-ci-versions \
+	bump-otio-minor-version \
+	shuffle-core-version-map \
+	add-dev-suffix \
+	dev-python-install \
+	version-map-update \
+	test-core
+	@echo "New version made. Commit, push and open a PR!"
diff --git a/OTIO_VERSION.json b/OTIO_VERSION.json
new file mode 100644
index 000000000..bdef9208a
--- /dev/null
+++ b/OTIO_VERSION.json
@@ -0,0 +1 @@
+{"version": ["0", "15", "0"]}
diff --git a/examples/sample_plugin/otio_counter/__init__.py b/examples/sample_plugin/otio_counter/__init__.py
index 0d0c0ac60..738f673af 100644
--- a/examples/sample_plugin/otio_counter/__init__.py
+++ b/examples/sample_plugin/otio_counter/__init__.py
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright Contributors to the OpenTimelineIO project
 import pkg_resources
 
 from opentimelineio.plugins import manifest
diff --git a/examples/sample_plugin/otio_counter/adapter.py b/examples/sample_plugin/otio_counter/adapter.py
index 0608db011..7c9a422cb 100644
--- a/examples/sample_plugin/otio_counter/adapter.py
+++ b/examples/sample_plugin/otio_counter/adapter.py
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright Contributors to the OpenTimelineIO project
 import opentimelineio as otio
 
 """
diff --git a/examples/sample_plugin/setup.py b/examples/sample_plugin/setup.py
index 25ee2ed4c..d7f925738 100644
--- a/examples/sample_plugin/setup.py
+++ b/examples/sample_plugin/setup.py
@@ -1,3 +1,5 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright Contributors to the OpenTimelineIO project
 from setuptools import setup
 
 """
diff --git a/maintainers/bump_version_number.py b/maintainers/bump_version_number.py
new file mode 100644
index 000000000..98527aec4
--- /dev/null
+++ b/maintainers/bump_version_number.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+#
+# SPDX-License-Identifier: Apache-2.0
+# Copyright Contributors to the OpenTimelineIO project
+
+__doc__ = """Manage and apply the version in the OTIO_VERSION.json file"""
+
+import argparse
+import sys
+import json
+
+OTIO_VERSION_JSON_PATH = "OTIO_VERSION.json"
+
+
+def version():
+    with open(OTIO_VERSION_JSON_PATH, 'r') as fi:
+        return json.load(fi)['version']
+
+
+def _parsed_args():
+    parser = argparse.ArgumentParser(
+        description='Manage the version number in OTIO_VERSION.json.'
+    )
+
+    op_grp = parser.add_mutually_exclusive_group(required=True)
+    op_grp.add_argument(
+        "-i",
+        "--increment",
+        type=str,
+        default=None,
+        choices=("major", "minor", "patch"),
+        help="Increment the major, minor, or patch version number."
+    )
+    op_grp.add_argument(
+        "-s",
+        "--set",
+        type=str,
+        default=None,
+        nargs=3,
+        help="Set the version string, in the form of MAJOR MINOR PATCH"
+    )
+    op_grp.add_argument(
+        "-q",
+        "--query",
+        default=False,
+        action="store_true",
+        help="Query/print the current version without changing it"
+    )
+    parser.add_argument(
+        "-d",
+        "--dryrun",
+        default=False,
+        action="store_true",
+        help="Perform actions but modify no files on disk."
+    )
+    return parser.parse_args()
+
+
+def main():
+    args = _parsed_args()
+
+    major, minor, patch = (int(v) for v in version())
+
+    if args.increment == "major":
+        major += 1
+        minor = 0
+        patch = 0
+    elif args.increment == "minor":
+        minor += 1
+        patch = 0
+    elif args.increment == "patch":
+        patch += 1
+    elif args.set:
+        major, minor, patch = args.set
+    elif args.query:
+        print(".".join(str(v) for v in (major, minor, patch)))
+        return
+
+    print("Setting version to: {}.{}.{}".format(major, minor, patch))
+
+    # update the OTIO_VERSION file
+    with open(OTIO_VERSION_JSON_PATH, "w") as fo:
+        fo.write(
+            json.dumps({"version": [str(v) for v in (major, minor, patch)]})
+        )
+    print("Updated {}".format(OTIO_VERSION_JSON_PATH))
+
+    # update the CMakeLists.txt
+    with open("CMakeLists.txt", 'r') as fi:
+        cmake_input = fi.read()
+
+    cmake_output = []
+    key_map = {"MAJOR": major, "MINOR": minor, "PATCH": patch}
+    for ln in cmake_input.split("\n"):
+        for label, new_value in key_map.items():
+            if "set(OTIO_VERSION_{} \"".format(label) in ln:
+                cmake_output.append(
+                    "set(OTIO_VERSION_{} \"{}\")".format(label, new_value)
+                )
+                break
+        else:
+            cmake_output.append(ln)
+
+    with open("CMakeLists.txt", 'w') as fo:
+        fo.write("\n".join(cmake_output))
+    print("Updated {}".format("CMakeLists.txt"))
+
+    # update the setup.py
+    with open("setup.py", 'r') as fi:
+        setup_input = fi.read()
+
+    setup_output = []
+    for ln in setup_input.split("\n"):
+        if "\"version\": " in ln:
+            setup_output.append(
+                "    \"version\": \"{}.{}.{}{}\",".format(
+                    major,
+                    minor,
+                    patch,
+                    (".dev1" in ln) and ".dev1" or ""
+                )
+            )
+        else:
+            setup_output.append(ln)
+
+    with open("setup.py", 'w') as fo:
+        fo.write("\n".join(setup_output))
+    print("Updated {}".format("setup.py"))
+
+
+def add_suffix(content, version):
+    if version not in content:
+        sys.stderr.write(
+            "Version {} not found, suffix may have already been "
+            "added.\n".format(version)
+        )
+        return False
+
+    print("adding suffix, version will be: {}".format(version + ".dev1"))
+    new_content = content.replace(version, version + ".dev1")
+    return new_content
+
+
+def remove_suffix(content, version):
+    if version + '.dev1' not in content:
+        sys.stderr.write(
+            "Version+Suffix {} not found, suffix may have already been "
+            "removed.\n".format(version + '.dev1')
+        )
+        return False
+
+    new_content = content.replace(version + '.dev1', version)
+    return new_content
+
+
+if __name__ == "__main__":
+    main()
diff --git a/maintainers/fetch_contributors.py b/maintainers/fetch_contributors.py
index 22851059c..1a055b1c2 100755
--- a/maintainers/fetch_contributors.py
+++ b/maintainers/fetch_contributors.py
@@ -6,6 +6,9 @@
 import argparse
 import json
 import urllib.request
+import os
+
+CONTRIBUTORS_FILE = "CONTRIBUTORS.md"
 
 
 def parse_args():
@@ -20,7 +23,8 @@
     )
     parser.add_argument(
         '--token',
-        required=True,
+        required=False,
+        default=None,
         help='GitHub personal access token, used for authorization.'
             ' Get one here: https://github.com/settings/tokens/new'
     )
 
@@ -30,6 +34,13 @@
 def main():
     args = parse_args()
 
+    token = args.token or os.environ.get("OTIO_RELEASE_GITHUB_TOKEN")
+    if not token:
+        raise RuntimeError(
+            "Error: a github token is required to run {}. Either pass it in "
+            "via --token or set $OTIO_RELEASE_GITHUB_TOKEN".format(__file__)
+        )
+
     # Note: un-authenticated requests have a strict rate limit.
     # We avoid this by using authentication for all our requests,
     # even the ones that don't need it.
@@ -43,23 +54,32 @@
     #     response = urllib.request.urlopen(request).read().decode('utf-8')
     #     print("Rate limit: {}".format(response))
 
+    with open(CONTRIBUTORS_FILE, 'r') as fi:
+        input_contributors = fi.read()
+
     request = urllib.request.Request(
         "https://api.github.com/repos/{}/stats/contributors".format(args.repo),
-        headers={"Authorization": "token {}".format(args.token)}
+        headers={"Authorization": "token {}".format(token)}
     )
     response = urllib.request.urlopen(request).read().decode('utf-8')
-    contributors = json.loads(response)
+    # this just ensures that response is really waited on so that json.loads
+    # works
+    print("Response size: {}".format(len(response)))
+
+    contributors = json.loads(response[:])
 
     output_lines = []
     if not contributors:
         print("No contributors found, something likely went wrong.")
+        print(response)
 
     for contributor in contributors:
         login = contributor['author']['login']
         url = contributor['author']['html_url']
+        total = contributor['total']
 
         request = urllib.request.Request(
             "https://api.github.com/users/{}".format(login),
@@ -70,10 +90,45 @@
         user = json.loads(response)
         name = user['name'] or "?"
 
-        # Print the output in markdown format
-        output_lines.append("* {} ([{}]({}))".format(name, login, url))
+        if (
+            login not in input_contributors
+            and name not in input_contributors
+            and "?" not in name
+        ):
+            print("Missing: {} [{}] # commits: {}".format(name, login, total))
+
+            # Print the output in markdown format
+            output_lines.append("* {} ([{}]({}))".format(name, login, url))
+
+    if output_lines:
+        # split the input_contributors into preamble and contributors list
+        split_contribs = input_contributors.split('\n')
+
+        header = []
+        body = []
+        in_body = False
+        for ln in split_contribs:
+            if not in_body and ln.startswith("* "):
+                in_body = True
+
+            if not in_body:
+                header.append(ln)
+                continue
+
+            if ln.strip():
+                body.append(ln)
+
+        body.extend(output_lines)
+        body.sort(key=lambda v: v.lower())
+
+        result = '\n'.join(header + body)
+
+        with open(CONTRIBUTORS_FILE, 'w') as fo:
+            fo.write(result)
+    else:
+        print("All contributors present in {}".format(CONTRIBUTORS_FILE))
 
-    print("\n".join(sorted(output_lines, key=str.casefold)))
+    # print("\n".join(sorted(output_lines, key=str.casefold)))
 
 
 if __name__ == '__main__':
diff --git a/maintainers/freeze_ci_versions.py b/maintainers/freeze_ci_versions.py
new file mode 100644
index 000000000..53063784b
--- /dev/null
+++ b/maintainers/freeze_ci_versions.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+#
+# SPDX-License-Identifier: Apache-2.0
+# Copyright Contributors to the OpenTimelineIO project
+
+__doc__ = """Freeze and unfreeze image versions for CI, part of the release
+process.
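+Freezing rewrites the "<platform>-latest" runner labels in
+.github/workflows/python-package.yml to the concrete image names published in
+the actions/runner-images README; unfreezing switches them back to -latest.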
+ +""" + +import argparse +import re +import urllib.request + +CI_WORKFLOW_FP = ".github/workflows/python-package.yml" +GITHUB_README_URL = ( + "https://raw.githubusercontent.com/actions/runner-images/main/README.md" +) +PLATFORMS = ["ubuntu", "macos", "windows"] + + +def _parsed_args(): + parser = argparse.ArgumentParser( + description='Fetch a list of contributors for a given GitHub repo.' + ) + + op_grp = parser.add_mutually_exclusive_group(required=True) + op_grp.add_argument( + "-f", + "--freeze", + default=False, + action="store_true", + help="freeze the ci version from latest to their version." + ) + op_grp.add_argument( + "-u", + "--unfreeze", + default=False, + action="store_true", + help="unfreeze the ci version from the version back to latest." + ) + parser.add_argument( + "-d", + "--dryrun", + default=False, + action="store_true", + help="Perform actions but modify no files on disk." + ) + return parser.parse_args() + + +def main(): + args = _parsed_args() + + request = urllib.request.Request(GITHUB_README_URL) + response = urllib.request.urlopen(request).read().decode('utf-8') + + # HACK: pull the image version corresponding to -latest out of the + # README.md for the github repo where they are stored + lines = response.split("\n") + plat_map = {} + for plat in PLATFORMS: + plat_latest = plat + "-latest" + for ln in lines: + if plat_latest not in ln: + continue + plat_map[plat] = ( + re.match(".*(" + plat + "-.*)`.*", ln).groups(0)[0] + ) + + if args.freeze: + freeze_ci(plat_map, args.dryrun) + + if args.unfreeze: + unfreeze_ci(plat_map, args.dryrun) + + +def freeze_ci(plat_map, dryrun=False): + modified = False + with open(CI_WORKFLOW_FP, 'r') as fi: + output_content = fi.read() + + for plat in plat_map: + plat_latest = plat + "-latest" + if plat_latest not in output_content: + print("Platform {} appears to already be frozen.".format(plat)) + continue + + output_content = output_content.replace(plat_latest, plat_map[plat]) + modified = True + print("Platform {} frozen to version: {}".format(plat, plat_map[plat])) + + if modified and not dryrun: + with open(CI_WORKFLOW_FP, 'w') as fo: + fo.write(output_content) + return True + + return False + + +def unfreeze_ci(plat_map, dryrun=False): + modified = False + with open(CI_WORKFLOW_FP, 'r') as fi: + output_content = fi.read() + + for plat, plat_current in plat_map.items(): + plat_latest = plat + "-latest" + if plat_current not in output_content: + print( + "Platform {} appears to already be set to -latest.".format( + plat + ) + ) + continue + + output_content = output_content.replace(plat_current, plat_latest) + modified = True + print("Platform {} unfrozen back to: {}".format(plat, plat_latest)) + + if modified and not dryrun: + with open(CI_WORKFLOW_FP, 'w') as fo: + fo.write(output_content) + return True + + return False + + +if __name__ == "__main__": + main() diff --git a/maintainers/remove_dev_suffix.py b/maintainers/remove_dev_suffix.py new file mode 100644 index 000000000..082fbcb1e --- /dev/null +++ b/maintainers/remove_dev_suffix.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python +# +# SPDX-License-Identifier: Apache-2.0 +# Copyright Contributors to the OpenTimelineIO project + +__doc__ = """Strip or add the .dev1 suffix, part of the release process""" + +import argparse +import sys + +TARGET_FILES = [ + "setup.py" +] + + +def _parsed_args(): + parser = argparse.ArgumentParser( + description='Fetch a list of contributors for a given GitHub repo.' 
+    )
+
+    op_grp = parser.add_mutually_exclusive_group(required=True)
+    op_grp.add_argument(
+        "-a",
+        "--add",
+        default=False,
+        action="store_true",
+        help="add the dev1 suffix to the version"
+    )
+    op_grp.add_argument(
+        "-r",
+        "--remove",
+        default=False,
+        action="store_true",
+        help="remove the dev1 suffix from the version"
+    )
+    parser.add_argument(
+        "-d",
+        "--dryrun",
+        default=False,
+        action="store_true",
+        help="Perform actions but modify no files on disk."
+    )
+    return parser.parse_args()
+
+
+def _target_version():
+    import bump_version_number
+    return ".".join(bump_version_number.version())
+
+
+def main():
+    args = _parsed_args()
+
+    version = _target_version()
+
+    for fp in TARGET_FILES:
+        with open(fp, 'r') as fi:
+            content = fi.read()
+
+        if args.add:
+            modified = add_suffix(content, version)
+        elif args.remove:
+            modified = remove_suffix(content, version)
+
+        if modified and not args.dryrun:
+            with open(fp, 'w') as fo:
+                fo.write(modified)
+            print("Wrote modified {}.".format(fp))
+
+
+def add_suffix(content, version):
+    if version not in content:
+        sys.stderr.write(
+            "Version {} not found, suffix may have already been "
+            "added.\n".format(version)
+        )
+        return False
+
+    print("adding suffix, version will be: {}".format(version + ".dev1"))
+    return content.replace(version, version + ".dev1")
+
+
+def remove_suffix(content, version):
+    if version + '.dev1' not in content:
+        sys.stderr.write(
+            "Version+Suffix {} not found, suffix may have already been "
+            "removed.\n".format(version + '.dev1')
+        )
+        return False
+
+    print("removing suffix, version will be: {}".format(version))
+    return content.replace(version + '.dev1', version)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/maintainers/verify_license.py b/maintainers/verify_license.py
index 4767bed61..12818f142 100755
--- a/maintainers/verify_license.py
+++ b/maintainers/verify_license.py
@@ -6,10 +6,11 @@
 __doc__ = """The utility script checks to make sure that all of the source
 files in the OpenTimelineIO project have the correct license header."""
 
+import argparse
 import os
 import sys
 
-licenses = {
+LICENSES = {
     ".py": """# SPDX-License-Identifier: Apache-2.0
 # Copyright Contributors to the OpenTimelineIO project
 """,
@@ -30,31 +31,108 @@
 """
 }
 
-yes = 0
-no = 0
-total = 0
-
-for root, dirs, files in os.walk("."):
-    for filename in files:
-        fullpath = os.path.join(root, filename)
-        for ext, license in licenses.items():
-            if filename.endswith(ext):
-                total += 1
-                try:
-                    content = open(fullpath, 'r').read()
-                except Exception as ex:
-                    print("ERROR: Unable to read file: {}\n{}".format(
-                        fullpath, ex))
-                    # raise
-                    continue
-                if len(content) > 10 and license not in content:
-                    print("MISSING: {}".format(fullpath))
-                    no += 1
-                else:
-                    yes += 1
-
-print("{} of {} files have the correct license.".format(yes, total))
-
-if no != 0:
-    print("ERROR: {} files do NOT have the correct license.".format(no))
-    sys.exit(1)
+# dependencies and build dir do not need to be checked
+SKIP_DIRS = [
+    os.path.join("src", "deps"),
+    "build",
+    ".git",
+]
+
+
+def _parsed_args():
+    """ parse commandline arguments with argparse """
+
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter
+    )
+    parser.add_argument(
+        '-s',
+        '--start-dir',
+        default='.',
+        type=str,
+        help=("Directory to start searching for files in.")
+    )
+    parser.add_argument(
+        '-f',
+        '--fix',
+        default=False,
+        action="store_true",
+        help="Fix licenses in place when possible"
+    )
+
+    return parser.parse_args()
+
+
+def main():
+    correct_license = 0
+    incorrect_license = 0
+    total = 0
+
+    args = _parsed_args()
+
+    for root, dirs, files in os.walk(args.start_dir):
+        for filename in files:
+            # make sure the dependencies aren't checked
+            if any(d in root for d in SKIP_DIRS):
+                continue
+            fullpath = os.path.join(root, filename)
+            for ext, lic in LICENSES.items():
+                if filename.endswith(ext):
+                    total += 1
+                    try:
+                        content = open(fullpath, 'r').read()
+                    except Exception as ex:
+                        sys.stderr.write(
+                            "ERROR: Unable to read file: {}\n{}".format(
+                                fullpath,
+                                ex
+                            )
+                        )
+                        continue
+
+                    if len(content) > 0 and lic not in content:
+                        print("MISSING: {}".format(os.path.relpath(fullpath)))
+                        if args.fix:
+                            content = LICENSES[os.path.splitext(fullpath)[1]]
+                            with open(fullpath, 'r') as fi:
+                                content += fi.read()
+                            with open(fullpath, 'w') as fo:
+                                fo.write(content)
+                            print(
+                                "...FIXED: {}".format(
+                                    os.path.relpath(fullpath)
+                                )
+                            )
+                        incorrect_license += 1
+                    else:
+                        correct_license += 1
+
+    print(
+        "{} of {} files have the correct license.".format(
+            correct_license,
+            total
+        )
+    )
+
+    if incorrect_license != 0:
+        if not args.fix:
+            raise RuntimeError(
+                "ERROR: {} files do NOT have the correct license.\n".format(
+                    incorrect_license
+                )
+            )
+        else:
+            print(
+                "{} files had the correct license added.".format(
+                    incorrect_license
+                )
+            )
+
+
+if __name__ == "__main__":
+    try:
+        main()
+    except RuntimeError as err:
+        sys.stderr.write(err.args[0])
+        sys.exit(1)
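
Below is a minimal, illustrative sketch (not part of the patch) of how the suffix helpers added in maintainers/remove_dev_suffix.py are expected to behave. The setup.py fragment and version string are made up, and the snippet assumes it is run from the repository root so that maintainers/ can be placed on sys.path.

# Round-trip the .dev1 suffix helpers on a hypothetical setup.py fragment.
import sys
sys.path.insert(0, "maintainers")  # assumption: cwd is the repository root

from remove_dev_suffix import add_suffix, remove_suffix

fragment = '    "version": "0.15.0",'         # hypothetical setup.py line
with_dev = add_suffix(fragment, "0.15.0")     # appends ".dev1" to the version
assert with_dev == '    "version": "0.15.0.dev1",'

restored = remove_suffix(with_dev, "0.15.0")  # strips ".dev1" again
assert restored == fragment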