-
Notifications
You must be signed in to change notification settings - Fork 2k
/
update_versions.py
327 lines (294 loc) · 18.2 KB
/
update_versions.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# Python version 3.4 or higher is required to run this script.
# Use case: Update all the versions in README.md and pom.xml files based on
# the versions in versions_[client|data|management].txt, external_dependencies.txt
#
# It's worth noting that there are 3 update types, library, external_dependencies and all. 'All' means update both the libraries
# for the track selected as well as the external_dependencies.
#
# python eng/versioning/update_versions.py --update-type [library|external_dependency|all] --build-type [client|data|management]
# For example: To update the library versions for the client track without touching the README files
# python eng/versioning/update_versions.py --ut library --bt client --sr
#
# Use case: Update the versions in a particular file
#
# python eng/versioning/update_versions.py --update-type [library|external_dependency|all] --build-type [client|data|management] --target-file pom-file-to-update
# For example: To update all versions for the client track for a given pom file
# python eng/versioning/update_versions.py --ut all --bt client --tf <pathToPomFile>\pom.xml
#
# Use case: Update the external_dependencies
#
# python eng/versioning/update_versions.py --update-type [library|external_dependency|all] --build-type [client|data|management] --target-file pom-file-to-update
# For example: To update all versions for the client track for a given pom file. While the skip readme flag isn't entirely
# necessary here, since our README.md files don't contain external dependency versions, there's no point in scanning files
# that shouldn't require changes.
# python eng/versioning/update_versions.py --ut external_dependency --sr
#
# The script must be run at the root of azure-sdk-for-java.
import argparse
from datetime import timedelta
import json
import os
import re
import sys
import time
import traceback
from utils import BuildType
from utils import CodeModule
from utils import external_dependency_version_regex
from utils import external_dependency_include_regex
from utils import run_check_call
from utils import UpdateType
from utils import include_update_marker
from utils import version_regex_str_no_anchor
from utils import version_update_start_marker
from utils import version_update_end_marker
from utils import version_update_marker
import xml.etree.ElementTree as ET
exception_list = []
def update_versions(update_type, version_map, ext_dep_map, target_file, skip_readme, auto_version_increment, library_array):
    """Rewrite tagged version numbers in a single file, in place.

    Scans target_file line by line for the in-file version tags matched by the
    regexes imported from utils (single-line markers, include/allowlist markers
    and markdown start/end marker pairs) and substitutes the version recorded in
    version_map or ext_dep_map for the tagged module.

    Args:
        update_type: UpdateType value; external_dependency tags are skipped when
            this is UpdateType.library.
        version_map: dict of module name -> CodeModule for library versions.
        ext_dep_map: dict of module name -> CodeModule for external dependencies.
        target_file: path of the file to update.
        skip_readme: influences whether the changelog update is attempted after
            a pom change (see the auto_version_increment check below).
        auto_version_increment: True when this run follows an automatic version
            increment; forwarded to update_changelog.
        library_array: optional list of groupId:artifactId names; when non-empty,
            markdown start-marker blocks for other libraries are left untouched.

    Any exception is appended to the module-level exception_list instead of
    being raised, so a single bad file does not stop the overall run.
    """
    newlines = []
    # repl_open: inside a multi-line markdown update block (start/end markers).
    # repl_thisline: the current line needs a version substitution.
    repl_open, repl_thisline, file_changed, is_include = False, False, False, False
    print('processing: ' + target_file)
    try:
        with open(target_file, encoding='utf-8') as f:
            for line in f:
                is_include = False
                repl_thisline = repl_open
                match = version_update_marker.search(line)
                if match and not target_file.endswith('.md'):
                    # Single-line version tag; not honored inside markdown files.
                    module_name, version_type = match.group(1), match.group(2)
                    repl_thisline = True
                elif include_update_marker.search(line):
                    # Include/allowlist tag; uses a different replacement regex below.
                    match = include_update_marker.search(line)
                    module_name, version_type = match.group(1), match.group(2)
                    repl_thisline = True
                    is_include = True
                else:
                    match = version_update_start_marker.search(line)
                    if match:
                        module_name, version_type = match.group(1), match.group(2)
                        # only update the version in the MD file if the module is in the list or the list is empty
                        if len(library_array) == 0 or module_name in library_array:
                            repl_open, repl_thisline = True, True
                        else:
                            repl_open, repl_thisline = False, False
                    else:
                        match = version_update_end_marker.search(line)
                        if match:
                            repl_open, repl_thisline = False, False
                if repl_thisline:
                    # If the module isn't found then just continue. This can happen if we're going through and updating
                    # library versions for one track and tag entry is for another track or if we're only updating
                    # external_dependency versions.
                    if module_name not in version_map and (version_type == 'current' or version_type == 'dependency'):
                        newlines.append(line)
                        continue
                    new_version = ''
                    if version_type == 'current':
                        try:
                            module = version_map[module_name]
                            new_version = module.current
                            newline = re.sub(version_regex_str_no_anchor, new_version, line)
                        except (KeyError, AttributeError):
                            # This can happen when a dependency is an unreleased_ or beta_ dependency and the tag is current instead of dependency
                            raise ValueError('Module: {0} does not have a current version.\nFile={1}\nLine={2}'.format(module_name, target_file, line))
                    elif version_type == 'dependency':
                        try:
                            module = version_map[module_name]
                            new_version = module.dependency
                            newline = re.sub(version_regex_str_no_anchor, new_version, line)
                        except (KeyError, AttributeError):
                            # This should never happen unless the version file is malformed
                            raise ValueError('Module: {0} does not have a dependency version.\nFile={1}\nLine={2}'.format(module_name, target_file, line))
                    elif version_type == 'external_dependency':
                        # The external dependency map will be empty if the update type is library
                        if update_type == UpdateType.library:
                            newlines.append(line)
                            continue
                        if is_include:
                            try:
                                module = ext_dep_map[module_name]
                                new_include_version = module.string_for_allowlist_include()
                                newline = re.sub(external_dependency_include_regex, new_include_version, line)
                            except (KeyError, AttributeError):
                                raise ValueError('Module: {0} does not have an external dependency version.\nFile={1}\nLine={2}'.format(module_name, target_file, line))
                        else:
                            try:
                                module = ext_dep_map[module_name]
                                new_version = module.external_dependency
                                newline = re.sub(external_dependency_version_regex, new_version, line)
                            except (KeyError, AttributeError):
                                raise ValueError('Module: {0} does not have an external dependency version.\nFile={1}\nLine={2}'.format(module_name, target_file, line))
                    else:
                        raise ValueError('Invalid version type: {} for module: {}.\nFile={}\nLine={}'.format(version_type, module_name, target_file, line))
                    newlines.append(newline)
                    if line != newline:
                        file_changed = True
                else:
                    newlines.append(line)
                # Once outside a markdown update block, forget the tag state so a
                # stale module_name cannot leak into subsequent lines.
                if not repl_open:
                    module_name, version_type = '', ''
        if file_changed:
            with open(target_file, 'w', encoding='utf-8') as f:
                for line in newlines:
                    f.write(line)
            # If the pom file changed check and see if we need to add a version line to the Changelog
            file_name = os.path.basename(target_file)
            if ((auto_version_increment or not skip_readme) and (file_name.startswith('pom.') and file_name.endswith('.xml'))):
                update_changelog(target_file, auto_version_increment, library_array)
    except Exception as e:
        exception_list.append(e)
# Updating the changelog is special. Grab the version from the respective pom file
def update_changelog(pom_file, is_increment, library_array):
    """Run the shared Update-ChangeLog.ps1 script for the library owning pom_file.

    Reads the version, groupId and artifactId from the pom, and only proceeds
    when a CHANGELOG.md sits next to the pom and the library passes the optional
    groupId:artifactId filter.

    Args:
        pom_file: path to the pom.xml whose version was just rewritten.
        is_increment: True when called after an automatic version increment;
            controls the -ReplaceLatestEntryTitle switch passed to the script.
        library_array: optional list of groupId:artifactId names; empty means
            every library qualifies.
    """
    pom_dir = os.path.dirname(pom_file)
    changelog_path = os.path.join(pom_dir, "CHANGELOG.md")
    # Nothing to do unless a changelog lives alongside the pom file.
    if not os.path.isfile(changelog_path):
        print('There is no CHANGELOG.md file in {}, skipping update'.format(pom_dir))
        return
    pom_root = ET.parse(pom_file).getroot()
    pom_ns = '{http://maven.apache.org/POM/4.0.0}'
    release_version = pom_root.find(pom_ns + 'version').text
    full_name = pom_root.find(pom_ns + 'groupId').text + ":" + pom_root.find(pom_ns + 'artifactId').text
    # Honor the optional library filter; an empty filter accepts everything.
    if library_array and full_name not in library_array:
        return
    ps_script = os.path.join(".", "eng", "common", "scripts", "Update-ChangeLog.ps1")
    update_commands = [
        "pwsh",
        ps_script,
        "--Version",
        release_version,
        "--ChangeLogPath",
        changelog_path,
        # A release is being prepped when is_increment is false; the latest
        # entry title is then replaced with the current version.
        "--Unreleased:$true",
        "--ReplaceLatestEntryTitle:$" + str(not is_increment)
    ]
    # Run script to update change log.
    run_check_call(update_commands, '.')
def load_version_map_from_file(the_file, version_map):
    """Parse a versioning text file and populate version_map with CodeModules.

    Blank lines and '#' comments are ignored. Raises ValueError on duplicate
    module names, or when a beta_/unreleased_ entry has no matching plain entry
    earlier in the file.

    Args:
        the_file: path of the version_<track>.txt or external_dependencies.txt file.
        version_map: dict of module name -> CodeModule, updated in place.
    """
    with open(the_file) as version_file:
        for raw in version_file:
            entry = raw.strip()
            # Skip blanks and comments; they carry no version information.
            if not entry or entry.startswith('#'):
                continue
            module = CodeModule(entry)
            # Duplicate names would make later lookups ambiguous.
            if module.name in version_map:
                raise ValueError('Version file: {0} contains a duplicate entry: {1}'.format(the_file, module.name))
            # A beta_/unreleased_ entry must shadow an already-loaded plain entry.
            for prefix in ('beta_', 'unreleased_'):
                if module.name.startswith(prefix):
                    base_name = module.name[len(prefix):]
                    if base_name not in version_map:
                        raise ValueError('Version file: {0} does not contain a non-beta or non-unreleased entry for beta_/unreleased_ library: {1}'.format(the_file, module.name))
                    break
            version_map[module.name] = module
def load_version_overrides(the_file, version_map, overrides_name):
    """Apply one named override set from a JSON file to version_map.

    The JSON maps override-set names to lists of single-entry objects
    ({"module": "version"}). Each entry replaces (or adds) the corresponding
    CodeModule in version_map.

    Raises:
        ValueError: when overrides_name is absent from the file, or an override
            object does not contain exactly one module.
    """
    with open(the_file) as json_file:
        data = json.load(json_file)
        if overrides_name not in data:
            raise ValueError('Version override name: {0} is not found in {1}'.format(overrides_name, the_file))
        for override in data[overrides_name]:
            # Each override object must map exactly one module to its version.
            if len(override) != 1:
                raise ValueError('Expected exactly one module, but got: {0}'.format(override))
            name = next(iter(override))
            module = CodeModule(name + ";" + override[name])
            version_map[module.name] = module
def display_version_info(version_map):
    """Echo every entry of version_map (its str() form) to stdout, one per line."""
    for module in version_map.values():
        print(module)
def update_versions_all(update_type, build_type, target_file, skip_readme, auto_version_increment, library_array, version_overrides, include_perf_tests):
    """Load the version maps, then update one target file or the whole tree.

    Args:
        update_type: UpdateType selecting library, external_dependency or all.
        build_type: BuildType (client/data/management/none); selects which
            eng/versioning/version_<track>.txt file is loaded.
        target_file: optional single file to update; when falsy, the current
            directory tree is walked for poms, READMEs and perf-test yaml files.
        skip_readme: when True, .md files are not scanned.
        auto_version_increment: forwarded to update_versions/update_changelog.
        library_array: optional groupId:artifactId filter for markdown updates.
        version_overrides: optional name of an override set defined in
            supported_external_dependency_versions.json.
        include_perf_tests: when True, perf-tests yml/yaml files are updated too.
    """
    version_map = {}
    ext_dep_map = {}
    # Load the version and/or external dependency file for the given UpdateType
    # into the version_map. If UpdateType.all is selected then versions for both
    # the libraries and external dependencies are being updated.
    if update_type == UpdateType.library or update_type == UpdateType.all:
        version_file = os.path.normpath('eng/versioning/version_' + build_type.name + '.txt')
        load_version_map_from_file(version_file, version_map)
    if update_type == UpdateType.external_dependency or update_type == UpdateType.all:
        dependency_file = os.path.normpath('eng/versioning/external_dependencies.txt')
        load_version_map_from_file(dependency_file, ext_dep_map)
        if version_overrides and not version_overrides.startswith('$'):
            # Azure DevOps passes '$(VersionOverrides)' when the variable value is not set
            load_version_overrides("eng/versioning/supported_external_dependency_versions.json", ext_dep_map, version_overrides)
    # The dependency files are always loaded but reporting their information is based on the update type.
    if update_type == UpdateType.library or update_type == UpdateType.all:
        print('version_file=' + version_file)
        display_version_info(version_map)
    if update_type == UpdateType.external_dependency or update_type == UpdateType.all:
        print('external_dependency_file=' + dependency_file)
        display_version_info(ext_dep_map)
    if target_file:
        # Single-file mode: only the requested file is touched.
        update_versions(update_type, version_map, ext_dep_map, target_file, skip_readme, auto_version_increment, library_array)
    else:
        # Tree mode: walk everything under the current directory.
        for root, _, files in os.walk("."):
            for file_name in files:
                file_path = root + os.sep + file_name
                if (file_name.endswith('.md') and not skip_readme) or (file_name.startswith('pom') and file_name.endswith('.xml')):
                    update_versions(update_type, version_map, ext_dep_map, file_path, skip_readme, auto_version_increment, library_array)
                elif (file_name.startswith('perf-tests') and (file_name.endswith('.yaml') or file_name.endswith('.yml')) and include_perf_tests):
                    # Perf-test yaml files always receive all updates, with
                    # readme handling skipped and no auto-increment changelog.
                    update_versions(UpdateType.all, version_map, ext_dep_map, file_path, True, False, library_array)
    # This is a temporary stop gap to deal with versions hard coded in java files.
    # Everything within the begin/end tags below can be deleted once
    # https://github.com/Azure/azure-sdk-for-java/issues/7106 has been fixed.
    # version_*_java_files.txt
    # BEGIN:Versions_in_java_files
    if not target_file and BuildType.none != build_type:
        # the good thing here is that the java files only contain library versions, not
        # external versions
        version_java_file = os.path.normpath('eng/versioning/version_' + build_type.name + '_java_files.txt')
        if os.path.exists(version_java_file):
            with open(version_java_file) as f:
                for raw_line in f:
                    java_file_to_update = raw_line.strip()
                    if not java_file_to_update or java_file_to_update.startswith('#'):
                        continue
                    if os.path.isfile(java_file_to_update):
                        update_versions(update_type, version_map, ext_dep_map, java_file_to_update, skip_readme, auto_version_increment, library_array)
                    else:
                        # In pipeline contexts, files not local to the current SDK directory may not be checked out from git.
                        print(java_file_to_update + ' does not exist. Skipping')
        else:
            print(version_java_file + ' does not exist. Skipping.')
    # END:Versions_in_java_files
def main():
    """Parse arguments, run the requested version update and report any errors.

    Exits non-zero (via sys.exit) when any per-file update recorded an
    exception in the module-level exception_list.
    """
    parser = argparse.ArgumentParser(description='Replace version numbers in poms and READMEs.')
    parser.add_argument('--update-type', '--ut', type=UpdateType, choices=list(UpdateType))
    parser.add_argument('--build-type', '--bt', nargs='?', type=BuildType, choices=list(BuildType), default=BuildType.none)
    parser.add_argument('--skip-readme', '--sr', action='store_true', help='Skip updating of readme files if argument is present')
    parser.add_argument('--target-file', '--tf', nargs='?', help='File to update (optional) - all files in the current directory and subdirectories are scanned if omitted')
    parser.add_argument('--auto-version-increment', '--avi', action='store_true', help='If this script is being run after an auto version increment, add changelog entry for new version')
    # Comma separated list artifacts, has to be split into an array. If we're not skipping README updates, only update MD files for entries for the list of libraries passed in
    parser.add_argument('--library-list', '--ll', nargs='?', help='(Optional) Comma seperated list of groupId:artifactId. If updating MD files, only update entries in this list.')
    parser.add_argument('--version_override', '--vo', nargs='?', help='(Optional) identifier of version update configuratation matching (exactly) first-level identifier in supported_external_dependency_versions.json')
    parser.add_argument('--include-perf-tests', '--ipt', action='store_true', help='Whether perf-tests.yml/perf-tests.yaml files are included in the update')
    args = parser.parse_args()
    # The management track is not wired up yet; fail fast rather than mis-update.
    if args.build_type == BuildType.management:
        raise ValueError('{} is not currently supported.'.format(BuildType.management.name))
    start_time = time.time()
    library_array = []
    if args.library_list:
        library_array = args.library_list.split(',')
    print('library_array length: {0}'.format(len(library_array)))
    print(library_array)
    update_versions_all(args.update_type, args.build_type, args.target_file, args.skip_readme, args.auto_version_increment, library_array, args.version_override, args.include_perf_tests)
    elapsed_time = time.time() - start_time
    print('elapsed_time={}'.format(elapsed_time))
    print('Total time for replacement: {}'.format(str(timedelta(seconds=elapsed_time))))
    # Surface every exception gathered during processing, then exit non-zero.
    if len(exception_list) > 0:
        for ex in exception_list:
            print("ERROR: " + str(ex))
        sys.exit('There were replacement errors. All errors are immediately above this message.')

if __name__ == '__main__':
    main()