Update shipping_year when milestones are edited. #4397

Merged (2 commits), Sep 27, 2024
Changes from 1 commit
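At a glance, this change does two things: it moves fetch_chrome_release_info from api/channels_api.py into internals/fetchchannels.py, and it makes feature PATCH requests recompute shipping_year when edited stages carry shipping milestones. _patch_update_stages now returns the modified Stage entities rather than a bool, and _patch_update_special_fields merges them with the stored shipping stages (stage_helpers.get_all_shipping_stages_with_milestones), finds the earliest shipping milestone (stage_helpers.find_earliest_milestone), and maps it to a calendar year (stage_helpers.look_up_year). The sketch below is a simplified, self-contained illustration of that flow, not the real code: the Stage dataclass, the stage-type values, and the milestone-to-year formula are invented stand-ins for the NDB model, STAGE_TYPES_SHIPPING, and the helpers in stage_helpers.

# Simplified sketch of the new shipping_year flow (stand-ins, not real code).
from dataclasses import dataclass, field
from typing import Optional

# Stand-in for STAGE_TYPES_SHIPPING.values(); the numbers are invented.
SHIPPING_STAGE_TYPES = {160, 260, 360, 460}


@dataclass
class Stage:
  id: int
  stage_type: int
  milestones: dict = field(default_factory=dict)  # e.g. {'desktop_first': 134}


def find_earliest_milestone(stages: list[Stage]) -> Optional[int]:
  """Stand-in for stage_helpers.find_earliest_milestone()."""
  values = [m for s in stages for m in s.milestones.values() if m]
  return min(values) if values else None


def look_up_year(milestone: int) -> int:
  """Stand-in for stage_helpers.look_up_year(); the real helper presumably
  consults the Chrome release schedule rather than this rough formula."""
  return 2024 + max(0, milestone - 130) // 7


def recompute_shipping_year(existing_shipping_stages: list[Stage],
                            updated_stages: list[Stage],
                            current_year: Optional[int]) -> Optional[int]:
  """Merge stored and freshly edited shipping stages, then derive the year of
  the earliest shipping milestone, mirroring _patch_update_special_fields."""
  by_id = {s.id: s for s in existing_shipping_stages}
  by_id.update({s.id: s for s in updated_stages
                if s.stage_type in SHIPPING_STAGE_TYPES})
  earliest = find_earliest_milestone(list(by_id.values()))
  if earliest:
    return look_up_year(earliest)
  return current_year


if __name__ == '__main__':
  stored = [Stage(id=1, stage_type=160, milestones={'desktop_first': 137})]
  edited = [Stage(id=2, stage_type=260, milestones={'android_first': 134})]
  print(recompute_shipping_year(stored, edited, current_year=None))  # 2024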
48 changes: 5 additions & 43 deletions api/channels_api.py
@@ -23,9 +23,6 @@
import settings


SCHEDULE_CACHE_TIME = 60 * 60 # 1 hour


def construct_chrome_channels_details():
omaha_data = fetchchannels.get_omaha_data()
channels = {}
@@ -34,71 +31,36 @@ def construct_chrome_channels_details():
for v in win_versions:
channel = v['channel']
major_version = int(v['version'].split('.')[0])
channels[channel] = fetch_chrome_release_info(major_version)
channels[channel] = fetchchannels.fetch_chrome_release_info(major_version)
channels[channel]['version'] = major_version

# Adjust for the brief period after the next milestone gets promoted to stable/beta
# channel and their major versions are the same.
if channels['stable']['version'] == channels['beta']['version']:
new_beta_version = channels['stable']['version'] + 1
channels['beta'] = fetch_chrome_release_info(new_beta_version)
channels['beta'] = fetchchannels.fetch_chrome_release_info(new_beta_version)
channels['beta']['version'] = new_beta_version
if channels['beta']['version'] == channels['dev']['version']:
new_dev_version = channels['beta']['version'] + 1
channels['dev'] = fetch_chrome_release_info(new_dev_version)
channels['dev'] = fetchchannels.fetch_chrome_release_info(new_dev_version)
channels['dev']['version'] = new_dev_version

# In the situation where some versions are in a gap between
# stable and beta, show one as 'stable_soon'.
if channels['stable']['version'] + 1 < channels['beta']['version']:
stable_soon_version = channels['stable']['version'] + 1
channels['stable_soon'] = fetch_chrome_release_info(stable_soon_version)
channels['stable_soon'] = fetchchannels.fetch_chrome_release_info(stable_soon_version)
channels['stable_soon']['version'] = stable_soon_version

return channels


def fetch_chrome_release_info(version):
key = 'chromerelease|%s' % version

data = rediscache.get(key)
if data is None:
url = ('https://chromiumdash.appspot.com/fetch_milestone_schedule?'
'mstone=%s' % version)
result = requests.get(url, timeout=60)
if result.status_code == 200:
try:
logging.info(
'result.content is:\n%s', result.content[:settings.MAX_LOG_LINE])
result_json = json.loads(result.content)
if 'mstones' in result_json:
data = result_json['mstones'][0]
del data['owners']
del data['feature_freeze']
del data['ldaps']
rediscache.set(key, data, time=SCHEDULE_CACHE_TIME)
except ValueError:
pass # Handled by next statement

if not data:
data = {
'stable_date': None,
'earliest_beta': None,
'latest_beta': None,
'mstone': version,
'version': version,
}
# Note: we don't put placeholder data into redis.

return data


def construct_specified_milestones_details(start, end):
channels = {}
win_versions = list(range(start,end+1))

for milestone in win_versions:
channels[milestone] = fetch_chrome_release_info(milestone)
channels[milestone] = fetchchannels.fetch_chrome_release_info(milestone)

return channels

63 changes: 4 additions & 59 deletions api/channels_api_test.py
@@ -28,7 +28,7 @@ def setUp(self):
self.handler = channels_api.ChannelsAPI()
self.request_path = '/api/v0/channels'

@mock.patch('api.channels_api.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.get_omaha_data')
def test_construct_chrome_channels_details(
self, mock_get_omaha_data, mock_fetch_chrome_release_info):
@@ -93,7 +93,7 @@ def fcri(version):
self.maxDiff = None
self.assertEqual(expected, actual)

@mock.patch('api.channels_api.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.get_omaha_data')
def test_construct_chrome_channels_details__beta_promotion(
self, mock_get_omaha_data, mock_fetch_chrome_release_info):
@@ -142,7 +142,7 @@ def fcri(version):
self.maxDiff = None
self.assertEqual(expected, actual)

@mock.patch('api.channels_api.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.get_omaha_data')
def test_construct_chrome_channels_details__dev_promotion(
self, mock_get_omaha_data, mock_fetch_chrome_release_info):
@@ -192,62 +192,7 @@ def fcri(version):
self.maxDiff = None
self.assertEqual(expected, actual)

@mock.patch('requests.get')
def test_fetch_chrome_release_info__found(self, mock_requests_get):
"""We can get channel data from the chromiumdash app."""
mock_requests_get.return_value = testing_config.Blank(
status_code=200,
content=json.dumps({
'mstones': [{
'owners': 'ignored',
'feature_freeze': 'ignored',
'ldaps': 'ignored',
'everything else': 'kept',
}],
}))

actual = channels_api.fetch_chrome_release_info(90)

self.assertEqual(
{'everything else': 'kept'},
actual)

@mock.patch('requests.get')
def test_fetch_chrome_release_info__not_found(self, mock_requests_get):
"""If chromiumdash app does not have the data, use a placeholder."""
mock_requests_get.return_value = testing_config.Blank(
status_code=404, content='')

actual = channels_api.fetch_chrome_release_info(91)

self.assertEqual(
{'stable_date': None,
'earliest_beta': None,
'latest_beta': None,
'mstone': 91,
'version': 91,
},
actual)

@mock.patch('requests.get')
def test_fetch_chrome_release_info__error(self, mock_requests_get):
"""We can get channel data from the chromiumdash app."""
mock_requests_get.return_value = testing_config.Blank(
status_code=200,
content='{')

actual = channels_api.fetch_chrome_release_info(90)

self.assertEqual(
{'stable_date': None,
'earliest_beta': None,
'latest_beta': None,
'mstone': 90,
'version': 90,
},
actual)

@mock.patch('api.channels_api.fetch_chrome_release_info')
@mock.patch('internals.fetchchannels.fetch_chrome_release_info')
def test_construct_specified_milestones_details(
self, mock_fetch_chrome_release_info):
mstone_data = {
56 changes: 43 additions & 13 deletions api/features_api.py
@@ -35,6 +35,7 @@
from internals import feature_helpers
from internals import search
from internals import search_fulltext
from internals import stage_helpers
from internals.user_models import AppUser
import settings

@@ -180,9 +181,9 @@ def _patch_update_stages(
self,
stage_changes_list: list[dict[str, Any]],
changed_fields: CHANGED_FIELDS_LIST_TYPE
) -> bool:
) -> list[Stage]:
"""Update stage fields with changes provided in the PATCH request."""
stages: list[Stage] = []
stages_to_store: list[Stage] = []
for change_info in stage_changes_list:
stage_was_updated = False
# Check if valid ID is provided and fetch stage if it exists.
Expand Down Expand Up @@ -223,29 +224,32 @@ def _patch_update_stages(
stage.milestones = milestones

if stage_was_updated:
stages.append(stage)
stages_to_store.append(stage)

# Save all of the updates made.
if stages:
ndb.put_multi(stages)
if stages_to_store:
ndb.put_multi(stages_to_store)

# Return a boolean representing if any changes were made to any stages.
return len(stages) > 0
# Return the list of modified stages.
return stages_to_store

def _patch_update_special_fields(
self,
feature: FeatureEntry,
feature_changes: dict[str, Any],
has_updated: bool
has_updated: bool,
updated_stages: list[Stage],
changed_fields: CHANGED_FIELDS_LIST_TYPE,
) -> None:
"""Handle any special FeatureEntry fields."""
now = datetime.now()
# Handle accuracy notifications if this is an accuracy verification request.
# Set accurate_as_of if this is an accuracy verification request.
if 'accurate_as_of' in feature_changes:
feature.accurate_as_of = now
feature.outstanding_notifications = 0
has_updated = True

# Set enterprise first notification milestones.
if is_update_first_notification_milestone(feature, feature_changes):
feature.first_enterprise_notification_milestone = int(feature_changes['first_enterprise_notification_milestone'])
has_updated = True
@@ -255,6 +259,30 @@ def _patch_update_special_fields(
if should_remove_first_notice_milestone(feature, feature_changes):
feature.first_enterprise_notification_milestone = None

# Set shipping_year based on milestones.
if updated_stages:
existing_shipping_stages = (
stage_helpers.get_all_shipping_stages_with_milestones(
feature_id=feature.key.integer_id()))

Collaborator (review): This seems like a large check for any time any stage is updated. Is it safe to check only when a shipping milestone is changed?
Author (Collaborator) reply: Good point. Done.
(One possible shape for such a guard is sketched after this file's diff.)

shipping_stage_dict = {
es.key.integer_id(): es
for es in existing_shipping_stages
}
shipping_stage_dict.update({
us.key.integer_id(): us
for us in updated_stages
if us.stage_type in STAGE_TYPES_SHIPPING.values()
})
earliest = stage_helpers.find_earliest_milestone(
list(shipping_stage_dict.values()))
if earliest:
year = stage_helpers.look_up_year(earliest)
if year != feature.shipping_year:
changed_fields.append(('shipping_year', feature.shipping_year, year))
feature.shipping_year = year
has_updated = True

# If changes were made, set the feature.updated timestamp.
if has_updated:
user_email = self.get_current_user().email()
feature.updater_email = user_email
@@ -264,10 +292,11 @@ def _patch_update_feature(
self,
feature: FeatureEntry,
feature_changes: dict[str, Any],
has_updated: bool,
updated_stages: list[Stage],
changed_fields: CHANGED_FIELDS_LIST_TYPE
) -> None:
"""Update feature fields with changes provided in the PATCH request."""
has_updated = len(updated_stages) > 0
for field, field_type in api_specs.FEATURE_FIELD_DATA_TYPES:
if field not in feature_changes:
continue
@@ -277,7 +306,8 @@
changed_fields.append((field, old_value, new_value))
has_updated = True

self._patch_update_special_fields(feature, feature_changes, has_updated)
self._patch_update_special_fields(
feature, feature_changes, has_updated, updated_stages, changed_fields)
feature.put()

def do_patch(self, **kwargs):
@@ -299,9 +329,9 @@
return redirect_resp

changed_fields: CHANGED_FIELDS_LIST_TYPE = []
has_updated = self._patch_update_stages(body['stages'], changed_fields)
updated_stages = self._patch_update_stages(body['stages'], changed_fields)
self._patch_update_feature(
feature, body['feature_changes'], has_updated, changed_fields)
feature, body['feature_changes'], updated_stages, changed_fields)

notifier_helpers.notify_subscribers_and_save_amendments(
feature, changed_fields, notify=True)
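The review thread above asks whether this recomputation can be limited to edits that actually change a shipping milestone; the author agreed, though this view shows only the first of the two commits, so that follow-up is not visible here. One plausible shape for such a guard is sketched below; the milestone field names, the keys assumed in the stage-change payload, and shipping_milestone_was_edited itself are illustrative assumptions, not code from this PR.

# Hypothetical guard suggested by the review discussion: only recompute
# shipping_year when an edited shipping stage had a milestone field changed.
# MILESTONE_FIELDS, SHIPPING_STAGE_TYPES, and the payload keys are invented;
# the Stage entities are assumed to be the repo's NDB models.
MILESTONE_FIELDS = ('desktop_first', 'desktop_last', 'android_first',
                    'ios_first', 'webview_first')
SHIPPING_STAGE_TYPES = {160, 260, 360, 460}


def shipping_milestone_was_edited(stage_changes_list, updated_stages) -> bool:
  """Return True if any updated shipping stage had a milestone field in the
  PATCH payload; the shipping_year recomputation could be skipped otherwise."""
  updated_shipping_ids = {s.key.integer_id() for s in updated_stages
                          if s.stage_type in SHIPPING_STAGE_TYPES}
  for change_info in stage_changes_list:
    if change_info.get('id') not in updated_shipping_ids:
      continue
    if any(f in change_info for f in MILESTONE_FIELDS):
      return True
  return False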
40 changes: 40 additions & 0 deletions internals/fetchchannels.py
@@ -19,6 +19,8 @@

from framework import rediscache
# Note: this file cannot import core_models because it would be circular.
import settings


OMAHA_URL_TEMPLATE = (
'https://versionhistory.googleapis.com'
@@ -65,3 +67,41 @@ def get_omaha_data():
rediscache.set('omaha_data', omaha_data, time=86400) # cache for 24hrs.

return json.loads(omaha_data)


SCHEDULE_CACHE_TIME = 60 * 60 # 1 hour


def fetch_chrome_release_info(version):
key = 'chromerelease|%s' % version

data = rediscache.get(key)
if data is None:
url = ('https://chromiumdash.appspot.com/fetch_milestone_schedule?'
'mstone=%s' % version)
result = requests.get(url, timeout=60)
if result.status_code == 200:
try:
logging.info(
'result.content is:\n%s', result.content[:settings.MAX_LOG_LINE])
result_json = json.loads(result.content)
if 'mstones' in result_json:
data = result_json['mstones'][0]
del data['owners']
del data['feature_freeze']
del data['ldaps']
rediscache.set(key, data, time=SCHEDULE_CACHE_TIME)
except ValueError:
pass # Handled by next statement

if not data:
data = {
'stable_date': None,
'earliest_beta': None,
'latest_beta': None,
'mstone': version,
'version': version,
}
# Note: we don't put placeholder data into redis.

return data
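
Since fetch_chrome_release_info now lives in internals/fetchchannels.py and channels_api calls it through the module, tests stub it at the new path ('internals.fetchchannels.fetch_chrome_release_info'), as the channels_api_test.py changes above show. Below is a minimal sketch of that pattern, assuming the chromedash test environment; the return payload and assertions are invented for illustration.

# Minimal sketch of stubbing the relocated helper (illustrative payload/test).
from unittest import mock

from api import channels_api


@mock.patch('internals.fetchchannels.fetch_chrome_release_info')
def test_construct_specified_milestones_details__stubbed(mock_fetch):
  mock_fetch.return_value = {'mstone': 130, 'version': 130, 'stable_date': None}
  channels = channels_api.construct_specified_milestones_details(130, 131)
  assert channels[130]['mstone'] == 130
  assert mock_fetch.call_count == 2  # one call per milestone in [130, 131]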