Skip to content

Commit

Permalink
Merge pull request #41 from msarahan/console-scripts
Browse files Browse the repository at this point in the history
  • Loading branch information
msarahan authored Jul 17, 2023
2 parents 8f265da + 4cd4679 commit 38a48b1
Show file tree
Hide file tree
Showing 9 changed files with 301 additions and 277 deletions.
13 changes: 6 additions & 7 deletions .github/workflows/run-script.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,20 +9,19 @@ jobs:
run_script_job:
runs-on: ubuntu-latest
steps:
- name: Check out the code
uses: actions/checkout@v3
# TODO: consider replacing python/pip/update-web-metadata installs with docker image
- name: Setup Python
uses: actions/setup-python@v4
- name: Upgrade pip
run: |
# install pip=>20.1 to use "pip cache dir"
python -m pip install --upgrade pip wheel
- name: Install deps
run: python -m pip install -r requirements.txt
- name: Install package
run: python -m pip install -e .
run: python -m pip install git+https://github.com/pyopenSci/update-web-metadata

- name: Check out the code
uses: actions/checkout@v3
- name: Run script from update-web-metadata repo
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
run: |
python ${{ inputs.script_name_with_args }}
run: ${{ inputs.script_name_with_args }}
2 changes: 1 addition & 1 deletion .github/workflows/test-run-script.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,5 @@ jobs:
test_run_script_job:
uses: ./.github/workflows/run-script.yml
with:
script_name_with_args: parse_review_issues.py
script_name_with_args: parse-review-issues
secrets: inherit
73 changes: 0 additions & 73 deletions parse-contributors.py

This file was deleted.

79 changes: 0 additions & 79 deletions parse_review_issues.py

This file was deleted.

5 changes: 5 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,4 +18,9 @@ requires-python = ">=3.10"
readme = "README.md"
license = {text = "MIT"}

[project.scripts]
parse-contributors = "pyosmeta.cli.parse_contributors:main"
parse-review-issues = "pyosmeta.cli.parse_review_issues:main"
update-reviewers = "pyosmeta.cli.update_reviewers:main"

[tool.pdm]
80 changes: 80 additions & 0 deletions src/pyosmeta/cli/parse_contributors.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import pickle
from os.path import exists

from pyosmeta.contributors import ProcessContributors
from pyosmeta.file_io import get_api_token

# TODO: Turn this into a conditional that checks for a .env file and
# if that doesn't exist then assume it's being run in actions.


def main():
    """Build an updated ``contributors.yml`` for the pyOpenSci website.

    Combines contributor entries from several pyOpenSci repositories'
    all-contributors bot files with the entries already published on the
    website, enriches each contributor with profile data pulled from the
    GitHub API, pickles the merged data locally (so review parsing can
    reuse it), and finally exports a cleaned ``contributors.yml``.

    Requires a GitHub API token discoverable by ``get_api_token()``.
    """
    GITHUB_TOKEN = get_api_token()

    # all-contributors bot config files to harvest contributors from
    json_files = [
        "https://raw.githubusercontent.com/pyOpenSci/python-package-guide/main/.all-contributorsrc",
        "https://raw.githubusercontent.com/pyOpenSci/software-peer-review/main/.all-contributorsrc",
        "https://raw.githubusercontent.com/pyOpenSci/pyopensci.github.io/main/.all-contributorsrc",
        "https://raw.githubusercontent.com/pyOpenSci/software-review/main/.all-contributorsrc",
        "https://raw.githubusercontent.com/pyOpenSci/update-web-metadata/main/.all-contributorsrc",
        # "https://raw.githubusercontent.com/pyOpenSci/examplepy/main/.all-contributorsrc",
    ]

    # Get contribs from pyopensci.github.io repo (this is what is published online)
    web_yaml_path = "https://raw.githubusercontent.com/pyOpenSci/pyopensci.github.io/main/_data/contributors.yml"

    process_contribs = ProcessContributors(json_files, web_yaml_path, GITHUB_TOKEN)

    # Both return dicts with gh usernames (lowercase) as keys
    web_yml_dict = process_contribs.load_website_yml()
    bot_all_contribs_dict = process_contribs.combine_json_data()

    # Merge website users and all-contributors bot users into a single dict
    all_contribs_dict = process_contribs.combine_users(
        bot_all_contribs_dict, web_yml_dict
    )

    # Force gh username values to be lowercase so lookups match keys
    for key in all_contribs_dict:
        all_contribs_dict[key]["github_username"] = all_contribs_dict[key][
            "github_username"
        ].lower()
        print(all_contribs_dict[key]["github_username"])

    gh_data = process_contribs.get_gh_data(all_contribs_dict)

    # Append GitHub API data to the existing dictionary.
    # (A hard-coded `update_keys` list previously defined here was never
    # used anywhere — removed as dead code.)
    all_contribs_dict_up = process_contribs.update_contrib_data(
        all_contribs_dict, gh_data
    )

    # Save a pickle locally to support updates after parsing reviews
    with open("all_contribs.pickle", "wb") as f:
        pickle.dump(all_contribs_dict_up, f)

    final_contribs = process_contribs.dict_to_list(all_contribs_dict_up)
    final_yaml = "contributors.yml"
    # Create updated YAML file and clean it to match the website format
    process_contribs.export_yaml(final_yaml, final_contribs)
    process_contribs.clean_yaml_file(final_yaml)


if __name__ == "__main__":
    main()
85 changes: 85 additions & 0 deletions src/pyosmeta/cli/parse_review_issues.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
"""
Script that parses metadata from an approved-review issue and adds it to a
yml file for the website. It also grabs some of the package metadata such as
stars, last commit, etc.
Output: packages.yml
To run: parse-review-issues (installed console script)
# TODO: add maintainer list to the metadata!
"""


from pyosmeta import ProcessIssues
from pyosmeta.file_io import get_api_token


def main():
    """Parse approved pyOpenSci review issues into ``packages.yml``.

    Fetches all issues labeled as approved from the
    ``pyopensci/software-submission`` repo, parses each issue header into
    package review metadata, enriches every package with repository stats
    from the GitHub API (stars, forks, contributor count, last commit, ...),
    and exports the combined data to a cleaned, Jekyll-friendly
    ``packages.yml``.

    Requires a GitHub API token discoverable by ``get_api_token()``.
    """
    GITHUB_TOKEN = get_api_token()

    # TODO: looks like sometimes the gh username is the name then @. so i need to create
    # code that looks for the @ and adds the username to ghusername and the rest to the name
    # result.status_code in [200, 302]:
    # TODO: I get key errors and name errors when i hit api limits
    # Would be good to track API return responses / figure out how long I need to wait
    # so it doesn't just fail. how does that get setup?
    issue_process = ProcessIssues(
        org="pyopensci",
        repo_name="software-submission",
        label_name="6/pyOS-approved 🚀🚀🚀",
        GITHUB_TOKEN=GITHUB_TOKEN,
    )

    # Get all issues for approved packages
    issues = issue_process.return_response()
    review = issue_process.parse_issue_header(issues, 12)

    # Get list of GitHub API endpoint for each accepted package
    all_repo_endpoints = issue_process.get_repo_endpoints(review)

    # Repo stats to request from the GitHub API for each package.
    # ("stargazers_count" was previously listed twice — duplicate removed.)
    gh_stats = [
        "name",
        "description",
        "homepage",
        "created_at",
        "stargazers_count",
        "watchers_count",
        "forks",
        "open_issues_count",
        "forks_count",
    ]

    # Get gh metadata for each package submission
    all_repo_meta = {}
    for package_name, package_api in all_repo_endpoints.items():
        print(package_name)
        all_repo_meta[package_name] = issue_process.get_repo_meta(package_api, gh_stats)

        all_repo_meta[package_name]["contrib_count"] = issue_process.get_repo_contribs(
            package_api
        )
        all_repo_meta[package_name]["last_commit"] = issue_process.get_last_commit(
            package_api
        )
        # Add github meta to review metadata
        review[package_name]["gh_meta"] = all_repo_meta[package_name]

    # Turn the data into a list to support jekyll friendly yaml
    final_data = [review[key] for key in review]

    final_yaml = "packages.yml"
    # Export to yaml and clean it to match the website format
    issue_process.export_yaml(final_yaml, final_data)
    issue_process.clean_yaml_file(final_yaml)


if __name__ == "__main__":
    main()
Loading

0 comments on commit 38a48b1

Please sign in to comment.