
Commit

update to ruff
nllong committed May 8, 2024
1 parent 837ed4c commit 6cf129a
Showing 8 changed files with 15 additions and 31 deletions.
1 change: 1 addition & 0 deletions .cspell.json
@@ -4,6 +4,7 @@
"words": [
"autoload",
"geodataframe",
"iloc",
"notna",
"quadkey",
"quadkeys",
3 changes: 3 additions & 0 deletions ruff.toml
@@ -61,8 +61,11 @@ ignore = [
"Q000", # bad-quotes-inline-string
"RUF012", # mutable-class-default
"S106", # hardcoded-password-func-arg
"S113", # Probable use of requests call without timeout
"SIM115", # Use context handler for opening files
"S311", # suspicious-non-cryptographic-random-usage
"S320", # suspicious-xmle-tree-usage
"S324", # Probable use of insecure hash functions in `hashlib`: `md5`
"S603", # subprocess-without-shell-equals-true
"S607", # start-process-with-partial-path
"S701", # jinja2-autoescape-false
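For context, "S113" is ruff's request-without-timeout check, which the entry above suppresses. A minimal sketch of the pattern it flags, with an illustrative URL and timeout value:

    import requests

    requests.get("https://example.com")              # S113: no timeout, the call can hang indefinitely
    requests.get("https://example.com", timeout=30)  # passes the check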
4 changes: 1 addition & 3 deletions utils/chunk.py
@@ -6,6 +6,4 @@


def chunk(full_list: list, chunk_size: int = 100):
- return [
-     full_list[i * chunk_size : (i + 1) * chunk_size] for i in range((len(full_list) + chunk_size - 1) // chunk_size)
- ]
+ return [full_list[i * chunk_size : (i + 1) * chunk_size] for i in range((len(full_list) + chunk_size - 1) // chunk_size)]
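The reformatted helper splits a list into fixed-size batches. A quick sketch of its behavior, with made-up values and assuming the repository root is on sys.path:

    from utils.chunk import chunk

    chunk(list(range(7)), chunk_size=3)
    # -> [[0, 1, 2], [3, 4, 5], [6]]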
10 changes: 3 additions & 7 deletions utils/geocode_addresses.py
@@ -44,9 +44,7 @@ def _process_result(result):
for i in range(1, 7):
if result.get("locations")[0].get(f"adminArea{i}Type") is None:
continue
- admin_areas[result.get("locations")[0].get(f"adminArea{i}Type").lower()] = result.get("locations")[0].get(
-     f"adminArea{i}"
- )
+ admin_areas[result.get("locations")[0].get(f"adminArea{i}Type").lower()] = result.get("locations")[0].get(f"adminArea{i}")

return {
"quality": quality,
@@ -81,15 +79,13 @@ def geocode_addresses(locations: list[Location], mapquest_api_key: str):
# Catch invalid API key error before parsing the response
if response.status_code == 401:
raise MapQuestAPIKeyError(
"Failed geocoding property states due to MapQuest error. "
"API Key is invalid with message: {response.content}."
"Failed geocoding property states due to MapQuest error. " "API Key is invalid with message: {response.content}."
)
results += response.json().get("results")
except Exception as e:
if response.status_code == 403:
raise MapQuestAPIKeyError(
"Failed geocoding property states due to MapQuest error. "
"Your MapQuest API Key is either invalid or at its limit."
"Failed geocoding property states due to MapQuest error. " "Your MapQuest API Key is either invalid or at its limit."
)
else:
raise e
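As a standalone sketch of the 401 handling shown above: the endpoint, parameters, and exception class below are assumptions for illustration, not taken from this diff, and an f-string is used so the error message actually interpolates the response body:

    import requests


    class MapQuestAPIKeyError(Exception):
        pass


    response = requests.get(
        "https://www.mapquestapi.com/geocoding/v1/batch",
        params={"key": "YOUR-KEY", "location": "100 Main St, Denver, CO"},
        timeout=30,
    )
    if response.status_code == 401:
        raise MapQuestAPIKeyError(f"Failed geocoding due to MapQuest error. API key is invalid with message: {response.content}.")
    results = response.json().get("results")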
8 changes: 2 additions & 6 deletions utils/normalize_address.py
@@ -142,9 +142,7 @@ def normalize_address(address_val: str):
normalized_address = _normalize_address_number(addr["AddressNumber"])

if "StreetNamePreDirectional" in addr and addr["StreetNamePreDirectional"] is not None:
- normalized_address = (
-     normalized_address + " " + _normalize_address_direction(addr["StreetNamePreDirectional"])
- )
+ normalized_address = normalized_address + " " + _normalize_address_direction(addr["StreetNamePreDirectional"])

if "StreetName" in addr and addr["StreetName"] is not None:
normalized_address = normalized_address + " " + addr["StreetName"]
@@ -154,9 +152,7 @@
normalized_address = normalized_address + " " + _normalize_address_post_type(addr["StreetNamePostType"])

if "StreetNamePostDirectional" in addr and addr["StreetNamePostDirectional"] is not None:
- normalized_address = (
-     normalized_address + " " + _normalize_address_direction(addr["StreetNamePostDirectional"])
- )
+ normalized_address = normalized_address + " " + _normalize_address_direction(addr["StreetNamePostDirectional"])

if "SubaddressType" in addr and addr["SubaddressType"] is not None:
normalized_address = normalized_address + " " + _normalize_subaddress_type(addr["SubaddressType"])
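Illustrative use of the normalizer above; the input address is made up and the exact output depends on the helper's abbreviation rules, so the commented result is only an assumption:

    from utils.normalize_address import normalize_address  # assumes the repository root is on sys.path

    normalize_address("100 West Main Street SW, Suite 4")
    # -> something like "100 w main st sw ste 4"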
12 changes: 2 additions & 10 deletions utils/open_street_map.py
@@ -132,13 +132,7 @@ def get_node_coordinates(node_ids: list[int]):

# Extract the latitude and longitude coordinates of the node from the response
for element in data["elements"]:
- if (
-     "type" in element
-     and element["type"] == "node"
-     and "id" in element
-     and "lat" in element
-     and "lon" in element
- ):
+ if "type" in element and element["type"] == "node" and "id" in element and "lat" in element and "lon" in element:
lat = float(element["lat"])
lon = float(element["lon"])
# Check if coordinates are within valid range
@@ -260,9 +254,7 @@ def process_dataframe_for_osm_buildings(

# check that the method is valid
if method not in ["geometry_centroid", "osm_id", "lat_long"]:
- raise ValueError(
-     f"Invalid processing method: {method}, must be one of ['geometry_centroid', 'osm_id', 'lat_long']"
- )
+ raise ValueError(f"Invalid processing method: {method}, must be one of ['geometry_centroid', 'osm_id', 'lat_long']")

results = []
error_processing = []
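A hypothetical call to the node-lookup helper above; the node IDs are placeholders, network access to the Overpass API is assumed, and the return shape is an assumption based on the latitude/longitude parsing shown in the diff:

    from utils.open_street_map import get_node_coordinates  # assumes the repository root is on sys.path

    coords = get_node_coordinates([1234567890, 1234567891])
    # expected to yield (lat, lon) pairs parsed from the Overpass response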
4 changes: 1 addition & 3 deletions utils/ubid.py
@@ -13,9 +13,7 @@
def encode_ubid(geometry: Polygon) -> str:
min_longitude, min_latitude, max_longitude, max_latitude = geometry.bounds
centroid = geometry.centroid
- ubid = encode(
-     min_latitude, min_longitude, max_latitude, max_longitude, centroid.y, centroid.x, codeLength=PAIR_CODE_LENGTH_
- )
+ ubid = encode(min_latitude, min_longitude, max_latitude, max_longitude, centroid.y, centroid.x, codeLength=PAIR_CODE_LENGTH_)
return ubid


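A minimal sketch of calling encode_ubid with a shapely footprint; the coordinates are illustrative, and shapely plus the buildingid dependency are assumed to be installed:

    from shapely.geometry import Polygon

    from utils.ubid import encode_ubid  # assumes the repository root is on sys.path

    footprint = Polygon([(-105.0, 39.75), (-104.99, 39.75), (-104.99, 39.76), (-105.0, 39.76)])
    ubid = encode_ubid(footprint)  # UBID string derived from the bounding box and centroid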
4 changes: 2 additions & 2 deletions utils/update_quadkeys.py
@@ -16,12 +16,12 @@ def update_quadkeys(quadkeys: list[int], save_directory: Path = Path("data/quadk
Skip the download if it has already been downloaded, and it is up-to-date
"""
save_directory.mkdir(parents=True, exist_ok=True)
- df = pd.read_csv(save_directory / "dataset-links.csv")
+ df_update = pd.read_csv(save_directory / "dataset-links.csv")

for quadkey in tqdm(quadkeys):
download = True
quadkey_file = save_directory / f"{quadkey}.geojsonl.gz"
rows = df[df["QuadKey"] == quadkey]
rows = df_update[df_update["QuadKey"] == quadkey]
if rows.shape[0] == 1:
url = rows.iloc[0]["Url"]
elif rows.shape[0] > 1:
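Hypothetical usage of update_quadkeys; the quadkey value is a placeholder, and a dataset-links.csv mapping quadkeys to download URLs is assumed to already exist in the save directory:

    from pathlib import Path

    from utils.update_quadkeys import update_quadkeys  # assumes the repository root is on sys.path

    update_quadkeys([32010221], save_directory=Path("data/quadkeys"))
    # downloads any <quadkey>.geojsonl.gz files that are missing or out of date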
