
Commit e5834f9
fixed timedelta occurences
sh-rp committed Nov 4, 2024
1 parent cc4670d commit e5834f9
Showing 5 changed files with 6 additions and 8 deletions.
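
The fix is the same in every file below: the snippets keep only module imports in scope, so `timedelta` and `DateTime` are referenced through `datetime` and `pendulum` instead of being imported as bare names. A minimal sketch of the pattern, assuming `import datetime` and `import pendulum` are what the snippets actually have available:

```py
import datetime
import pendulum

# Module-qualified names resolve without any `from ... import ...` lines.
next_week = datetime.date.today() + datetime.timedelta(weeks=1)
start_date: pendulum.DateTime = pendulum.DateTime(2024, 1, 1)

# The bare names would fail here, e.g.:
# timedelta(weeks=1)  # NameError: name 'timedelta' is not defined
```
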
4 changes: 2 additions & 2 deletions docs/tools/lint_setup/template.py
@@ -130,8 +130,8 @@
 MAX_PAGE_SIZE: int = 100
 API_VERSION: str = ""
 FIRST_DAY_OF_MILLENNIUM: TAnyDateTime = pendulum.DateTime(2000, 1, 1)
-START_DATE: DateTime = pendulum.DateTime(2024, 1, 1)
-END_DATE: DateTime = pendulum.DateTime(2024, 12, 31)
+START_DATE: pendulum.DateTime = pendulum.DateTime(2024, 1, 1)
+END_DATE: pendulum.DateTime = pendulum.DateTime(2024, 12, 31)
 START_DATE_STRING: str = ""
 API_KEY: str = ""
 ITEMS_PER_PAGE: int = 100
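
As a side note, `pendulum.DateTime` works both as the annotation and as the constructor here, since it subclasses the standard `datetime.datetime`; a quick illustration (not part of the template itself):

```py
import datetime

import pendulum

START_DATE: pendulum.DateTime = pendulum.DateTime(2024, 1, 1)
END_DATE: pendulum.DateTime = pendulum.DateTime(2024, 12, 31)

# pendulum.DateTime instances are also plain datetime.datetime instances,
# so the qualified annotation matches the constructed values.
assert isinstance(START_DATE, pendulum.DateTime)
assert isinstance(START_DATE, datetime.datetime)
assert START_DATE + datetime.timedelta(days=30) < END_DATE
```
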
3 changes: 1 addition & 2 deletions docs/website/docs/dlt-ecosystem/destinations/bigquery.md
@@ -304,7 +304,6 @@ The adapter updates the DltResource with metadata about the destination column a
 Here is an example of how to use the `bigquery_adapter` method to apply hints to a resource on both the column level and table level:

 ```py
-from datetime import date, timedelta

 import dlt
 from dlt.destinations.adapters import bigquery_adapter
@@ -319,7 +318,7 @@
 )
 def event_data():
     yield from [
-        {"event_date": date.today() + timedelta(days=i)} for i in range(100)
+        {"event_date": datetime.date.today() + datetime.timedelta(days=i)} for i in range(100)
     ]


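
The hunk only shows the tail of the adapter setup; stripped of that wiring, the corrected resource can be exercised on its own. A small, hypothetical smoke test of the changed expression:

```py
import datetime

import dlt


@dlt.resource
def event_data():
    # One row per day for the next 100 days, module-qualified as in the fixed docs.
    yield from [
        {"event_date": datetime.date.today() + datetime.timedelta(days=i)}
        for i in range(100)
    ]


rows = list(event_data())
assert len(rows) == 100
assert rows[0]["event_date"] == datetime.date.today()
```
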
2 changes: 1 addition & 1 deletion
@@ -354,7 +354,7 @@ verified source.
 min_start_date = pendulum.DateTime(year=2023, month=1, day=1).in_timezone("UTC")
 max_end_date = pendulum.today()
 # Generate tuples of date ranges, each with 1 week in between.
-ranges = make_date_ranges(min_start_date, max_end_date, timedelta(weeks=1))
+ranges = make_date_ranges(min_start_date, max_end_date, datetime.timedelta(weeks=1))
 # Run the pipeline in a loop for each 1-week range
 for start, end in ranges:
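
The `make_date_ranges` helper is defined elsewhere on that docs page; purely to illustrate the now-qualified `datetime.timedelta` argument, a hypothetical version of the helper could look like this:

```py
import datetime
from typing import Iterator, Tuple

import pendulum


def make_date_ranges(
    start: pendulum.DateTime,
    end: pendulum.DateTime,
    step: datetime.timedelta,
) -> Iterator[Tuple[pendulum.DateTime, pendulum.DateTime]]:
    # Yield consecutive (start, end) windows of `step` length, clamped at `end`.
    while start < end:
        window_end = min(start + step, end)
        yield start, window_end
        start = window_end


min_start_date = pendulum.DateTime(year=2023, month=1, day=1).in_timezone("UTC")
max_end_date = pendulum.today()
ranges = list(make_date_ranges(min_start_date, max_end_date, datetime.timedelta(weeks=1)))
```
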
2 changes: 1 addition & 1 deletion
@@ -159,7 +159,7 @@ API token.

 # Update the exchange rate if it's older than 12 hours
 if (currency_pair_state.get("rate") is None or
-        (datetime.datetime.utcnow() - currency_pair_state["last_update"] >= timedelta(hours=12))):
+        (datetime.datetime.utcnow() - currency_pair_state["last_update"] >= datetime.timedelta(hours=12))):
     url = f"https://v6.exchangerate-api.com/v6/{api_key}/pair/{base_currency}/{target_currency}"
     response = requests.get(url)
     if response.status_code == 200:
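
Taken in isolation, the corrected staleness check behaves like this (the state dict is a hypothetical stand-in for the source's stored state):

```py
import datetime

# Hypothetical stand-in for the stored state of one currency pair.
currency_pair_state = {
    "rate": 1.07,
    "last_update": datetime.datetime.utcnow() - datetime.timedelta(hours=13),
}

needs_refresh = (
    currency_pair_state.get("rate") is None
    or datetime.datetime.utcnow() - currency_pair_state["last_update"]
    >= datetime.timedelta(hours=12)
)
assert needs_refresh  # 13 hours old, so the rate would be re-fetched
```
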
3 changes: 1 addition & 2 deletions
@@ -111,7 +111,6 @@ The first step is to register on [SerpAPI](https://serpapi.com/) and obtain the

 1. Create the `fetch_average_price()` function as follows:
 ```py
-from datetime import datetime, timedelta
 import requests

 # Uncomment transformer function if it is to be used as a transformer,
@@ -159,7 +158,7 @@ The first step is to register on [SerpAPI](https://serpapi.com/) and obtain the
         device_data.get('timestamp', datetime.datetime.min)
     )
     # Check if the device is not in state or data is older than 180 days
-    if device not in device_info or last_updated > timedelta(days=180):
+    if device not in device_info or last_updated > datetime.timedelta(days=180):
         try:
             # Make an API request to fetch device prices
             response = requests.get("https://serpapi.com/search", params={
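
The assignment of `last_updated` is collapsed above; assuming it holds the age of the cached entry as a `timedelta`, the corrected comparison reads as follows (illustrative, hypothetical values):

```py
import datetime

device_info = {"iphone 15": {"price": 799}}   # hypothetical cached prices
device = "pixel 9"
last_updated = datetime.timedelta(days=200)   # hypothetical age of the cached entry

# Not cached, or cached more than 180 days ago: fetch a fresh price.
if device not in device_info or last_updated > datetime.timedelta(days=180):
    print("fetching a fresh price from SerpAPI")
```
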
