diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 8129ecb..48a4ee0 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -36,7 +36,7 @@ jobs:
       cancel-in-progress: true
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - uses: actions/setup-node@v3
         with:
@@ -65,7 +65,7 @@ jobs:
           convention-name: conventionalcommits
 
       - name: Install Conda Dependencies
-        uses: conda-incubator/setup-miniconda@v2
+        uses: conda-incubator/setup-miniconda@v3
         with:
           miniconda-version: "latest"
           mamba-version: "*"
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 98e603a..2079847 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -18,7 +18,7 @@ jobs:
         shell: bash -l {0}
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           fetch-depth: 0
           submodules: recursive
diff --git a/alertflow/dags/satellite-weather/brasil.py b/alertflow/dags/satellite-weather/brasil.py
index 92d41c0..306d9f5 100644
--- a/alertflow/dags/satellite-weather/brasil.py
+++ b/alertflow/dags/satellite-weather/brasil.py
@@ -84,16 +84,16 @@ def extract_transform_load(
     from satellite import downloader as sat_d
     from satellite import weather as sat_w
     from satellite.weather.brazil.extract_latlons import MUNICIPALITIES
-    from sqlalchemy import create_engine
+    from sqlalchemy import create_engine, text
 
     start_date = parser.parse(str(date))
     max_update_delay = start_date - timedelta(days=6)
 
     with create_engine(psql_uri["PSQL_MAIN_URI"]).connect() as conn:
-        cur = conn.execute(
+        cur = conn.execute(text(
             "SELECT geocodigo FROM weather.copernicus_brasil"
             f" WHERE date = '{str(max_update_delay.date())}'"
-        )
+        ))
         table_geocodes = set(chain(*cur.fetchall()))
 
     all_geocodes = set([mun["geocodigo"] for mun in MUNICIPALITIES])
@@ -116,17 +116,13 @@ def extract_transform_load(
         # Reads the NetCDF4 file using Xarray
         ds = sat_w.load_dataset(netcdf_file)
 
-        try:
-            conn = create_engine(psql_uri["PSQL_MAIN_URI"]).raw_connection()
-
+        with create_engine(psql_uri["PSQL_MAIN_URI"]).connect() as conn:
             ds.copebr.to_sql(
                 tablename="copernicus_brasil",
                 schema="weather",
                 geocodes=list(geocodes),
                 con=conn,
             )
-        finally:
-            conn.close()
 
         # Deletes the NetCDF4 file
         Path(netcdf_file).unlink(missing_ok=True)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 096dc86..3ec7a04 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -79,7 +79,7 @@ RUN source /home/airflow/mambaforge/bin/activate \
     && sed -i "s/include-system-site-packages = false/include-system-site-packages = true/" /opt/envs/py310/pyvenv.cfg \
     && source /opt/envs/py310/bin/activate \
     && pip install \
-    "satellite-weather-downloader >= 1.9.3" \
+    "satellite-weather-downloader >= 1.9.4" \
     psycopg2
 
 RUN echo "alias activate_episcanner='source /home/airflow/mambaforge/bin/activate episcanner-downloader'" >> /home/airflow/.bashrc