
Commit b173488

fix(satellite): use latest version of satellite (read-only db)

1 parent 6bb5e61

File tree

6 files changed: +1105 −1048 lines changed

alertflow/airflow.cfg

Lines changed: 2 additions & 2 deletions

@@ -534,7 +534,7 @@ default_hive_mapred_queue =
 # The base url of your website as airflow cannot guess what domain or
 # cname you are using. This is used in automated emails that
 # airflow sends to point links to the right web server
-base_url = http://localhost:8080
+base_url = http://localhost:8080/alertflow
 
 # Default timezone to display all dates in the UI, can be UTC, system, or
 # any IANA timezone string (e.g. Europe/Amsterdam). If left empty the
@@ -585,7 +585,7 @@ reload_on_plugin_change = False
 # The token generated using the secret key has a short expiry time though - make sure that time on
 # ALL the machines that you run airflow components on is synchronized (for example using ntpd)
 # otherwise you might get "forbidden" errors when the logs are accessed.
-# secret_key =
+secret_key = $AIRFLOW__WEBSERVER__SECRET_KEY
 
 # Number of workers to run the Gunicorn web server
 workers = 4
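
The webserver now reads its secret key from the environment instead of leaving it unset. The commit does not show how $AIRFLOW__WEBSERVER__SECRET_KEY is produced; a minimal sketch, assuming the key is generated once and pasted into the deployment's .env file, might be:

# Hypothetical one-off generator, not part of this commit. Airflow only
# requires that the webserver and every worker serving task logs share
# the same opaque string.
import secrets

print(f"AIRFLOW__WEBSERVER__SECRET_KEY={secrets.token_hex(16)}")

Keeping the key out of airflow.cfg pairs with the compose change below, which injects the same variable into every Airflow container.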

alertflow/dags/satellite-weather/brasil.py

Lines changed: 10 additions & 6 deletions

@@ -45,7 +45,7 @@
     dag_id="COPERNICUS_BRASIL",
     description="ETL of weather data for Brazil",
     tags=["Brasil", "Copernicus"],
-    schedule="@monthly",
+    schedule="@daily",
     default_args=DEFAULT_ARGS,
     start_date=pendulum.datetime(2000, 1, 1),
     catchup=True,
@@ -75,6 +75,9 @@ def fetch_ds(locale, dt, uri, api_key):
     print("TABLE_GEO ", f"[{len(table_geocodes)}]: ", table_geocodes)
     print("DIFF_GEO: ", f"[{len(geocodes)}]: ", geocodes)
 
+    if not geocodes:
+        return
+
     basename = str(dt).replace("-", "_") + locale
     with request.reanalysis_era5_land(
         basename,
@@ -83,12 +86,13 @@ def fetch_ds(locale, dt, uri, api_key):
         locale=locale,
     ) as ds:
         for geocode in geocodes:
-            adm = ADM2.get(code=geocode)
+            adm = ADM2.get(code=geocode, adm0=locale)
             with engine.connect() as conn:
                 ds.cope.to_sql(adm, conn, tablename, "weather")
-            file = Path(f"{basename}.zip")
-            if file.exists():
-                file.unlink()
-                print(f"{file} removed")
+
+    file = Path(f"{basename}.zip")
+    if file.exists():
+        file.unlink()
+        print(f"{file} removed")
 
 fetch_ds("BRA", DATE, URI["PSQL_MAIN_URI"], KEY["CDSAPI_KEY"])

conda/env.yaml

Lines changed: 1 addition & 0 deletions

@@ -7,3 +7,4 @@ dependencies:
   - pip
   - pre-commit
   - poetry >= 1.7.1
+  - docker-compose

docker/compose.yaml

Lines changed: 13 additions & 11 deletions

@@ -28,10 +28,12 @@ x-airflow-common:
   environment:
     &airflow-common-env
     AIRFLOW__CORE__FERNET_KEY: ${AIRFLOW__CORE__FERNET_KEY}
+    AIRFLOW__WEBSERVER__SECRET_KEY: ${AIRFLOW__WEBSERVER__SECRET_KEY}
     AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
     AIRFLOW_HOME: ${AIRFLOW_HOME:-/opt/airflow}
     AIRFLOW_VAR_PSQL_MAIN_URI: '{"PSQL_MAIN_URI":"${PSQL_URI_MAIN}"}'
     AIRFLOW_VAR_CDSAPI_KEY: '{"CDSAPI_KEY":"${CDSAPI_KEY}"}'
+
     HOST_UID: ${HOST_UID}
     HOST_GID: ${HOST_GID}
   volumes:
@@ -170,17 +172,17 @@ services:
       timeout: 10s
       retries: 5
       start_period: 30s
-    environment:
-      AIRFLOW__CORE__FERNET_KEY: ${AIRFLOW__CORE__FERNET_KEY}
-      AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
-      AIRFLOW_HOME: ${AIRFLOW_HOME:-/opt/airflow}
-      AIRFLOW_VAR_PSQL_MAIN_URI: '{"PSQL_MAIN_URI":"${PSQL_URI_MAIN}"}'
-      AIRFLOW_VAR_CDSAPI_KEY: '{"CDSAPI_KEY":"${CDSAPI_KEY}"}'
-      HOST_UID: ${HOST_UID}
-      HOST_GID: ${HOST_GID}
-      # Required to handle warm shutdown of the celery workers properly
-      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
-      DUMB_INIT_SETSID: "0"
+    # environment:
+    #   AIRFLOW__CORE__FERNET_KEY: ${AIRFLOW__CORE__FERNET_KEY}
+    #   AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
+    #   AIRFLOW_HOME: ${AIRFLOW_HOME:-/opt/airflow}
+    #   AIRFLOW_VAR_PSQL_MAIN_URI: '{"PSQL_MAIN_URI":"${PSQL_URI_MAIN}"}'
+    #   AIRFLOW_VAR_CDSAPI_KEY: '{"CDSAPI_KEY":"${CDSAPI_KEY}"}'
+    #   HOST_UID: ${HOST_UID}
+    #   HOST_GID: ${HOST_GID}
+    #   # Required to handle warm shutdown of the celery workers properly
+    #   # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
+    #   DUMB_INIT_SETSID: "0"
     restart: always
     depends_on:
       redis:
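
Dropping the worker's own environment block works because the service already merges the x-airflow-common anchor, whose &airflow-common-env mapping now carries the secret key; every service therefore inherits AIRFLOW__WEBSERVER__SECRET_KEY from one place. A minimal sketch of the merge-key mechanics (illustrative names and a dummy value, exercised with PyYAML rather than the real compose file):

import yaml

doc = """
x-common: &common
  environment:
    AIRFLOW__WEBSERVER__SECRET_KEY: dummy-key
airflow-worker:
  <<: *common
"""

cfg = yaml.safe_load(doc)
# the worker picks up the shared environment without restating it
print(cfg["airflow-worker"]["environment"])
# -> {'AIRFLOW__WEBSERVER__SECRET_KEY': 'dummy-key'}

One side effect worth noting: the commented-out block also disables DUMB_INIT_SETSID: "0", which the old comments tie to warm shutdown of the Celery workers.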
