Update to Airflow 2.2.5 (#28)
* Use latest actions

* Use pendulum instead of datetime for a DAG's start_date

This aligns our DAGs with the Airflow example DAGs. See apache/airflow#21646; a minimal sketch of the new start_date pattern precedes the file diffs below.

* Update GitHub Actions with dependabot

* Remove outdated file; the directory doesn't exist any more

* Add include directory to pylint

* Update Airflow to 2.2.5
hammerhead committed Apr 5, 2022
1 parent e61056d commit 9ef2293
Showing 11 changed files with 24 additions and 21 deletions.
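
Across the DAG files in this commit, naive datetime.datetime start dates are replaced with timezone-aware pendulum.datetime values. A minimal sketch of the resulting pattern, assuming an illustrative dag_id and date (not taken from any particular file):

import pendulum
from airflow import DAG

with DAG(
    dag_id="example-dag",  # illustrative name, not one of the DAGs in this commit
    # pendulum.datetime() returns a timezone-aware datetime, matching the
    # upstream Airflow example DAGs (see apache/airflow#21646)
    start_date=pendulum.datetime(2021, 11, 19, tz="UTC"),
    schedule_interval="@daily",
    catchup=False,
) as dag:
    ...  # tasks unchanged; only the start_date handling differs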
5 changes: 5 additions & 0 deletions .github/dependabot.yml
@@ -10,3 +10,8 @@ updates:
     directory: "/"
     schedule:
       interval: "monthly"
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
10 changes: 5 additions & 5 deletions .github/workflows/main.yml
@@ -10,10 +10,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
 
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
           python-version: 3.9
           cache: 'pip'
@@ -25,17 +25,17 @@ jobs:
       - name: Run pylint
         run: |
-          python -m pylint dags tests
+          python -m pylint dags tests include
   test:
     name: Run pytest
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
 
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
           python-version: 3.9
           cache: 'pip'
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1 +1 @@
-FROM quay.io/astronomer/ap-airflow:2.2.4-3-onbuild
+FROM quay.io/astronomer/ap-airflow:2.2.5-1-onbuild
1 change: 0 additions & 1 deletion dags/.airflowignore

This file was deleted.

4 changes: 2 additions & 2 deletions dags/data_retention_delete_dag.py
@@ -8,10 +8,10 @@
 In CrateDB, tables for storing retention policies need to be created once manually.
 See the file setup/data_retention_schema.sql in this repository.
 """
-import datetime
 import json
 import logging
 from pathlib import Path
+import pendulum
 from airflow import DAG
 from airflow.providers.postgres.operators.postgres import PostgresOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
@@ -62,7 +62,7 @@ def delete_partitions(ti):
 
 with DAG(
     dag_id="data-retention-delete-dag",
-    start_date=datetime.datetime(2021, 11, 19),
+    start_date=pendulum.datetime(2021, 11, 19, tz="UTC"),
     schedule_interval="@daily",
     catchup=False,
 ) as dag:
4 changes: 2 additions & 2 deletions dags/data_retention_reallocate_dag.py
@@ -6,10 +6,10 @@
 In CrateDB, tables for storing retention policies need to be created once manually.
 See the file setup/data_retention_schema.sql in this repository.
 """
-import datetime
 import json
 import logging
 from pathlib import Path
+import pendulum
 from airflow import DAG
 from airflow.providers.postgres.operators.postgres import PostgresOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
@@ -85,7 +85,7 @@ def reallocate_partitions(ti):
 
 with DAG(
     dag_id="data-retention-reallocate-dag",
-    start_date=datetime.datetime(2021, 11, 19),
+    start_date=pendulum.datetime(2021, 11, 19, tz="UTC"),
     schedule_interval="@daily",
     catchup=False,
 ) as dag:
4 changes: 2 additions & 2 deletions dags/data_retention_snapshot_dag.py
@@ -6,10 +6,10 @@
 In CrateDB, tables for storing retention policies need to be created once manually.
 See the file setup/data_retention_schema.sql in this repository.
 """
-import datetime
 import json
 import logging
 from pathlib import Path
+import pendulum
 from airflow import DAG
 from airflow.providers.postgres.operators.postgres import PostgresOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
@@ -77,7 +77,7 @@ def snapshot_partitions(ti):
 
 with DAG(
     dag_id="data-retention-snapshot-dag",
-    start_date=datetime.datetime(2022, 1, 31),
+    start_date=pendulum.datetime(2022, 1, 31, tz="UTC"),
     schedule_interval="@daily",
     catchup=False,
 ) as dag:
3 changes: 2 additions & 1 deletion dags/financial_data_dag.py
@@ -11,6 +11,7 @@
 import math
 import json
 import logging
+import pendulum
 import requests
 from bs4 import BeautifulSoup
 import yfinance as yf
@@ -109,7 +110,7 @@ def format_and_insert_data_function(ti):
 
 with DAG(
     dag_id="financial_data_dag",
-    start_date=datetime.datetime(2022, 1, 10),
+    start_date=pendulum.datetime(2022, 1, 10, tz="UTC"),
     schedule_interval="@daily",
     catchup=False,
 ) as dag:
5 changes: 2 additions & 3 deletions dags/nyc_taxi_dag.py
@@ -10,10 +10,9 @@
 You can retrieve the CREATE TABLE statements from the file setup/taxi-schema.sql
 in this repository.
 """
-import datetime
 import logging
-
 from pathlib import Path
+import pendulum
 from airflow import DAG
 from airflow.providers.postgres.operators.postgres import PostgresOperator
 from airflow.providers.postgres.hooks.postgres import PostgresHook
@@ -86,7 +85,7 @@ def process_new_files(ti):
 
 with DAG(
     dag_id="nyc-taxi",
-    start_date=datetime.datetime(2021, 11, 11),
+    start_date=pendulum.datetime(2021, 11, 11, tz="UTC"),
     schedule_interval="@daily",
     catchup=False,
 ) as dag:
5 changes: 2 additions & 3 deletions dags/table_export_dag.py
@@ -3,9 +3,8 @@
 A detailed tutorial is available at https://community.crate.io/t/cratedb-and-apache-airflow-automating-data-export-to-s3/901
 """
-import datetime
 import os
-
+import pendulum
 from airflow import DAG
 from airflow.providers.postgres.operators.postgres import PostgresOperator
 from airflow.utils.task_group import TaskGroup
@@ -16,7 +15,7 @@
 
 with DAG(
     dag_id="cratedb_table_export",
-    start_date=datetime.datetime(2021, 11, 11),
+    start_date=pendulum.datetime(2021, 11, 11, tz="UTC"),
     schedule_interval="@daily",
     catchup=False,
 ) as dag:
2 changes: 1 addition & 1 deletion setup.py
@@ -5,7 +5,7 @@
     packages=find_packages(),
     python_requires=">=3.8",
     install_requires=[
-        "apache-airflow==2.2.4",
+        "apache-airflow==2.2.5",
         "apache-airflow-providers-postgres==4.1.0",
     ],
     extras_require={
