diff --git a/README.md b/README.md
index 9380e02..dfe83c0 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,7 @@ This is the code for [Apache Airflow Tutorials](https://www.youtube.com/playlist
 | 4 | [Writing your first pipeline](https://youtu.be/43wHwwZhJMo) ([blog post](https://www.applydatascience.com/airflow/writing-your-first-pipeline/)) | N/A |
 | 5 | [Airflow concept](https://youtu.be/4rQSa2zEWfw) ([blog post](https://www.applydatascience.com/airflow/airflow-concept/)) | N/A |
 | 6 | [Build a data pipeline using Google Cloud Bigquery](https://youtu.be/wAyu5BN3VpY) ([blog post](https://www.applydatascience.com/airflow/bigquery-pipeline-airflow/)) | [v0.6](https://github.com/tuanavu/airflow-tutorial/tree/v0.6) |
+| 7 | [Airflow variables](https://youtu.be/bHQ7nzn0j6k) ([blog post](https://www.applydatascience.com/airflow/airflow-variables/)) | [v0.7](https://github.com/tuanavu/airflow-tutorial/tree/v0.7) |
 
 ## Getting Started
 
diff --git a/docker-compose-gcloud.yml b/docker-compose-gcloud.yml
index 142c802..33c703a 100644
--- a/docker-compose-gcloud.yml
+++ b/docker-compose-gcloud.yml
@@ -15,7 +15,8 @@ services:
             context: https://github.com/puckel/docker-airflow.git#1.10.1
             dockerfile: Dockerfile
             args:
-                AIRFLOW_DEPS: gcp_api,s3
+                AIRFLOW_DEPS: gcp_api,s3
+                PYTHON_DEPS: sqlalchemy==1.2.0
         restart: always
         depends_on:
             - postgres
diff --git a/docker-compose.yml b/docker-compose.yml
index 86930c3..3457c62 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -15,7 +15,8 @@ services:
             context: https://github.com/puckel/docker-airflow.git#1.10.1
             dockerfile: Dockerfile
             args:
-                AIRFLOW_DEPS: gcp_api,s3
+                AIRFLOW_DEPS: gcp_api,s3
+                PYTHON_DEPS: sqlalchemy==1.2.0
         restart: always
         depends_on:
             - postgres
@@ -34,4 +35,4 @@ services:
             test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"]
             interval: 30s
             timeout: 30s
-            retries: 3
\ No newline at end of file
+            retries: 3
diff --git a/examples/intro-example/dags/example_variables.py b/examples/intro-example/dags/example_variables.py
index 091b4fd..b015847 100644
--- a/examples/intro-example/dags/example_variables.py
+++ b/examples/intro-example/dags/example_variables.py
@@ -18,13 +18,13 @@
     default_args=default_args)
 
 
-# Config variables
-# Common
-var1 = "value1"
-var2 = [1, 2, 3]
-var3 = {'k': 'value3'}
+# # Config variables
+# # Common
+# var1 = "value1"
+# var2 = [1, 2, 3]
+# var3 = {'k': 'value3'}
 
-## 3 DB connections called
+# # 3 DB connections called
 # var1 = Variable.get("var1")
 # var2 = Variable.get("var2")
 # var3 = Variable.get("var3")
@@ -35,33 +35,33 @@
 # var2 = dag_config["var2"]
 # var3 = dag_config["var3"]
 
-start = DummyOperator(
-    task_id="start",
-    dag=dag
-)
+# start = DummyOperator(
+#     task_id="start",
+#     dag=dag
+# )
 
-# To test this task, run this command:
-# docker-compose run --rm webserver airflow test example_variables get_dag_config 2019-02-15
-t1 = BashOperator(
-    task_id="get_dag_config",
-    bash_command='echo "{0}"'.format(dag_config),
-    dag=dag,
-)
+# # To test this task, run this command:
+# # docker-compose run --rm webserver airflow test example_variables get_dag_config 2019-02-15
+# t1 = BashOperator(
+#     task_id="get_dag_config",
+#     bash_command='echo "{0}"'.format(dag_config),
+#     dag=dag,
+# )
 
-# You can directly use a variable from a jinja template
-## {{ var.value.<variable_name> }}
+# # You can directly use a variable from a jinja template
+# ## {{ var.value.<variable_name> }}
 
-t2 = BashOperator(
-    task_id="get_variable_value",
-    bash_command='echo {{ var.value.var3 }} ',
-    dag=dag,
-)
+# t2 = BashOperator(
+#     task_id="get_variable_value",
+#     bash_command='echo {{ var.value.var3 }} ',
+#     dag=dag,
+# )
 
-## {{ var.json.<variable_name> }}
-t3 = BashOperator(
-    task_id="get_variable_json",
-    bash_command='echo {{ var.json.example_variables_config.var3 }} ',
-    dag=dag,
-)
+# ## {{ var.json.<variable_name> }}
+# t3 = BashOperator(
+#     task_id="get_variable_json",
+#     bash_command='echo {{ var.json.example_variables_config.var3 }} ',
+#     dag=dag,
+# )
 
-start >> [t1, t2, t3]
\ No newline at end of file
+# start >> [t1, t2, t3]
\ No newline at end of file
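
Notes on this change: the new PYTHON_DEPS build arg is forwarded to the puckel/docker-airflow Dockerfile, which pip-installs any packages listed there at image build time; pinning SQLAlchemy to 1.2.0 presumably keeps it compatible with Airflow 1.10.1. The example_variables.py DAG is commented out so it no longer loads by default, but its tasks are what the Airflow variables tutorial walks through. For reference, they can be reconstructed roughly as in the sketch below. This is a hedged reconstruction, not the repository's exact file: it assumes Airflow 1.10.x (the image version pinned above) and that two Variables have already been created under Admin -> Variables in the web UI — a plain Variable named var3, and a JSON Variable named example_variables_config with keys var1, var2, var3 (e.g. {"var1": "value1", "var2": [1, 2, 3], "var3": {"k": "value3"}}).

```python
from datetime import datetime

from airflow import DAG
from airflow.models import Variable
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator

default_args = {
    "owner": "airflow",
    "start_date": datetime(2019, 1, 1),
}

dag = DAG(
    dag_id="example_variables",
    schedule_interval="@once",
    default_args=default_args,
)

# Fetch the whole JSON config with a single metadata-DB query, instead of
# one query per Variable.get() call. Requires the "example_variables_config"
# Variable to exist, otherwise the DAG fails to import.
dag_config = Variable.get("example_variables_config", deserialize_json=True)
var3 = dag_config["var3"]

start = DummyOperator(task_id="start", dag=dag)

# Echo the config dict that was deserialized in Python above.
t1 = BashOperator(
    task_id="get_dag_config",
    bash_command='echo "{0}"'.format(dag_config),
    dag=dag,
)

# Read a plain Variable at task run time via the Jinja template
# {{ var.value.<variable_name> }}.
t2 = BashOperator(
    task_id="get_variable_value",
    bash_command="echo {{ var.value.var3 }}",
    dag=dag,
)

# Read one key out of a JSON Variable via {{ var.json.<variable_name>.<key> }}.
t3 = BashOperator(
    task_id="get_variable_json",
    bash_command="echo {{ var.json.example_variables_config.var3 }}",
    dag=dag,
)

start >> [t1, t2, t3]
```

With the containers up, a single task can be exercised without scheduling the DAG, using the command already quoted in the file: `docker-compose run --rm webserver airflow test example_variables get_dag_config 2019-02-15`. Fetching the JSON Variable once with deserialize_json=True avoids the three separate lookups of the earlier Variable.get() approach, while the var.value / var.json templates defer the lookup until the task actually runs.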