diff --git a/stock_data_analytics/.gitignore b/stock_data_analytics/.gitignore new file mode 100644 index 0000000..bc83b30 --- /dev/null +++ b/stock_data_analytics/.gitignore @@ -0,0 +1,163 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/
+
+#Desktop Services Store
+.DS_Store
\ No newline at end of file
diff --git a/stock_data_analytics/README.md b/stock_data_analytics/README.md new file mode 100644 index 0000000..9268e2b --- /dev/null +++ b/stock_data_analytics/README.md @@ -0,0 +1,250 @@
+# Stock Data Analytics with Airbyte, Polygon Stock API, dbt, Dagster and BigQuery
+
+Welcome to the "Stock Data Analytics" repository! 🌟 This is your go-to place to easily set up a data stack using Airbyte, dbt, BigQuery, and Dagster. With this setup, you can pull stock data from the Polygon Stock API, put it into BigQuery, and play around with it using dbt and Dagster.
+
+This Quickstart is all about making things easy, getting you started quickly, and showing you how smoothly all these tools can work together!
+
+## Table of Contents
+
+- [Stock Data Analytics with Airbyte, Polygon Stock API, dbt, Dagster and BigQuery](#stock-data-analytics-with-airbyte-polygon-stock-api-dbt-dagster-and-bigquery)
+  - [Table of Contents](#table-of-contents)
+  - [Infrastructure Layout](#infrastructure-layout)
+  - [Prerequisites](#prerequisites)
+  - [1. Setting an environment for your project](#1-setting-an-environment-for-your-project)
+  - [2. Polygon Stock API Key](#2-polygon-stock-api-key)
+    - [1. Sign Up for a Polygon Stock API Key](#1-sign-up-for-a-polygon-stock-api-key)
+  - [3. Setting Up BigQuery](#3-setting-up-bigquery)
+    - [1. **Create a Google Cloud Project**](#1-create-a-google-cloud-project)
+    - [2. **Create BigQuery Datasets**](#2-create-bigquery-datasets)
+    - [3. **Create Service Accounts and Assign Roles**](#3-create-service-accounts-and-assign-roles)
+    - [4. **Generate JSON Keys for Service Accounts**](#4-generate-json-keys-for-service-accounts)
+  - [4. Setting Up Airbyte Connectors](#4-setting-up-airbyte-connectors)
+    - [1. Setting Up Airbyte Connectors with the Airbyte UI](#1-setting-up-airbyte-connectors-with-the-airbyte-ui)
+  - [5. Setting Up the dbt Project](#5-setting-up-the-dbt-project)
+  - [6. Orchestrating with Dagster](#6-orchestrating-with-dagster)
+  - [Next Steps](#next-steps)
+    - [1. **Explore the Data and Insights**](#1-explore-the-data-and-insights)
+    - [2. **Optimize Your dbt Models**](#2-optimize-your-dbt-models)
+    - [3. **Expand Your Data Sources**](#3-expand-your-data-sources)
+    - [4. **Enhance Data Quality and Testing**](#4-enhance-data-quality-and-testing)
+    - [5. **Automate and Monitor Your Pipelines**](#5-automate-and-monitor-your-pipelines)
+    - [6. **Scale Your Setup**](#6-scale-your-setup)
+    - [7. **Contribute to the Community**](#7-contribute-to-the-community)
+
+## Infrastructure Layout
+![Infrastructure Layout](./assets/polygon.png)
+
+## Prerequisites
+
+Before you embark on this integration, ensure you have the following set up and ready:
+
+1. **Python 3.10 or later**: If not installed, download and install it from [Python's official website](https://www.python.org/downloads/).
+
+2. **Docker and Docker Compose (Docker Desktop)**: Install [Docker](https://docs.docker.com/get-docker/) following the official documentation for your specific OS.
+
+3. **Airbyte OSS version**: Deploy the open-source version of Airbyte. Follow the installation instructions from the [Airbyte Documentation](https://docs.airbyte.com/quickstart/deploy-airbyte/).
+
+4. **Google Cloud account with BigQuery**: You will also need to add the necessary permissions to allow Airbyte and dbt to access the data in BigQuery. A step-by-step guide is provided [below](#3-setting-up-bigquery).
+
+5. **Polygon Stock API**: You can grab your free Polygon API key from [here](https://polygon.io/) after opening an account.
No credit card is required for the starter version.
+
+## 1. Setting an environment for your project
+
+Get the project up and running on your local machine by following these steps:
+
+1. **Clone the repository (Clone only this quickstart)**:
+   ```bash
+   git clone --filter=blob:none --sparse https://github.com/airbytehq/quickstarts.git
+   ```
+
+   ```bash
+   cd quickstarts
+   ```
+
+   ```bash
+   git sparse-checkout add stock_data_analytics
+   ```
+
+
+2. **Navigate to the directory**:
+   ```bash
+   cd stock_data_analytics
+   ```
+
+3. **Set Up a Virtual Environment**:
+   - For Mac:
+   ```bash
+   python3 -m venv venv
+   source venv/bin/activate
+   ```
+   - For Windows:
+   ```bash
+   python -m venv venv
+   .\venv\Scripts\activate
+   ```
+
+4. **Install Dependencies**:
+   ```bash
+   pip install -e ".[dev]"
+   ```
+
+## 2. Polygon Stock API Key
+
+To extract stock data from the Polygon API (and keep the API key in its own environment variable file), follow these steps:
+
+#### 1. Sign Up for a Polygon Stock API Key
+
+1. Visit the [Polygon website](https://polygon.io/).
+2. Sign up for an account or log in if you already have one.
+3. Once logged in, go to the dashboard and scroll down to find your Polygon API key.
+
+
+## 3. Setting Up BigQuery
+
+#### 1. **Create a Google Cloud Project**
+   - If you have a Google Cloud project, you can skip this step.
+   - Go to the [Google Cloud Console](https://console.cloud.google.com/).
+   - Click on the "Select a project" dropdown at the top right and select "New Project".
+   - Give your project a name and follow the steps to create it.
+
+#### 2. **Create BigQuery Datasets**
+   - In the Google Cloud Console, go to BigQuery.
+   - Create two new datasets: `raw_data` for Airbyte and `transformed_data` for dbt.
+   - If you pick different names, remember to change the names in the code too.
+
+   **How to create a dataset:**
+   - In the left sidebar, click on your project name.
+   - Click “Create Dataset”.
+   - Enter the dataset ID (either `raw_data` or `transformed_data`).
+   - Click "Create Dataset".
+
+#### 3. **Create Service Accounts and Assign Roles**
+   - Go to “IAM & Admin” > “Service accounts” in the Google Cloud Console.
+   - Click “Create Service Account”.
+   - Name your service account (like `airbyte-service-account`).
+   - Assign the “BigQuery Data Editor” and “BigQuery Job User” roles to the service account.
+   - Follow the same steps to create another service account for dbt (like `dbt-service-account`) and assign the same roles.
+
+   **How to create a service account and assign roles:**
+   - While creating the service account, under the “Grant this service account access to project” section, click the “Role” dropdown.
+   - Choose the “BigQuery Data Editor” and “BigQuery Job User” roles.
+   - Finish the creation process.
+
+#### 4. **Generate JSON Keys for Service Accounts**
+   - For both service accounts, generate a JSON key so each service account can authenticate.
+
+   **How to generate a JSON key:**
+   - Find the service account in the “Service accounts” list.
+   - Click on the service account name.
+   - In the “Keys” section, click “Add Key” and pick JSON.
+   - The key will download automatically. Keep it safe and don’t share it.
+   - Do this for the other service account too.
+
+## 4. Setting Up Airbyte Connectors
+Here, you set up the source and destination connectors manually using the Airbyte UI.
+
+### 1. Setting Up Airbyte Connectors with the Airbyte UI
+In the Airbyte UI, configure the Polygon Stock API connector as the source (using the API key from step 2) and BigQuery as the destination (using the dataset and the Airbyte service account key you created in step 3).
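+
+Before wiring up the connector, you may want to confirm that your Polygon API key works. The snippet below is a minimal, illustrative sketch (not part of this repository) that calls Polygon's aggregates endpoint with the `requests` library; the ticker, date range, and the `POLYGON_API_KEY` environment variable are assumptions chosen to mirror the Terraform source configuration in `infra/airbyte/main.tf`.
+
+```python
+# check_polygon_key.py -- hypothetical helper, not included in this repository
+import os
+
+import requests
+
+api_key = os.environ["POLYGON_API_KEY"]  # export your key before running this
+
+# Daily IBM bars for the same window used by the Terraform source resource.
+url = "https://api.polygon.io/v2/aggs/ticker/IBM/range/1/day/2023-09-26/2023-09-28"
+
+resp = requests.get(url, params={"apiKey": api_key, "sort": "asc"})
+resp.raise_for_status()
+payload = resp.json()
+
+print(payload.get("status"), payload.get("resultsCount"))
+for bar in payload.get("results", []):
+    # c = close, o = open, h = high, l = low, v = volume, t = timestamp in ms
+    print(bar["t"], bar["o"], bar["c"])
+```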
+
+- Here are specific [BigQuery](https://docs.airbyte.com/integrations/destinations/bigquery) destination instructions.
+- Follow these [steps](https://docs.airbyte.com/quickstart/set-up-a-connection) for more details on setting up a connection.
+
+![Airbyte UI Layout](./assets/connect.PNG)
+## 5. Setting Up the dbt Project
+
+[dbt (data build tool)](https://www.getdbt.com/) allows you to transform your data by writing, documenting, and executing SQL workflows. Setting up the dbt project requires specifying connection details for your data platform, in this case, BigQuery. Here’s a step-by-step guide to help you set this up:
+
+1. **Navigate to the dbt Project Directory**:
+
+   Change to the directory containing the dbt configuration:
+   ```bash
+   cd dbt_project
+   ```
+
+2. **Update Connection Details**:
+
+   You'll find a `profiles.yml` file within the directory. This file contains configurations for dbt to connect with your data platform. Update this file with your BigQuery connection details.
+
+3. **Utilize Environment Variables (Optional but Recommended)**:
+
+   To keep your credentials secure, you can leverage environment variables. An example is provided within the `profiles.yml` file.
+
+4. **Test the Connection**:
+
+   Once you’ve updated the connection details, you can test the connection to your BigQuery instance using:
+   ```bash
+   dbt debug
+   ```
+
+   If everything is set up correctly, this command should report a successful connection to BigQuery.
+
+5. **Run the Models**:
+
+   If you would like to run the dbt models manually at this point, you can do so by executing:
+   ```bash
+   dbt run
+   ```
+
+   You can verify the data has been transformed by going to BigQuery and checking the `transformed_data` dataset.
+
+## 6. Orchestrating with Dagster
+
+[Dagster](https://dagster.io/) is a modern data orchestrator designed to help you build, test, and monitor your data workflows. In this section, we'll walk you through setting up Dagster to oversee both the Airbyte and dbt workflows:
+
+1. **Navigate to the Orchestration Directory**:
+
+   Switch to the directory containing the Dagster orchestration configurations:
+   ```bash
+   cd orchestration
+   ```
+
+2. **Set Environment Variables**:
+
+   Dagster requires certain environment variables to be set to interact with other tools like dbt and Airbyte. Set the following variables:
+
+   ```bash
+   export DAGSTER_DBT_PARSE_PROJECT_ON_LOAD=1
+   export AIRBYTE_PASSWORD=password
+   ```
+
+   Note: The `AIRBYTE_PASSWORD` is set to `password` as a default for local Airbyte instances. If you've changed this during your Airbyte setup, ensure you use the appropriate password here.
+
+3. **Launch the Dagster UI**:
+
+   With the environment variables in place, kick-start the Dagster UI:
+   ```bash
+   dagster dev
+   ```
+
+4. **Access Dagster in Your Browser**:
+
+   Open your browser and navigate to:
+   ```
+   http://127.0.0.1:3000
+   ```
+
+   Here, you should see assets for both Airbyte and dbt.
+
+## Next Steps
+
+Congratulations on deploying and running the Polygon stock data Quickstart! 🎉 Here are some suggestions on what you can explore next to dive deeper and get more out of your project:
+
+### 1. **Explore the Data and Insights**
+   - Dive into the datasets in BigQuery, run some queries, and explore the data you've collected and transformed. This is your chance to uncover insights and understand the data better!
+
+### 2. **Optimize Your dbt Models**
+   - Review the transformations you’ve applied using dbt. Try optimizing the models or creating new ones based on your evolving needs and the insights you want to extract; a minimal sketch of pulling a model's output into Python for inspection follows below.
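+
+To explore or spot-check the transformed models from Python, the sketch below pulls the five-day SMA model into a pandas DataFrame using the `google-cloud-bigquery` client. This is illustrative only: it assumes the `transformed_data` dataset and the `simple_moving_avarage` model from this Quickstart, that `GOOGLE_APPLICATION_CREDENTIALS` points at a service account key with BigQuery access, and that `pandas` and `db-dtypes` are installed for `to_dataframe()`.
+
+```python
+# explore_sma.py -- illustrative helper, not part of this repository
+from google.cloud import bigquery
+
+client = bigquery.Client()  # picks up GOOGLE_APPLICATION_CREDENTIALS and the default project
+
+query = """
+    SELECT date, close, five_day_sma
+    FROM `transformed_data.simple_moving_avarage`
+    ORDER BY date
+"""
+df = client.query(query).to_dataframe()
+
+# Recompute the 5-day SMA in pandas as a quick cross-check of the dbt model
+# (the model uses ROWS BETWEEN 4 PRECEDING AND CURRENT ROW, i.e. min_periods=1).
+df["sma_check"] = df["close"].rolling(window=5, min_periods=1).mean()
+print(df.tail())
+```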
+ +### 3. **Expand Your Data Sources** + - Add more data sources to Airbyte. Explore different types of sources available, and see how they can enrich your existing datasets and broaden your analytical capabilities. + +### 4. **Enhance Data Quality and Testing** + - Implement data quality tests in dbt to ensure the reliability and accuracy of your transformations. Use dbt's testing features to validate your data and catch issues early on. + +### 5. **Automate and Monitor Your Pipelines** + - Explore more advanced Dagster configurations and setups to automate your pipelines further and set up monitoring and alerting to be informed of any issues immediately. + +### 6. **Scale Your Setup** + - Consider scaling your setup to handle more data, more sources, and more transformations. Optimize your configurations and resources to ensure smooth and efficient processing of larger datasets. + +### 7. **Contribute to the Community** + - Share your learnings, optimizations, and new configurations with the community. Contribute to the respective tool’s communities and help others learn and grow. \ No newline at end of file diff --git a/stock_data_analytics/assets/connect.PNG b/stock_data_analytics/assets/connect.PNG new file mode 100644 index 0000000..4c33e0f Binary files /dev/null and b/stock_data_analytics/assets/connect.PNG differ diff --git a/stock_data_analytics/assets/polygon.png b/stock_data_analytics/assets/polygon.png new file mode 100644 index 0000000..8552eb6 Binary files /dev/null and b/stock_data_analytics/assets/polygon.png differ diff --git a/stock_data_analytics/dbt_project/.gitignore b/stock_data_analytics/dbt_project/.gitignore new file mode 100644 index 0000000..eae4ddb --- /dev/null +++ b/stock_data_analytics/dbt_project/.gitignore @@ -0,0 +1,6 @@ + +target/ +dbt_packages/ +logs/ + +.user.yml diff --git a/stock_data_analytics/dbt_project/README.md b/stock_data_analytics/dbt_project/README.md new file mode 100644 index 0000000..7874ac8 --- /dev/null +++ b/stock_data_analytics/dbt_project/README.md @@ -0,0 +1,15 @@ +Welcome to your new dbt project! + +### Using the starter project + +Try running the following commands: +- dbt run +- dbt test + + +### Resources: +- Learn more about dbt [in the docs](https://docs.getdbt.com/docs/introduction) +- Check out [Discourse](https://discourse.getdbt.com/) for commonly asked questions and answers +- Join the [chat](https://community.getdbt.com/) on Slack for live discussions and support +- Find [dbt events](https://events.getdbt.com) near you +- Check out [the blog](https://blog.getdbt.com/) for the latest news on dbt's development and best practices diff --git a/stock_data_analytics/dbt_project/analyses/.gitkeep b/stock_data_analytics/dbt_project/analyses/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/stock_data_analytics/dbt_project/dbt_project.yml b/stock_data_analytics/dbt_project/dbt_project.yml new file mode 100644 index 0000000..125acd1 --- /dev/null +++ b/stock_data_analytics/dbt_project/dbt_project.yml @@ -0,0 +1,39 @@ + +# Name your project! Project names should contain only lowercase characters +# and underscores. A good package name should reflect your organization's +# name or the intended use of these models +name: 'dbt_project' +version: '1.0.0' +config-version: 2 + +# This setting configures which "profile" dbt uses for this project. +profile: 'dbt_project' + +# These configurations specify where dbt should look for different types of files. 
+
+# The `model-paths` config, for example, states that models in this project can be
+# found in the "models/" directory. You probably won't need to change these!
+model-paths: ["models"]
+analysis-paths: ["analyses"]
+test-paths: ["tests"]
+seed-paths: ["seeds"]
+macro-paths: ["macros"]
+snapshot-paths: ["snapshots"]
+
+clean-targets: # directories to be removed by `dbt clean`
+  - "target"
+  - "dbt_packages"
+
+
+# Configuring models
+# Full documentation: https://docs.getdbt.com/docs/configuring-models
+
+# In this config, we tell dbt to build all models in the staging/ and marts/
+# directories as views. These settings can be overridden in the individual model
+# files using the `{{ config(...) }}` macro.
+models:
+  dbt_project:
+    # Config indicated by + applies to all files under models/staging/ and models/marts/
+    staging:
+      +materialized: view
+    marts:
+      +materialized: view
diff --git a/stock_data_analytics/dbt_project/macros/.gitkeep b/stock_data_analytics/dbt_project/macros/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/stock_data_analytics/dbt_project/models/marts/calculate_daily_return.sql b/stock_data_analytics/dbt_project/models/marts/calculate_daily_return.sql new file mode 100644 index 0000000..1c606ad --- /dev/null +++ b/stock_data_analytics/dbt_project/models/marts/calculate_daily_return.sql @@ -0,0 +1,5 @@
+-- Daily return: percent change of the close (c) versus the previous bar, ordered by the millisecond timestamp (t)
+SELECT
+    TIMESTAMP_SECONDS(CAST(t / 1000 AS INT64)) AS date,
+    (c - lag(c) OVER (ORDER BY t / 1000)) / lag(c) OVER (ORDER BY t / 1000) AS daily_return
+FROM
+    transformed_data.stg_stock_api
diff --git a/stock_data_analytics/dbt_project/models/marts/predict_stock_price.sql b/stock_data_analytics/dbt_project/models/marts/predict_stock_price.sql new file mode 100644 index 0000000..813b342 --- /dev/null +++ b/stock_data_analytics/dbt_project/models/marts/predict_stock_price.sql @@ -0,0 +1,13 @@
+-- Predict the close price with BigQuery ML; assumes a model named transformed_data.predict_stock has been trained separately
+SELECT
+    date,
+    predicted_label AS predicted_stock_price
+FROM
+    ML.PREDICT(MODEL transformed_data.predict_stock, (
+        SELECT
+            TIMESTAMP_SECONDS(CAST(t / 1000 AS INT64)) AS date,
+            h AS high,
+            l AS low,
+            o AS open
+        FROM
+            transformed_data.stg_stock_api
+    ))
\ No newline at end of file
diff --git a/stock_data_analytics/dbt_project/models/marts/simple_moving_avarage.sql b/stock_data_analytics/dbt_project/models/marts/simple_moving_avarage.sql new file mode 100644 index 0000000..bf57386 --- /dev/null +++ b/stock_data_analytics/dbt_project/models/marts/simple_moving_avarage.sql @@ -0,0 +1,6 @@
+-- Five-day simple moving average of the close price (c), ordered by the millisecond timestamp (t)
+SELECT
+    TIMESTAMP_SECONDS(CAST(t / 1000 AS INT64)) AS date,
+    c AS close,
+    AVG(c) OVER (ORDER BY t / 1000 ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) AS five_day_sma
+FROM
+    transformed_data.stg_stock_api
\ No newline at end of file
diff --git a/stock_data_analytics/dbt_project/models/sources/polygon_source.yml b/stock_data_analytics/dbt_project/models/sources/polygon_source.yml new file mode 100644 index 0000000..338565e --- /dev/null +++ b/stock_data_analytics/dbt_project/models/sources/polygon_source.yml @@ -0,0 +1,23 @@
+version: 2
+
+sources:
+  - name: polygon
+    # Use your BigQuery project ID
+    database: "{{ env_var('BIGQUERY_PROJECT_ID', '') }}"
+    # Use your BigQuery dataset name
+    schema: polygon_airbyte
+
+    tables:
+
+      - name: stock_api
+        description: "Stock aggregates (stock_api stream) loaded by the Polygon connector."
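+        # Column names follow Polygon's aggregate-bar fields (assumed mapping):
+        # c = close, o = open, h = high, l = low, v = volume,
+        # vw = volume-weighted average price, t = timestamp in milliseconds,
+        # n = number of transactions, otc = whether the ticker trades over the counter.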
+ columns: + - name: c + - name: t + - name: v + - name: h + - name: l + - name: n + - name: o + - name: otc + - name: vw \ No newline at end of file diff --git a/stock_data_analytics/dbt_project/models/staging/stg_stock_api.sql b/stock_data_analytics/dbt_project/models/staging/stg_stock_api.sql new file mode 100644 index 0000000..4268ded --- /dev/null +++ b/stock_data_analytics/dbt_project/models/staging/stg_stock_api.sql @@ -0,0 +1,3 @@ +select + * +from {{ source('polygon', 'stock_api') }} \ No newline at end of file diff --git a/stock_data_analytics/dbt_project/profiles.yml b/stock_data_analytics/dbt_project/profiles.yml new file mode 100644 index 0000000..65cc52f --- /dev/null +++ b/stock_data_analytics/dbt_project/profiles.yml @@ -0,0 +1,16 @@ +dbt_project: + outputs: + dev: + dataset: transformed_data + job_execution_timeout_seconds: 300 + job_retries: 1 + # Use an env variable to indicate your JSON key file path + keyfile: "{{ env_var('DBT_BIGQUERY_KEYFILE_PATH', '') }}" + location: US + method: service-account + priority: interactive + # Indicate your BigQuery project ID + project: your_project_id + threads: 1 + type: bigquery + target: dev \ No newline at end of file diff --git a/stock_data_analytics/dbt_project/seeds/.gitkeep b/stock_data_analytics/dbt_project/seeds/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/stock_data_analytics/dbt_project/snapshots/.gitkeep b/stock_data_analytics/dbt_project/snapshots/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/stock_data_analytics/dbt_project/tests/.gitkeep b/stock_data_analytics/dbt_project/tests/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/stock_data_analytics/infra/.gitignore b/stock_data_analytics/infra/.gitignore new file mode 100644 index 0000000..dc82526 --- /dev/null +++ b/stock_data_analytics/infra/.gitignore @@ -0,0 +1,34 @@ +# Local .terraform directories +**/.terraform/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log +crash.*.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# password, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. +*.tfvars +*.tfvars.json + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc \ No newline at end of file diff --git a/stock_data_analytics/infra/airbyte/.terraform.lock.hcl b/stock_data_analytics/infra/airbyte/.terraform.lock.hcl new file mode 100644 index 0000000..912fae4 --- /dev/null +++ b/stock_data_analytics/infra/airbyte/.terraform.lock.hcl @@ -0,0 +1,25 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/airbytehq/airbyte" { + version = "0.3.3" + constraints = "0.3.3" + hashes = [ + "h1:0LmuAc5LvlMuOUPtNEaCAh9FHrV/C877bDJhm9Lz8MU=", + "zh:0efa470b34d9b912b47efe4469c51713bfc3c2413e52c17e1e903f2a3cddb2f6", + "zh:1bddd69fa2c2d4f3e239d60555446df9bc4ce0c0cabbe7e092fe1d44989ab004", + "zh:2e20540403a0010007b53456663fb037b24e30f6c8943f65da1bcf7fa4dfc8a6", + "zh:2f415369ad884e8b7115a5c5ff229d052f7af1fca27abbfc8ebef379ed11aec4", + "zh:46fd9a906f4b6461112dcc5a5aa01a3fcd7a19a72d4ad0b2e37790da37701fe1", + "zh:83503ebb77bb6d6941c42ba323cf22380d08a1506554a2dcc8ac54e74c0886a1", + "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f", + "zh:8fd770eff726826d3a63b9e3733c5455b5cde004027b04ee3f75888eb8538c90", + "zh:b0fc890ed4f9b077bf70ed121cc3550e7a07d16e7798ad517623274aa62ad7b0", + "zh:c2a01612362da9b73cd5958f281e1aa7ff09af42182e463097d11ed78e778e72", + "zh:c64b2bb1887a0367d64ba3393d4b3a16c418cf5b1792e2e7aae7c0b5413eb334", + "zh:ce14ebbf0ed91913ec62655a511763dec62b5779de9a209bd6f1c336640cddc0", + "zh:e0662ca837eee10f7733ea9a501d995281f56bd9b410ae13ad03eb106011db14", + "zh:e103d480fc6066004bc98e9e04a141a1f55b918cc2912716beebcc6fc4c872fb", + "zh:e2507049098f0f1b21cb56870f4a5ef624bcf6d3959e5612eada1f8117341648", + ] +} diff --git a/stock_data_analytics/infra/airbyte/main.tf b/stock_data_analytics/infra/airbyte/main.tf new file mode 100644 index 0000000..ab9c5a0 --- /dev/null +++ b/stock_data_analytics/infra/airbyte/main.tf @@ -0,0 +1,49 @@ +// Airbyte Terraform provider documentation: https://registry.terraform.io/providers/airbytehq/airbyte/latest/docs + +// Sources +resource "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" { + configuration = { + api_key = var.api_key + end_date = "2023-09-28" + multiplier = 1 + sort = "asc" + source_type = "polygon-stock-api" + start_date = "2023-09-26" + stocks_ticker = "IBM" + timespan = "day" + } + name = "Polygon Stock API" + workspace_id = var.workspace_id +} + +// Destinations +resource "airbyte_destination_bigquery" "bigquery" { + configuration = { + dataset_id = var.dataset_id + dataset_location = "US" + destination_type = "bigquery" + project_id = var.project_id + credentials_json = var.credentials_json + loading_method = { + destination_bigquery_loading_method_standard_inserts = { + method = "Standard" + } + } + } + name = "BigQuery" + workspace_id = var.workspace_id +} + +// Connections +resource "airbyte_connection" "polygon_bigquery" { + name = "PolygonAPI to BigQuery" + source_id = airbyte_source_polygon_stock_api.my_source_polygonstockapi.source_id + destination_id = airbyte_destination_bigquery.bigquery.destination_id + configurations = { + streams = [ + { + name = "stock-api" + } + ] + } +} \ No newline at end of file diff --git a/stock_data_analytics/infra/airbyte/provider.tf b/stock_data_analytics/infra/airbyte/provider.tf new file mode 100644 index 0000000..20eff1b --- /dev/null +++ b/stock_data_analytics/infra/airbyte/provider.tf @@ -0,0 +1,20 @@ +// Airbyte Terraform provider documentation: https://registry.terraform.io/providers/airbytehq/airbyte/latest/docs + +terraform { + required_providers { + airbyte = { + source = "airbytehq/airbyte" + version = "0.3.3" + } + } +} + +provider "airbyte" { + // If running locally (Airbyte OSS) with docker-compose using the airbyte-proxy, + // include the actual password/username you've set up (or use the defaults below) + username = "airbyte" + password = "password" + + // if running locally (Airbyte OSS), include the server url to the 
airbyte-api-server
+  server_url = "http://localhost:8006/v1/"
+}
diff --git a/stock_data_analytics/infra/airbyte/variables.tf b/stock_data_analytics/infra/airbyte/variables.tf new file mode 100644 index 0000000..cd3935d --- /dev/null +++ b/stock_data_analytics/infra/airbyte/variables.tf @@ -0,0 +1,19 @@
+variable "workspace_id" {
+  type = string
+}
+
+variable "dataset_id" {
+  type = string
+}
+
+variable "project_id" {
+  type = string
+}
+
+variable "credentials_json" {
+  type = string
+}
+
+variable "api_key" {
+  type = string
+}
\ No newline at end of file
diff --git a/stock_data_analytics/orchestration/orchestration/__init__.py b/stock_data_analytics/orchestration/orchestration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/stock_data_analytics/orchestration/orchestration/assets.py b/stock_data_analytics/orchestration/orchestration/assets.py new file mode 100644 index 0000000..ac8ee41 --- /dev/null +++ b/stock_data_analytics/orchestration/orchestration/assets.py @@ -0,0 +1,20 @@
+import os
+from dagster import OpExecutionContext
+from dagster_dbt import DbtCliResource, dbt_assets
+from dagster_airbyte import AirbyteResource, load_assets_from_airbyte_instance
+
+from .constants import dbt_manifest_path
+
+@dbt_assets(manifest=dbt_manifest_path)
+def dbt_project_dbt_assets(context: OpExecutionContext, dbt: DbtCliResource):
+    yield from dbt.cli(["build"], context=context).stream()
+
+airbyte_instance = AirbyteResource(
+    host="localhost",
+    port="8000",
+    # If using basic auth, include username and password:
+    username="airbyte",
+    password=os.getenv("AIRBYTE_PASSWORD")
+)
+
+# Group the Airbyte-loaded tables under a "polygon" asset key prefix in the Dagster UI
+airbyte_assets = load_assets_from_airbyte_instance(airbyte_instance, key_prefix="polygon")
\ No newline at end of file
diff --git a/stock_data_analytics/orchestration/orchestration/constants.py b/stock_data_analytics/orchestration/orchestration/constants.py new file mode 100644 index 0000000..709888c --- /dev/null +++ b/stock_data_analytics/orchestration/orchestration/constants.py @@ -0,0 +1,15 @@
+import os
+from pathlib import Path
+
+from dagster_dbt import DbtCliResource
+
+dbt_project_dir = Path(__file__).joinpath("..", "..", "..", "dbt_project").resolve()
+dbt = DbtCliResource(project_dir=os.fspath(dbt_project_dir))
+
+# If DAGSTER_DBT_PARSE_PROJECT_ON_LOAD is set, a manifest will be created at runtime.
+# Otherwise, we expect a manifest to be present in the project's target directory.
+if os.getenv("DAGSTER_DBT_PARSE_PROJECT_ON_LOAD"): + dbt_parse_invocation = dbt.cli(["parse"], manifest={}).wait() + dbt_manifest_path = dbt_parse_invocation.target_path.joinpath("manifest.json") +else: + dbt_manifest_path = dbt_project_dir.joinpath("target", "manifest.json") \ No newline at end of file diff --git a/stock_data_analytics/orchestration/orchestration/definitions.py b/stock_data_analytics/orchestration/orchestration/definitions.py new file mode 100644 index 0000000..2d92dce --- /dev/null +++ b/stock_data_analytics/orchestration/orchestration/definitions.py @@ -0,0 +1,16 @@ +import os + +from dagster import Definitions +from dagster_dbt import DbtCliResource + +from .assets import dbt_project_dbt_assets, airbyte_assets +from .constants import dbt_project_dir +from .schedules import schedules + +defs = Definitions( + assets=[dbt_project_dbt_assets, airbyte_assets], + schedules=schedules, + resources={ + "dbt": DbtCliResource(project_dir=os.fspath(dbt_project_dir)), + }, +) \ No newline at end of file diff --git a/stock_data_analytics/orchestration/orchestration/schedules.py b/stock_data_analytics/orchestration/orchestration/schedules.py new file mode 100644 index 0000000..9c0ac94 --- /dev/null +++ b/stock_data_analytics/orchestration/orchestration/schedules.py @@ -0,0 +1,15 @@ +""" +To add a daily schedule that materializes your dbt assets, uncomment the following lines. +""" +from dagster_dbt import build_schedule_from_dbt_selection + +from .assets import dbt_project_dbt_assets + +schedules = [ +# build_schedule_from_dbt_selection( +# [dbt_project_dbt_assets], +# job_name="materialize_dbt_models", +# cron_schedule="0 0 * * *", +# dbt_select="fqn:*", +# ), +] \ No newline at end of file diff --git a/stock_data_analytics/orchestration/pyproject.toml b/stock_data_analytics/orchestration/pyproject.toml new file mode 100644 index 0000000..6e83f2f --- /dev/null +++ b/stock_data_analytics/orchestration/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[tool.dagster] +module_name = "orchestration.definitions" +code_location_name = "orchestration" \ No newline at end of file diff --git a/stock_data_analytics/orchestration/setup.py b/stock_data_analytics/orchestration/setup.py new file mode 100644 index 0000000..5b102a9 --- /dev/null +++ b/stock_data_analytics/orchestration/setup.py @@ -0,0 +1,19 @@ +from setuptools import find_packages, setup + +setup( + name="orchestration", + version="0.0.1", + packages=find_packages(), + install_requires=[ + "dagster", + "dagster-cloud", + "dagster-dbt", + "dbt-core>=1.4.0", + "dbt-bigquery", + ], + extras_require={ + "dev": [ + "dagster-webserver", + ] + }, +) \ No newline at end of file diff --git a/stock_data_analytics/setup.py b/stock_data_analytics/setup.py new file mode 100644 index 0000000..2f6b135 --- /dev/null +++ b/stock_data_analytics/setup.py @@ -0,0 +1,14 @@ +from setuptools import find_packages, setup + +setup( + name="airbyte-dbt-dagster", + packages=find_packages(), + install_requires=[ + "dbt-bigquery", + "dagster", + "dagster-cloud", + "dagster-dbt", + "dagster-airbyte", + ], + extras_require={"dev": ["dagit", "pytest"]}, +) \ No newline at end of file
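+
+The root `setup.py` above lists `pytest` in its `dev` extras, but the repository ships no tests. The snippet below is a hypothetical smoke test you could drop under `stock_data_analytics/orchestration/tests/` to confirm that the Dagster code location loads; it assumes the package is installed via `pip install -e ".[dev]"`, that `AIRBYTE_PASSWORD` is exported, and that a dbt manifest is available (or `DAGSTER_DBT_PARSE_PROJECT_ON_LOAD=1` is set), since `orchestration.definitions` resolves both when imported.
+
+```python
+# tests/test_definitions.py -- hypothetical example, not part of this diff
+from dagster import Definitions
+
+
+def test_definitions_load():
+    # Importing the module builds the asset, resource, and schedule definitions;
+    # a broken dbt manifest or missing environment variable surfaces here as an exception.
+    from orchestration.definitions import defs
+
+    assert isinstance(defs, Definitions)
+```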