Commit

Merge branch 'main' into dependabot/pip/genson-1.3.0
aaronsteers authored Nov 13, 2024
2 parents d385729 + 3a9091c commit 268b100
Showing 9 changed files with 235 additions and 175 deletions.
26 changes: 21 additions & 5 deletions .github/dependabot.yml
@@ -5,13 +5,29 @@

version: 2
updates:
- package-ecosystem: "pip"
- package-ecosystem: pip
directory: "/" # Location of package manifests
commit-message:
prefix: "chore(deps): "
schedule:
interval: "weekly"
interval: daily
labels:
- chore
open-pull-requests-limit: 8 # default is 5

- package-ecosystem: "github-actions"
# Workflow files stored in the default location of `.github/workflows`. (You don't need to specify `/.github/workflows` for `directory`. You can use `directory: "/"`.)
- package-ecosystem: github-actions
open-pull-requests-limit: 5 # default is 5
directory: "/"
commit-message:
prefix: "ci(deps): "
schedule:
interval: "weekly"
interval: daily
labels:
- ci
groups:
# allow combining github-actions updates into a single PR
minor-and-patch:
applies-to: version-updates
update-types:
- patch
- minor
100 changes: 67 additions & 33 deletions .github/workflows/connector-tests.yml
@@ -1,4 +1,15 @@
name: Connectors Tests
name: Test Connectors
on:
workflow_dispatch:
pull_request:
types:
- opened
- synchronize
# TODO: Consider running these only after the "PyTest (Fast)" workflow succeeds.
# workflow_run:
# workflows: [PyTest (Fast)]
# types:
# - completed

concurrency:
# This is the name of the concurrency group. It is used to prevent concurrent runs of the same workflow.
@@ -11,12 +22,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

on:
workflow_dispatch:
pull_request:
types:
- opened
- synchronize
jobs:
cdk_changes:
name: Get Changes
@@ -51,21 +56,6 @@ jobs:
vector-db-based: ${{ steps.changes.outputs.vector-db-based }}
sql: ${{ steps.changes.outputs.sql }}

# # The Connector CI Tests is a status check emitted by airbyte-ci
# # We make it pass once we have determined that there are no changes to the connectors
# - name: "Skip Connectors CI tests"
# if: steps.changes.outputs.src != 'true' && github.event_name == 'pull_request'
# run: |
# curl --request POST \
# --url https://api.github.com/repos/${{ github.repository }}/statuses/${{ github.event.pull_request.head.sha }} \
# --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \
# --header 'content-type: application/json' \
# --data '{
# "state": "success",
# "context": "CDK Changes - Connectors Tests",
# "target_url": "${{ github.event.workflow_run.html_url }}"
# }' \

connectors_ci:
needs: cdk_changes
# We only run the Connectors CI job if there are changes to the connectors on a non-forked PR
@@ -78,6 +68,8 @@ jobs:
fail-fast: false
matrix:
include:
- connector: source-hardcoded-records
cdk_extra: n/a
- connector: source-shopify
cdk_extra: n/a
# Currently not passing CI (unrelated)
@@ -89,34 +81,76 @@ jobs:
cdk_extra: vector-db-based
- connector: destination-motherduck
cdk_extra: sql
if: >
( github.event_name == 'pull_request' && needs.cdk_changes.outputs.src == 'true' && github.event.pull_request.head.repo.fork != true
) || github.event_name == 'workflow_dispatch'

name: "Check: '${{matrix.connector}}' (skip=${{needs.cdk_changes.outputs[matrix.cdk_extra] == 'false'}})"
steps:
- name: Abort if extra not changed (${{matrix.cdk_extra}})
id: no_changes
if: ${{ matrix.cdk_extra != 'n/a' && needs.cdk_changes.outputs[matrix.cdk_extra] == 'false' }}
run: |
echo "Aborting job as specified extra not changed: ${{matrix.cdk_extra}} = ${{ needs.cdk_changes.outputs[matrix.cdk_extra] }}"
echo "> Skipped '${{matrix.connector}}' (no relevant changes)" >> $GITHUB_STEP_SUMMARY
echo "status=cancelled" >> $GITHUB_OUTPUT
exit 1
exit 0
continue-on-error: true
# Get the monorepo so we can test the connectors
- name: Checkout CDK
if: steps.no_changes.outputs.status != 'cancelled'
uses: actions/checkout@v4
with:
path: airbyte-python-cdk
- name: Checkout Airbyte Monorepo
uses: actions/checkout@v4
if: steps.no_changes.outcome != 'failure'
if: steps.no_changes.outputs.status != 'cancelled'
with:
repository: airbytehq/airbyte
ref: master
# TODO: Revert to `master` after Airbyte CI is released:
ref: aj/airbyte-ci/update-python-local-cdk-code
path: airbyte
- name: Test Connector
if: steps.no_changes.outcome != 'failure'
if: steps.no_changes.outputs.status != 'cancelled'
timeout-minutes: 90
env:
GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }}
POETRY_DYNAMIC_VERSIONING_BYPASS: "0.0.0"
# TODO: Revert below to use `tools.airbyte-ci-binary.install` after Airbyte CI is released:
run: |
make tools.airbyte-ci-binary.install
airbyte-ci connectors \
cd airbyte
make tools.airbyte-ci-dev.install
airbyte-ci-dev connectors \
--name ${{matrix.connector}} \
test
--global-status-check-context='Connectors Test: ${{matrix.connector}}'"
--use-local-cdk \
test \
--fail-fast \
--skip-step qa_checks \
--skip-step connector_live_tests
# Upload the job output to the artifacts
- name: Upload Job Output
id: upload_job_output
if: always() && steps.no_changes.outputs.status != 'cancelled'
uses: actions/upload-artifact@v4
with:
name: ${{matrix.connector}}-job-output
path: airbyte/airbyte-ci/connectors/pipelines/pipeline_reports

- name: Evaluate Job Output
if: always() && steps.no_changes.outputs.status != 'cancelled'
run: |
# summarize the airbyte-ci job output json file in the GitHub step summary
json_output_file=$(find airbyte/airbyte-ci/connectors/pipelines/pipeline_reports -name 'output.json' -print -quit)
job_output=$(cat ${json_output_file})
success=$(echo ${job_output} | jq -r '.success')
failed_step=$(echo ${job_output} | jq -r '.failed_steps | select(length > 0) | .[0] // "None"')
run_duration=$(echo ${job_output} | jq -r '.run_duration')
echo "## Job Output for ${{matrix.connector}}" >> $GITHUB_STEP_SUMMARY
echo "- Success: ${success}" >> $GITHUB_STEP_SUMMARY
echo "- Test Duration: $(printf "%.0f" ${run_duration})s" >> $GITHUB_STEP_SUMMARY
if [ "${success}" != "true" ]; then
echo "- Failed Step: ${failed_step}" >> $GITHUB_STEP_SUMMARY
fi
echo -e "\n[Download Job Output](${{steps.upload_job_output.outputs.artifact-url}})" >> $GITHUB_STEP_SUMMARY
if [ "${success}" != "true" ]; then
echo "::error::Test failed for connector '${{ matrix.connector }}' on step '${failed_step}'. Check the logs for more details."
exit 1
fi
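
The "Evaluate Job Output" step above pulls `success`, `failed_steps`, and `run_duration` out of the first `output.json` report with `jq`. For local debugging, a rough Python equivalent might look like the following sketch; the report directory and the three keys come from the step above, and everything else (file layout, error handling) is assumed.

```python
# Hedged sketch: read the same airbyte-ci report the workflow step summarizes.
# The report directory and the `success` / `failed_steps` / `run_duration`
# keys are taken from the step above; anything beyond that is an assumption.
import json
from pathlib import Path

reports_dir = Path("airbyte/airbyte-ci/connectors/pipelines/pipeline_reports")
output_file = next(reports_dir.rglob("output.json"), None)  # first report found
if output_file is None:
    raise SystemExit("no output.json report found")

job_output = json.loads(output_file.read_text())
success = job_output.get("success")
run_duration = job_output.get("run_duration", 0.0)
failed_steps = job_output.get("failed_steps") or []

print(f"success: {success}")
print(f"duration: {run_duration:.0f}s")
if success is not True:
    print(f"failed step: {failed_steps[0] if failed_steps else 'None'}")
```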
59 changes: 59 additions & 0 deletions .github/workflows/pytest_fast.yml
@@ -0,0 +1,59 @@
name: Pytest (Fast)

on:
push:
branches:
- main
paths:
- 'airbyte_cdk/**'
- 'poetry.lock'
- 'pyproject.toml'
pull_request:
paths:
- 'airbyte_cdk/**'
- 'poetry.lock'
- 'pyproject.toml'

jobs:
pytest-fast:
name: Pytest (Fast)
runs-on: ubuntu-latest
steps:
# Common steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Poetry
uses: Gr1N/setup-poetry@v9
with:
poetry-version: "1.7.1"
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
cache: "poetry"
- name: Install dependencies
run: poetry install --all-extras

- name: Run Pytest with Coverage (Fast Tests Only)
timeout-minutes: 20
run: >
poetry run coverage run -m pytest
--durations=5 --exitfirst
-m "not slow"
- name: Print Coverage Report
if: always()
run: poetry run coverage report

- name: Create Coverage Artifacts
if: always()
run: |
poetry run coverage html -d htmlcov
poetry run coverage xml -o htmlcov/coverage.xml
- name: Upload coverage to GitHub Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: fasttest-coverage
path: htmlcov/
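
The new fast workflow deselects anything marked `slow` via `-m "not slow"`. As an illustration only (the test names here are hypothetical, and the `slow` marker is assumed to be registered in the project's pytest configuration), a test module would interact with that filter like this:

```python
# Illustrative sketch: how tests opt in or out of the `-m "not slow"` filter above.
# The marker name comes from the workflow; the test bodies are made up.
import time

import pytest


@pytest.mark.slow  # deselected by the fast workflow, still runs in the full matrix
def test_full_refresh_roundtrip():
    time.sleep(5)  # stand-in for an expensive integration-style check
    assert True


def test_config_parsing():  # unmarked, so it also runs in the fast suite
    assert {"streams": []} == {"streams": []}
```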
@@ -5,70 +5,23 @@
# 2. `pytest`: Run all tests, across multiple Python versions.
#
# Note that `pytest-fast` also skips tests that require credentials, allowing it to run on forks.
name: PyTest
name: PyTest Matrix

on:
push:
branches:
- main
pull_request: {}
paths:
- 'airbyte_cdk/**'
- 'poetry.lock'
- 'pyproject.toml'
pull_request:
paths:
- 'airbyte_cdk/**'
- 'poetry.lock'
- 'pyproject.toml'

jobs:
pytest-fast:
name: Pytest (Fast)
runs-on: ubuntu-latest
steps:
# Common steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Poetry
uses: Gr1N/setup-poetry@v9
with:
poetry-version: "1.7.1"
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.10"
cache: "poetry"
- name: Install dependencies
run: poetry install --all-extras

- name: Run Pytest with Coverage (Fast Tests Only)
timeout-minutes: 60
env:
GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }}
run: >
poetry run coverage run -m pytest
--durations=5 --exitfirst
-m "not slow and not requires_creds and not linting and not flaky"
- name: Run Pytest with Coverage (Flaky Tests Only)
timeout-minutes: 60
continue-on-error: true
env:
GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }}
run: >
poetry run coverage run -m pytest
--durations=5 --exitfirst
-m "flaky and not slow and not requires_creds"
- name: Print Coverage Report
if: always()
run: poetry run coverage report

- name: Create Coverage Artifacts
if: always()
run: |
poetry run coverage html -d htmlcov
poetry run coverage xml -o htmlcov/coverage.xml
- name: Upload coverage to GitHub Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: fasttest-coverage
path: htmlcov/

pytest:
name: Pytest (All, Python ${{ matrix.python-version }}, ${{ matrix.os }})
# Don't run on forks. Run on pushes to main, and on PRs that are not from forks.
14 changes: 10 additions & 4 deletions airbyte_cdk/__init__.py
@@ -282,12 +282,18 @@
"StreamSlice",
]

__version__ = _dunamai.get_version(
"airbyte-cdk",
third_choice=_dunamai.Version.from_any_vcs,
).serialize()
__version__: str
"""Version generated by poetry dynamic versioning during publish.
When running in development, dunamai will calculate a new prerelease version
from existing git release tag info.
"""

try:
__version__ = _dunamai.get_version(
"airbyte-cdk",
third_choice=_dunamai.Version.from_any_vcs,
fallback=_dunamai.Version("0.0.0+dev"),
).serialize()
except:
__version__ = "0.0.0+dev"
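
With the fallback in place, importing the package should always yield a version string, even from a plain source checkout without VCS or dist metadata. A minimal usage sketch (the exact value depends on how the package was installed):

```python
# Usage sketch: __version__ resolves even when dunamai finds no version metadata.
import airbyte_cdk

print(airbyte_cdk.__version__)  # e.g. a dunamai-derived prerelease, or "0.0.0+dev"
```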
@@ -4,10 +4,9 @@
from __future__ import annotations

from enum import Enum
from typing import Any, Dict, List, Optional, Union
from typing import Any, Dict, List, Literal, Optional, Union

from pydantic.v1 import BaseModel, Extra, Field
from typing_extensions import Literal


class AuthFlowType(Enum):
@@ -632,6 +631,7 @@ class HttpResponseFilter(BaseModel):
description="Match the response if its HTTP code is included in this list.",
examples=[[420, 429], [500]],
title="HTTP Codes",
unique_items=True,
)
predicate: Optional[str] = Field(
None,
4 changes: 3 additions & 1 deletion bin/generate_component_manifest_files.py
@@ -13,7 +13,7 @@


PIP_DEPENDENCIES = [
"datamodel_code_generator==0.11.19",
"datamodel_code_generator==0.26.3",
]


@@ -73,6 +73,8 @@ async def main():
"--enum-field-as-literal",
"one",
"--set-default-enum-member",
"--use-double-quotes",
"--remove-special-field-name-prefix",
],
use_entrypoint=True,
)
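
The script drives the generator through its entrypoint, and the newly added flags also exist on the `datamodel-codegen` CLI. A hedged equivalent invocation for a single schema is sketched below; the input/output paths and the `jsonschema` input type are assumptions, while the remaining flags mirror the ones passed above.

```python
# Hedged sketch: the same generator options as a one-off CLI call.
# Paths and --input-file-type are assumed; the other flags mirror the script above.
import subprocess

subprocess.run(
    [
        "datamodel-codegen",
        "--input", "declarative_component_schema.yaml",  # assumed schema path
        "--output", "declarative_component_schema.py",   # assumed output path
        "--input-file-type", "jsonschema",               # assumption
        "--enum-field-as-literal", "one",
        "--set-default-enum-member",
        "--use-double-quotes",
        "--remove-special-field-name-prefix",
    ],
    check=True,
)
```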
