diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 66a305f3ba465..c265497ecb5d7 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.40.18 +current_version = 0.40.19 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-[a-z]+)? @@ -61,3 +61,7 @@ serialize = [bumpversion:file:octavia-cli/install.sh] [bumpversion:file:octavia-cli/setup.py] + +[bumpversion:file:airbyte-connector-builder-server/Dockerfile] + +[bumpversion:file:airbyte-connector-builder-server/setup.py] diff --git a/.env b/.env index 61f8e2c720e9e..b2a0fb0a29bc0 100644 --- a/.env +++ b/.env @@ -10,7 +10,7 @@ ### SHARED ### -VERSION=0.40.18 +VERSION=0.40.19 # When using the airbyte-db via default docker image CONFIG_ROOT=/data @@ -57,6 +57,7 @@ CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION=0.35.15.001 ### AIRBYTE SERVICES ### TEMPORAL_HOST=airbyte-temporal:7233 INTERNAL_API_HOST=airbyte-server:8001 +CONNECTOR_BUILDER_API_HOST=airbyte-connector-builder-server:80 WEBAPP_URL=http://localhost:8000/ # Although not present as an env var, required for webapp configuration. API_URL=/api/v1/ @@ -92,6 +93,8 @@ MAX_SYNC_WORKERS=5 MAX_SPEC_WORKERS=5 MAX_CHECK_WORKERS=5 MAX_DISCOVER_WORKERS=5 +MAX_NOTIFY_WORKERS=5 +SHOULD_RUN_NOTIFY_WORKFLOWS=false # Temporal Activity configuration ACTIVITY_MAX_ATTEMPT= ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS= diff --git a/.env.dev b/.env.dev index 3fa990232cb80..de61acefef9e1 100644 --- a/.env.dev +++ b/.env.dev @@ -23,6 +23,7 @@ HACK_LOCAL_ROOT_PARENT=/tmp WEBAPP_URL=http://localhost:8000/ API_URL=/api/v1/ INTERNAL_API_HOST=airbyte-server:8001 +CONNECTOR_BUILDER_API_HOST=airbyte-connector-builder-server:80 SYNC_JOB_MAX_ATTEMPTS=3 SYNC_JOB_MAX_TIMEOUT_DAYS=3 WORKERS_MICRONAUT_ENVIRONMENTS=control-plane diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d063f4e23030d..e5e57d6c8f687 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,6 +5,9 @@ /airbyte-webapp/package.json /airbyte-webapp/package-lock.json +# Design +/airbyte-config/init/src/main/resources/icons/*.svg @airbytehq/design + # CDK and SAT /airbyte-cdk/ @airbytehq/api-connectors-dx /airbyte-integrations/bases/source-acceptance-tests/ @airbytehq/api-connectors-dx diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index 31f6106dfc6b5..cb78e32ffbd4c 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -9,16 +9,12 @@ assignees: '' diff --git a/.github/ISSUE_TEMPLATE/source-connector-type.md b/.github/ISSUE_TEMPLATE/source-connector-type.md deleted file mode 100644 index e3345ea03b476..0000000000000 --- a/.github/ISSUE_TEMPLATE/source-connector-type.md +++ /dev/null @@ -1,26 +0,0 @@ ---- - -name: Source Connector Type -about: Add a new type or update an existing type in source connector -title: '[EPIC] Add new type / update in source connector ' -labels: area/connectors, needs-triage -assignees: '' - ---- - -## Summary -(Choose one of the two below.) -- [ ] Support new type -- [ ] Update existing type - -## TODOs -(Complete the TODOs based on the instruction, and convert each bullet point with the `[Issue]` tag into an issue.) -- [ ] [Issue] Add a new destination acceptance test (DAT) test case for this type. -- List every destination below, either update the destination to suppor this type, or override its DAT to bypass the new test case. - - [ ] Example: [Issue] support in destination bigquery -- [ ] [Issue] Make sure every destination can pass the new DAT test case. 
- - Even if a destination does not need to support this type, its DAT should not break. -- List every source that should support this type below. - - [ ] Example: [Issue] support in source github - -## Desired Timeline diff --git a/.github/actions/build-and-push-branch/action.yml b/.github/actions/build-and-push-branch/action.yml index 55d33bb6fb7ef..c1c5f3a10893c 100644 --- a/.github/actions/build-and-push-branch/action.yml +++ b/.github/actions/build-and-push-branch/action.yml @@ -20,7 +20,7 @@ runs: branch_version_tag: ${{ inputs.branch_version_tag }} - name: Login to Docker (on Master) - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ inputs.dockerhub_username }} password: ${{ inputs.dockerhub_token }} diff --git a/.github/actions/build-branch/action.yml b/.github/actions/build-branch/action.yml index e1df48d97e2d9..4536de7f2b9f1 100644 --- a/.github/actions/build-branch/action.yml +++ b/.github/actions/build-branch/action.yml @@ -18,16 +18,21 @@ runs: # if the *branch_version_tag* input param is not specified, then generate it as 'dev-` # [[ "${{ inputs.branch_version_tag }}" != '' ]] && echo "branch_version_tag=${{ inputs.branch_version_tag }}" >> $GITHUB_OUTPUT \ - || { short_hash=$(git rev-parse --short=7 HEAD); echo "branch_version_tag=dev-$short_hash" >> $GITHUB_OUTPUT ; } + || { short_hash=$(git rev-parse --short=10 HEAD); echo "branch_version_tag=dev-$short_hash" >> $GITHUB_OUTPUT ; } - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Set up CI Gradle Properties run: | mkdir -p ~/.gradle/ diff --git a/.github/actions/cache-build-artifacts/action.yml b/.github/actions/cache-build-artifacts/action.yml index 0e454b7f07f25..d3e2d6e8cd5e8 100644 --- a/.github/actions/cache-build-artifacts/action.yml +++ b/.github/actions/cache-build-artifacts/action.yml @@ -14,7 +14,7 @@ runs: steps: - name: Pip Caching if: ${{ inputs.cache_python }} == 'true' - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.cache/pip @@ -23,7 +23,7 @@ runs: ${{ inputs.cache-key }}-pip-${{ runner.os }}- - name: Npm Caching - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.npm @@ -33,7 +33,7 @@ runs: # this intentionally does not use restore-keys so we don't mess with gradle caching - name: Gradle and Python Caching - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.gradle/caches diff --git a/.github/actions/ci-tests-runner/action.yml b/.github/actions/ci-tests-runner/action.yml index f9014c4da277e..cae685c81ed0e 100644 --- a/.github/actions/ci-tests-runner/action.yml +++ b/.github/actions/ci-tests-runner/action.yml @@ -27,13 +27,14 @@ runs: using: "composite" steps: - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.9 - name: Install Java - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - name: Tests of CI diff --git a/.github/workflows/build-report.yml b/.github/workflows/build-report.yml index 9017360746753..4cdd45eaaabd4 100644 --- a/.github/workflows/build-report.yml +++ b/.github/workflows/build-report.yml @@ -18,7 +18,7 @@ jobs: environment: more-secrets steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install dependencies run: | python 
-m pip install --upgrade pip diff --git a/.github/workflows/commands-for-testing-tool.yml b/.github/workflows/commands-for-testing-tool.yml index fb2b4dea0c225..bdaa9464cdc03 100644 --- a/.github/workflows/commands-for-testing-tool.yml +++ b/.github/workflows/commands-for-testing-tool.yml @@ -14,7 +14,7 @@ jobs: command: ${{ steps.regex.outputs.first_match }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits run: | ./tools/bin/find_non_rate_limited_PAT \ @@ -55,7 +55,7 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - name: Pull Testing Tool docker image - run: docker pull airbyte/airbyte-e2e-testing-tool:latest + run: ./tools/bin/pull_image.sh -i airbyte/airbyte-e2e-testing-tool:latest - name: Create input and output folders run: | mkdir secrets @@ -97,14 +97,14 @@ jobs: comment-id: ${{ needs.set-params.outputs.comment-id }} reactions: eyes, rocket - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ needs.set-params.outputs.repo }} ref: ${{ needs.set-params.outputs.ref }} - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - name: Pull Testing Tool docker image - run: docker pull airbyte/airbyte-e2e-testing-tool:latest + run: ./tools/bin/pull_image.sh -i airbyte/airbyte-e2e-testing-tool:latest - name: Change wrapper permissions run: | mkdir secrets diff --git a/.github/workflows/connector_integration_tests.yml b/.github/workflows/connector_integration_tests.yml index 9e5851ba0807a..90fcef0f0ce3c 100644 --- a/.github/workflows/connector_integration_tests.yml +++ b/.github/workflows/connector_integration_tests.yml @@ -6,7 +6,7 @@ on: workflow_dispatch: schedule: # 11am UTC is 4am PDT. - - cron: '0 11 * * *' + - cron: "0 11 * * *" jobs: launch_integration_tests: @@ -15,11 +15,12 @@ jobs: if: github.ref == 'refs/heads/master' steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Install Java - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: - java-version: '17' + distribution: "zulu" + java-version: "17" - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/connector_ops_ci.yml b/.github/workflows/connector_ops_ci.yml new file mode 100644 index 0000000000000..d5efdd9f8a614 --- /dev/null +++ b/.github/workflows/connector_ops_ci.yml @@ -0,0 +1,23 @@ +name: Connector Ops CI + +on: + pull_request: + paths: + - "airbyte-integrations/connectors/source-**" +jobs: + test-strictness-level: + name: "Check test strictness level" + runs-on: ubuntu-latest + steps: + - name: Checkout Airbyte + uses: actions/checkout@v3 + with: + fetch-depth: 0 + - name: Install Python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Install ci-connector-ops package + run: pip install --quiet -e ./tools/ci_connector_ops + - name: Check test strictness level + run: check-test-strictness-level diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 75020451223fd..348fe691854a8 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -57,7 +57,7 @@ jobs: echo -e "$CHANGELOG" >> $GITHUB_ENV echo "EOF" >> $GITHUB_ENV - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Get Version id: get_version shell: bash diff --git a/.github/workflows/deploy-docs-site.yml b/.github/workflows/deploy-docs-site.yml index 
a66be41b4c862..4da11c12fa2fc 100644 --- a/.github/workflows/deploy-docs-site.yml +++ b/.github/workflows/deploy-docs-site.yml @@ -5,7 +5,7 @@ on: branches: - master paths: - - 'docs/**' + - "docs/**" # Allows you to run this workflow manually from the Actions tab workflow_dispatch: @@ -23,10 +23,10 @@ jobs: # Node.js is needed for Yarn - name: Setup Yarn - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: - node-version: '16.14.0' - cache: 'yarn' + node-version: "16.14.0" + cache: "yarn" cache-dependency-path: docusaurus - name: Run Docusaurus diff --git a/.github/workflows/deploy-oss-catalog.yml b/.github/workflows/deploy-oss-catalog.yml index c5be13bfdde69..26ecbdaaf651e 100644 --- a/.github/workflows/deploy-oss-catalog.yml +++ b/.github/workflows/deploy-oss-catalog.yml @@ -4,7 +4,7 @@ on: push: branches: - master - paths: + paths: - airbyte-config/init/src/main/resources/seed/** workflow_dispatch: @@ -17,15 +17,16 @@ jobs: concurrency: deploy-oss-connector-catalog steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Cloud SDK uses: google-github-actions/setup-gcloud@v0 with: service_account_key: ${{ secrets.PROD_SPEC_CACHE_SA_KEY }} export_default_credentials: true - name: Install Java - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - name: Generate catalog run: SUB_BUILD=PLATFORM ./gradlew :airbyte-config:init:processResources @@ -35,3 +36,14 @@ jobs: gcs_bucket_name="prod-airbyte-cloud-connector-metadata-service" catalog_path="airbyte-config/init/src/main/resources/seed/oss_catalog.json" gsutil -h "Cache-Control:public, max-age=10" cp "$catalog_path" "gs://$gcs_bucket_name/oss_catalog.json" + - name: Check PAT rate limits + run: | + ./tools/bin/find_non_rate_limited_PAT \ + ${{ secrets.OCTAVIA_4_ROOT_ACCESS }} \ + ${{ secrets.OCTAVIA_PAT }} + - name: Trigger Cloud catalog generation + uses: peter-evans/repository-dispatch@v2 + with: + token: ${{ env.PAT }} + repository: airbytehq/airbyte-cloud + event-type: generate-cloud-catalog diff --git a/.github/workflows/fe-validate-links.yml b/.github/workflows/fe-validate-links.yml index 2793c5542c6e4..ae5c4ef57dee7 100644 --- a/.github/workflows/fe-validate-links.yml +++ b/.github/workflows/fe-validate-links.yml @@ -3,7 +3,7 @@ name: Check for broken links in FE on: workflow_dispatch: schedule: - - cron: '0 14 * * *' + - cron: "0 14 * * *" jobs: validate-frontend-links: @@ -12,13 +12,14 @@ jobs: timeout-minutes: 15 steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" @@ -47,4 +48,4 @@ jobs: args: >- {\"channel\":\"C03088BTMFC\", \"blocks\":[ {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\":alarm: The periodic link validation failed!\n\n\"}}, - {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\"See details on \n\"}}]} \ No newline at end of file + {\"type\":\"section\",\"text\":{\"type\":\"mrkdwn\",\"text\":\"See details on \n\"}}]} diff --git a/.github/workflows/gke-kube-test-command.yml b/.github/workflows/gke-kube-test-command.yml index 68a7aba4362b4..997241ed11562 100644 --- a/.github/workflows/gke-kube-test-command.yml +++ b/.github/workflows/gke-kube-test-command.yml @@ -1,7 +1,7 @@ name: GKE Kube Acceptance Test on: schedule: - - cron: '0 0 * * 0' # 
runs at midnight UTC every Sunday + - cron: "0 0 * * 0" # runs at midnight UTC every Sunday workflow_dispatch: inputs: repo: @@ -25,7 +25,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -44,7 +44,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -71,16 +71,17 @@ jobs: > :clock2: https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo || "airbytehq/airbyte" }} ref: ${{ github.event.inputs.gitref || "master" }} - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" diff --git a/.github/workflows/gradle.yml b/.github/workflows/gradle.yml index 587d760ae113c..3485083530340 100644 --- a/.github/workflows/gradle.yml +++ b/.github/workflows/gradle.yml @@ -1,5 +1,9 @@ name: Airbyte CI +env: + S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }} + S3_BUILD_CACHE_SECRET_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} + on: #ability to start task manually in Web UI workflow_dispatch: @@ -17,7 +21,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check images exist run: ./tools/bin/check_images_exist.sh all @@ -39,7 +43,7 @@ jobs: frontend: ${{ steps.filter.outputs.frontend }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - uses: dorny/paths-filter@v2 id: filter with: @@ -49,6 +53,8 @@ jobs: - 'airbyte-!(cdk|integrations|webapp|webapp-e2e-tests)/**' - 'airbyte-integrations/connectors/(destination-jdbc|destination-postgres|source-jdbc|source-postgres)/**' - 'airbyte-config/init/src/main/resources/seed/(source|destination)_definitions.yaml' + - 'docker-compose*.yaml' + - '(charts|kube)/**' build: - '.github/**' - 'buildSrc/**' @@ -76,7 +82,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -86,14 +92,13 @@ jobs: ${{ secrets.SUPERTOPHER_PAT }} \ ${{ secrets.DAVINCHIA_PAT }} - # Uncomment to debug. 
# changes-output: # name: "Debug Change Detection Logic" # needs: changes # runs-on: ubuntu-latest # steps: - # - uses: actions/checkout@v2 + # - uses: actions/checkout@v3 # - run: | # echo '${{ toJSON(needs) }}' @@ -107,7 +112,7 @@ jobs: timeout-minutes: 90 steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Cache Build Artifacts uses: ./.github/actions/cache-build-artifacts @@ -115,11 +120,12 @@ jobs: cache-key: ${{ secrets.CACHE_VERSION }} cache-python: "false" - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.9" @@ -170,7 +176,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Start AWS Runner id: start-ec2-runner uses: ./.github/actions/start-aws-runner @@ -187,22 +193,23 @@ jobs: timeout-minutes: 90 steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Cache Build Artifacts uses: ./.github/actions/cache-build-artifacts with: cache-key: ${{ secrets.CACHE_VERSION }} - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.9" @@ -248,7 +255,7 @@ jobs: json_file: connectors_base_results.json json_test_case_results: true check_name: "Connectors Base Test Results" - + - name: Setup Google Cloud SDK if: always() uses: google-github-actions/setup-gcloud@v0 @@ -260,22 +267,22 @@ jobs: if: always() run: | python tools/bin/prep_test_results_for_gcs.py --json connectors_base_results.json --jobid $GITHUB_JOB --runid $GITHUB_RUN_ID - + - name: Upload Test Results to GCS if: always() - run: | + run: | gcs_bucket_name="dev-ab-ci-run-results" filename=$(echo "${{ fromJSON( steps.connectors-test-results.outputs.json ).check_url }}" | sed 's@.*/@@') echo "$filename" gsutil -h "Cache-Control:public" cp connectors_base_results.jsonl "gs://$gcs_bucket_name/oss/$filename.jsonl" - + - name: Generate Test Report uses: dorny/test-reporter@v1 if: always() with: name: Connectors Base Test Report # Specify top-level and second-level modules. Note there cannot be a space between the comma. - path: '/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml,/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml' + path: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml,/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml" reporter: java-junit # In case of self-hosted EC2 errors, remove this block. 
@@ -323,7 +330,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Start AWS Runner id: start-ec2-runner uses: ./.github/actions/start-aws-runner @@ -339,7 +346,7 @@ jobs: runs-on: ${{ needs.start-frontend-runner.outputs.label }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 @@ -349,14 +356,19 @@ jobs: cache-key: ${{ secrets.CACHE_VERSION }} cache-python: "false" - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Set up CI Gradle Properties run: | mkdir -p ~/.gradle/ @@ -383,7 +395,6 @@ jobs: autoAcceptChanges: true exitOnceUploaded: true - frontend-test: name: "Frontend: Run End-to-End Tests" needs: @@ -392,7 +403,7 @@ jobs: timeout-minutes: 120 steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Cache Build Artifacts uses: ./.github/actions/cache-build-artifacts @@ -400,14 +411,19 @@ jobs: cache-key: ${{ secrets.CACHE_VERSION }} cache-python: "false" - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Set up CI Gradle Properties run: | mkdir -p ~/.gradle/ @@ -474,7 +490,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Start AWS Runner id: start-ec2-runner uses: ./.github/actions/start-aws-runner @@ -490,7 +506,7 @@ jobs: timeout-minutes: 90 steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Cache Build Artifacts uses: ./.github/actions/cache-build-artifacts @@ -498,11 +514,16 @@ jobs: cache-key: ${{ secrets.CACHE_VERSION }} cache-python: "false" - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" @@ -546,9 +567,9 @@ jobs: if: success() && github.ref == 'refs/heads/master' run: ./tools/site/link_checker.sh check_docs -# This is only required on the usual github runner. The usual runner does not contain enough disk space for our use. -# - name: Get Docker Space -# run: docker run --rm busybox df -h + # This is only required on the usual github runner. The usual runner does not contain enough disk space for our use. 
+ # - name: Get Docker Space + # run: docker run --rm busybox df -h - name: Run End-to-End Acceptance Tests run: ./tools/bin/acceptance_test.sh @@ -556,7 +577,7 @@ jobs: - name: Automatic Migration Acceptance Test run: SUB_BUILD=PLATFORM ./gradlew :airbyte-tests:automaticMigrationAcceptanceTest --scan -i - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 if: always() with: python-version: "3.9" @@ -578,39 +599,39 @@ jobs: with: service_account_key: ${{ secrets.GKE_TEST_SA_KEY }} export_default_credentials: true - + - name: Prep Test Results For GCS if: always() run: | python tools/bin/prep_test_results_for_gcs.py --json platform_results.json --jobid $GITHUB_JOB --runid $GITHUB_RUN_ID - + - name: Upload Test Results to GCS if: always() - run: | + run: | gcs_bucket_name="dev-ab-ci-run-results" filename=$(echo "${{ fromJSON( steps.platform-results.outputs.json ).check_url }}" | sed 's@.*/@@') echo "$filename" gsutil -h "Cache-Control:public" cp platform_results.jsonl "gs://$gcs_bucket_name/oss/$filename.jsonl" - + - name: Generate Test Report uses: dorny/test-reporter@v1 - if: always() # run this step even if previous step failed + if: always() # run this step even if previous step failed with: name: Platform Test Report with Docker E2E Test # Specify top-level and second-level modules. Note there cannot be a space between the comma. - path: '/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml,/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml' + path: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml,/actions-runner/_work/airbyte/airbyte/*/*/build/test-results/*/*.xml" reporter: java-junit - name: Upload test results to BuildPulse for flaky test detection - if: '!cancelled()' # Run this step even when the tests fail. Skip if the workflow is cancelled. + if: "!cancelled()" # Run this step even when the tests fail. Skip if the workflow is cancelled. uses: Workshop64/buildpulse-action@main with: account: 59758427 repository: 283046497 - path: '/actions-runner/_work/airbyte/airbyte/*' + path: "/actions-runner/_work/airbyte/airbyte/*" key: ${{ secrets.BUILDPULSE_ACCESS_KEY_ID }} secret: ${{ secrets.BUILDPULSE_SECRET_ACCESS_KEY }} - + # In case of self-hosted EC2 errors, remove this block. 
stop-platform-build-runner: name: "Platform: Stop Build EC2 Runner" @@ -655,7 +676,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Start AWS Runner id: start-ec2-runner uses: ./.github/actions/start-aws-runner @@ -674,7 +695,7 @@ jobs: timeout-minutes: 40 steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Cache Build Artifacts uses: ./.github/actions/cache-build-artifacts @@ -682,14 +703,19 @@ jobs: cache-key: ${{ secrets.CACHE_VERSION }} cache-python: "false" - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" + - uses: actions/setup-python@v4 + with: + python-version: "3.9" + - name: Fix EC-2 Runner run: | mkdir -p /home/runner @@ -742,7 +768,7 @@ jobs: run: | CI=true IS_MINIKUBE=true ./tools/bin/acceptance_test_kube.sh - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 if: always() with: python-version: "3.9" @@ -757,52 +783,51 @@ jobs: json_file: kube_results.json json_test_case_results: true check_name: "Kube Test Results" - + - name: Setup Google Cloud SDK if: always() uses: google-github-actions/setup-gcloud@v0 with: service_account_key: ${{ secrets.GKE_TEST_SA_KEY }} export_default_credentials: true - + - name: Prep Test Results For GCS if: always() run: | python tools/bin/prep_test_results_for_gcs.py --json kube_results.json --jobid $GITHUB_JOB --runid $GITHUB_RUN_ID - + - name: Upload Test Results to GCS if: always() - run: | + run: | gcs_bucket_name="dev-ab-ci-run-results" filename=$(echo "${{ fromJSON( steps.kube-results.outputs.json ).check_url }}" | sed 's@.*/@@') echo "$filename" gsutil -h "Cache-Control:public" cp kube_results.jsonl "gs://$gcs_bucket_name/oss/$filename.jsonl" - + - name: Generate Test Report uses: dorny/test-reporter@v1 - if: always() # run this step even if previous step failed + if: always() # run this step even if previous step failed with: name: Platform Kubernetes E2E Test Report - path: '/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml' + path: "/actions-runner/_work/airbyte/airbyte/*/build/test-results/*/*.xml" reporter: java-junit - name: Upload test results to BuildPulse for flaky test detection - if: '!cancelled()' # Run this step even when the tests fail. Skip if the workflow is cancelled. + if: "!cancelled()" # Run this step even when the tests fail. Skip if the workflow is cancelled. uses: Workshop64/buildpulse-action@main with: account: 59758427 repository: 283046497 - path: '/actions-runner/_work/airbyte/airbyte/*' + path: "/actions-runner/_work/airbyte/airbyte/*" key: ${{ secrets.BUILDPULSE_ACCESS_KEY_ID }} secret: ${{ secrets.BUILDPULSE_SECRET_ACCESS_KEY }} - - - uses: actions/upload-artifact@v2 + + - uses: actions/upload-artifact@v3 if: failure() with: name: Kubernetes Logs path: /tmp/kubernetes_logs/* - # In case of self-hosted EC2 errors, remove this block. stop-kube-acceptance-test-runner: name: "Platform: Stop Kube Acceptance Test EC2 Runner" @@ -830,7 +855,6 @@ jobs: label: ${{ needs.start-kube-acceptance-test-runner.outputs.label }} ec2-instance-id: ${{ needs.start-kube-acceptance-test-runner.outputs.ec2-instance-id }} - ## Kube Acceptance Tests # Docker acceptance tests run as part of the build job. # In case of self-hosted EC2 errors, remove this block. 
@@ -850,7 +874,7 @@ jobs: # ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} # steps: # - name: Checkout Airbyte - # uses: actions/checkout@v2 + # uses: actions/checkout@v3 # - name: Start AWS Runner # id: start-ec2-runner # uses: ./.github/actions/start-aws-runner @@ -861,130 +885,128 @@ jobs: # aws-secret-access-key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }} # github-token: ${{ needs.find_valid_pat.outputs.pat }} - # Todo: Kyrylo turn this on. -# helm-acceptance-test: -# name: "Platform: Acceptance Tests (Helm)" -# # In case of self-hosted EC2 errors, removed the `needs` line and switch back to running on ubuntu-latest. -# needs: start-helm-acceptance-test-runner # required to start the main job when the runner is ready -# runs-on: ${{ needs.start-helm-acceptance-test-runner.outputs.label }} # run the job on the newly created runner -# environment: more-secrets -# timeout-minutes: 90 -# steps: -# - name: Checkout Airbyte -# uses: actions/checkout@v2 -# -# - name: Cache Build Artifacts -# uses: ./.github/actions/cache-build-artifacts -# with: -# cache-key: ${{ secrets.CACHE_VERSION }} -# cache-python: "false" -# -# - uses: actions/setup-java@v1 -# with: -# java-version: "17" -# -# - uses: actions/setup-node@v2 -# with: -# node-version: "lts/gallium" -# -# - name: Install unzip -# shell: bash -# run: sudo apt-get update && sudo apt-get install -y unzip -# -# - uses: azure/setup-helm@v3 -# with: -# version: 'latest' -# token: ${{ secrets.GITHUB_TOKEN }} -# id: install -# -# - name: Fix EC-2 Runner -# run: | -# mkdir -p /home/runner -# -# - name: Set up CI Gradle Properties -# run: | -# mkdir -p ~/.gradle/ -# cat > ~/.gradle/gradle.properties < /tmp/kind-config.yaml < ~/.gradle/gradle.properties < /tmp/kind-config.yaml < team/user to notify recipients: | team/frontend=@airbytehq/frontend diff --git a/.github/workflows/notify-on-push-to-master.yml b/.github/workflows/notify-on-push-to-master.yml index d5891fdf310ec..040bf4ccb6735 100644 --- a/.github/workflows/notify-on-push-to-master.yml +++ b/.github/workflows/notify-on-push-to-master.yml @@ -4,14 +4,14 @@ on: branches: - master workflow_dispatch: - + jobs: repo-sync: name: "Fire a Repo Dispatch event to airbyte-cloud" runs-on: ubuntu-latest steps: - name: Checkout Airbyte Repo for PAT command - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits # Cannot share PAT outside of JOB context run: | diff --git a/.github/workflows/publish-cdk-command.yml b/.github/workflows/publish-cdk-command.yml index 2ca72bc3c184b..18113a11bfcea 100644 --- a/.github/workflows/publish-cdk-command.yml +++ b/.github/workflows/publish-cdk-command.yml @@ -24,14 +24,15 @@ jobs: matrix: python-version: ["3.9"] steps: - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -64,7 +65,7 @@ jobs: echo ${{ github.event.inputs.dry-run }} echo "pypi_url=https://test.pypi.org/legacy/" >> $GITHUB_ENV - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index 
0317fb6319beb..14ca7de9d5ed1 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -38,7 +38,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -58,7 +58,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -80,7 +80,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -102,7 +102,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -124,7 +124,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -146,7 +146,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -233,17 +233,18 @@ jobs: if: steps.regex.outputs.first_match != matrix.connector run: echo "The connector provided has an invalid format!" 
&& exit 1 - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} token: ${{ secrets.OCTAVIA_PAT }} - name: Install Java - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - name: Install Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.9" - name: Install Pyenv and Tox diff --git a/.github/workflows/publish-connector-command.yml b/.github/workflows/publish-connector-command.yml index 9744873bcf384..cc695436f7ad6 100644 --- a/.github/workflows/publish-connector-command.yml +++ b/.github/workflows/publish-connector-command.yml @@ -34,7 +34,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -55,7 +55,7 @@ jobs: # ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} # steps: # - name: Checkout Airbyte -# uses: actions/checkout@v2 +# uses: actions/checkout@v3 # with: # repository: ${{ github.event.inputs.repo }} # ref: ${{ github.event.inputs.gitref }} @@ -99,17 +99,18 @@ jobs: # body: | # > :clock2: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} # - name: Checkout Airbyte -# uses: actions/checkout@v2 +# uses: actions/checkout@v3 # with: # repository: ${{ github.event.inputs.repo }} # ref: ${{ github.event.inputs.gitref }} # token: ${{ secrets.OCTAVIA_PAT }} # - name: Install Java -# uses: actions/setup-java@v1 +# uses: actions/setup-java@v3 # with: +# distribution: "zulu" # java-version: "17" # - name: Install Python -# uses: actions/setup-python@v2 +# uses: actions/setup-python@v4 # with: # python-version: "3.9" # - name: Install Pyenv and Tox diff --git a/.github/workflows/publish-helm-charts.yml b/.github/workflows/publish-helm-charts.yml index 53208e22a3506..b6f1540fbacee 100644 --- a/.github/workflows/publish-helm-charts.yml +++ b/.github/workflows/publish-helm-charts.yml @@ -21,7 +21,7 @@ jobs: next-version: ${{ steps.ver.outputs.fragment }} tag: ${{ steps.sem-ver.outputs.version_tag }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 diff --git a/.github/workflows/publish-oss-for-cloud.yml b/.github/workflows/publish-oss-for-cloud.yml index f3a5937f15770..74fb5486c6b3a 100644 --- a/.github/workflows/publish-oss-for-cloud.yml +++ b/.github/workflows/publish-oss-for-cloud.yml @@ -17,7 +17,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -36,7 +36,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Start AWS Runner id: start-ec2-runner uses: ./.github/actions/start-aws-runner @@ -53,7 +53,7 @@ jobs: master_tag: ${{ steps.set-outputs.outputs.master_tag }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: ref: ${{ github.event.inputs.oss_ref || github.ref }} - name: Generate Outputs @@ -62,7 +62,7 @@ jobs: run: |- set -x - commit_sha=$(git rev-parse --short=7 HEAD) + commit_sha=$(git rev-parse --short=10 HEAD) # set dev_tag # AirbyteVersion.java allows versions that have a prefix of 'dev' @@ -83,7 +83,7 @@ jobs: 
environment: more-secrets steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: ref: ${{ github.event.inputs.oss_ref || github.ref }} @@ -116,7 +116,7 @@ jobs: runs-on: ${{ needs.start-runner.outputs.label }} steps: - name: Login to Docker (on Master) - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: username: ${{ secrets.DOCKER_HUB_USERNAME }} password: ${{ secrets.DOCKER_HUB_PASSWORD }} diff --git a/.github/workflows/release-airbyte-os.yml b/.github/workflows/release-airbyte-os.yml index 23dd09cb6e6f6..2165dc688f51a 100644 --- a/.github/workflows/release-airbyte-os.yml +++ b/.github/workflows/release-airbyte-os.yml @@ -17,7 +17,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -37,7 +37,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Start AWS Runner id: start-ec2-runner uses: ./.github/actions/start-aws-runner @@ -53,18 +53,19 @@ jobs: environment: more-secrets steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" # necessary to install pip - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.9" - name: Release Airbyte @@ -85,17 +86,18 @@ jobs: environment: more-secrets steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - - uses: actions/setup-node@v2 + - uses: actions/setup-node@v3 with: node-version: "lts/gallium" - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.9" - name: Release Octavia @@ -114,11 +116,11 @@ jobs: environment: more-secrets steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: fetch-depth: 0 # necessary to install pip - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: "3.9" - name: Bump version diff --git a/.github/workflows/run-performance-test.yml b/.github/workflows/run-performance-test.yml index 55a3bda9d83c1..5e02ae01875b5 100644 --- a/.github/workflows/run-performance-test.yml +++ b/.github/workflows/run-performance-test.yml @@ -22,13 +22,13 @@ jobs: environment: more-secrets steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} - name: Npm Caching - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.npm @@ -38,7 +38,7 @@ jobs: # this intentionally does not use restore-keys so we don't mess with gradle caching - name: Gradle Caching - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ~/.gradle/caches @@ -46,13 +46,14 @@ jobs: **/.venv key: ${{ secrets.CACHE_VERSION }}-${{ runner.os }}-${{ hashFiles('**/*.gradle*') }}-${{ hashFiles('**/package-lock.json') }} - - uses: actions/setup-java@v1 + - uses: actions/setup-java@v3 with: - java-version: '14' + distribution: "zulu" + java-version: "14" - - uses: actions/setup-node@v2 + - 
uses: actions/setup-node@v3 with: - node-version: 'lts/gallium' + node-version: "lts/gallium" - name: Build id: run-specific-test diff --git a/.github/workflows/slash-commands.yml b/.github/workflows/slash-commands.yml index 503d51cfa71a6..e0a3b2e1ec370 100644 --- a/.github/workflows/slash-commands.yml +++ b/.github/workflows/slash-commands.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits run: | diff --git a/.github/workflows/terminate-zombie-build-instances.yml b/.github/workflows/terminate-zombie-build-instances.yml index 8de735476c813..40cd8e782cbf5 100644 --- a/.github/workflows/terminate-zombie-build-instances.yml +++ b/.github/workflows/terminate-zombie-build-instances.yml @@ -41,7 +41,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: List and Terminate GH actions in status 'offline' env: GITHUB_PAT: ${{ secrets.OCTAVIA_PAT }} diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index c424fc828485e..23b0ff27469a4 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -31,7 +31,7 @@ jobs: - name: UUID ${{ github.event.inputs.uuid }} run: true - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -50,7 +50,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -87,16 +87,17 @@ jobs: if: steps.regex.outputs.first_match != github.event.inputs.connector run: echo "The connector provided has an invalid format!" 
&& exit 1 - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} - name: Install Java - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - name: Install Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.9" - name: Install Pyenv and Tox @@ -134,7 +135,7 @@ jobs: TZ: UTC - name: Archive test reports artifacts if: github.event.inputs.comment-id && failure() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: test-reports path: | @@ -148,7 +149,7 @@ jobs: - name: Test coverage reports artifacts if: github.event.inputs.comment-id && success() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: test-reports path: | diff --git a/.github/workflows/test-performance-command.yml b/.github/workflows/test-performance-command.yml index 0e24f4444b2ed..c420ebc928118 100644 --- a/.github/workflows/test-performance-command.yml +++ b/.github/workflows/test-performance-command.yml @@ -34,7 +34,7 @@ jobs: pat: ${{ steps.variables.outputs.pat }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Check PAT rate limits id: variables run: | @@ -53,7 +53,7 @@ jobs: ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} steps: - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} @@ -90,16 +90,17 @@ jobs: body: | > :clock2: ${{github.event.inputs.connector}} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - name: Checkout Airbyte - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.gitref }} - name: Install Java - uses: actions/setup-java@v1 + uses: actions/setup-java@v3 with: + distribution: "zulu" java-version: "17" - name: Install Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: "3.9" - name: Install Pyenv and Tox @@ -136,7 +137,7 @@ jobs: TZ: UTC - name: Archive test reports artifacts if: github.event.inputs.comment-id && failure() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: test-reports path: | @@ -150,7 +151,7 @@ jobs: - name: Test coverage reports artifacts if: github.event.inputs.comment-id && success() - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: test-reports path: | diff --git a/.github/workflows/workflow-cleanup.yml b/.github/workflows/workflow-cleanup.yml index 10652dd5f5d0d..8c7ea90d9ac80 100644 --- a/.github/workflows/workflow-cleanup.yml +++ b/.github/workflows/workflow-cleanup.yml @@ -1,34 +1,33 @@ name: Cleanup old GH workflow runs -on: +on: schedule: - - cron: '0 12 * * *' # runs at 12:00 UTC everyday + - cron: "0 12 * * *" # runs at 12:00 UTC everyday jobs: build: runs-on: ubuntu-latest steps: - - name: checkout repo content - uses: actions/checkout@v2 # checkout the repository content to github runner + uses: actions/checkout@v3 # checkout the repository content to github runner - name: setup python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: - python-version: '3.9.13' # install the python version needed - + python-version: "3.9.13" # install the python version needed + - name: install python packages 
run: | python -m pip install --upgrade pip pip install PyGithub slack_sdk - - - name: execute cleanup workflow py script - env: + + - name: execute cleanup workflow py script + env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: python tools/bin/cleanup-workflow-runs.py --delete - - - name: execute dormant workflow py script - env: + + - name: execute dormant workflow py script + env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SLACK_TOKEN: ${{ secrets.SLACK_BOT_TOKEN_AIRBYTE_TEAM }} diff --git a/airbyte-api/src/main/openapi/config.yaml b/airbyte-api/src/main/openapi/config.yaml index e9691ae1b4e01..d1745349070b4 100644 --- a/airbyte-api/src/main/openapi/config.yaml +++ b/airbyte-api/src/main/openapi/config.yaml @@ -3778,19 +3778,35 @@ components: type: string OperatorWebhook: type: object - required: - - executionUrl properties: - executionUrl: - type: string - description: The URL to call to execute the webhook operation via POST request. - executionBody: - type: string - description: If populated, this will be sent with the POST request. webhookConfigId: type: string format: uuid description: The id of the webhook configs to use from the workspace. + webhookType: + type: string + enum: + - dbtCloud + dbtCloud: + type: object + required: + - accountId + - jobId + properties: + accountId: + type: integer + description: The account id associated with the job + jobId: + type: integer + description: The job id associated with the job + executionUrl: + type: string + description: DEPRECATED. Populate dbtCloud instead. + deprecated: true + executionBody: + type: string + description: DEPRECATED. Populate dbtCloud instead. + deprecated: true CheckOperationRead: type: object required: @@ -4019,6 +4035,13 @@ components: $ref: "#/components/schemas/SourceDefinitionRead" destinationDefinition: $ref: "#/components/schemas/DestinationDefinitionRead" + WorkflowStateRead: + type: object + required: + - running + properties: + running: + type: boolean JobWithAttemptsRead: type: object properties: @@ -4210,6 +4233,8 @@ components: type: array items: $ref: "#/components/schemas/AttemptInfoRead" + workflowState: + $ref: "#/components/schemas/WorkflowStateRead" AttemptInfoRead: type: object required: @@ -4649,6 +4674,8 @@ components: type: string oAuthInputConfiguration: $ref: "#/components/schemas/OAuthInputConfiguration" + sourceId: + $ref: "#/components/schemas/SourceId" DestinationOauthConsentRequest: type: object required: @@ -4665,6 +4692,8 @@ components: type: string oAuthInputConfiguration: $ref: "#/components/schemas/OAuthInputConfiguration" + destinationId: + $ref: "#/components/schemas/DestinationId" OAuthConsentRead: type: object required: @@ -4691,6 +4720,8 @@ components: additionalProperties: true # Oauth parameters like code, state, etc.. will be different per API so we don't specify them in advance oAuthInputConfiguration: $ref: "#/components/schemas/OAuthInputConfiguration" + sourceId: + $ref: "#/components/schemas/SourceId" CompleteDestinationOAuthRequest: type: object required: @@ -4710,6 +4741,8 @@ components: additionalProperties: true # Oauth parameters like code, state, etc.. will be different per API so we don't specify them in advance oAuthInputConfiguration: $ref: "#/components/schemas/OAuthInputConfiguration" + destinationId: + $ref: "#/components/schemas/DestinationId" CompleteOAuthResponse: type: object additionalProperties: true # Oauth parameters like refresh/access token etc.. 
will be different per API so we don't specify them in advance diff --git a/airbyte-bootloader/Dockerfile b/airbyte-bootloader/Dockerfile index b04199481af76..671e11e0a4408 100644 --- a/airbyte-bootloader/Dockerfile +++ b/airbyte-bootloader/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 FROM ${JDK_IMAGE} -ARG VERSION=0.40.18 +ARG VERSION=0.40.19 ENV APPLICATION airbyte-bootloader ENV VERSION ${VERSION} diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java index 1132b5a46ef2b..dd60a92159908 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/BootloaderApp.java @@ -8,8 +8,8 @@ import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.lang.CloseableShutdownHook; import io.airbyte.commons.resources.MoreResources; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.commons.version.Version; import io.airbyte.config.Configs; import io.airbyte.config.EnvConfigs; import io.airbyte.config.Geography; @@ -70,7 +70,7 @@ public class BootloaderApp { private final FeatureFlags featureFlags; private final SecretMigrator secretMigrator; private ConfigRepository configRepository; - private DefinitionsProvider localDefinitionsProvider; + private Optional definitionsProvider; private Database configDatabase; private Database jobDatabase; private JobPersistence jobPersistence; @@ -79,6 +79,13 @@ public class BootloaderApp { private final DSLContext configsDslContext; private final DSLContext jobsDslContext; + // This controls how we check the protocol version compatibility + // True means that the connectors will be forcefully upgraded regardless of whether they are used in + // an active sync or not. + // This should be moved to a Configs, however, this behavior is currently forced through hooks that + // are passed as the postLoadExecution. + private final boolean autoUpgradeConnectors; + /** * This method is exposed for Airbyte Cloud consumption. This lets us override the seed loading * logic and customise Cloud connector versions. Please check with the Platform team before making @@ -97,7 +104,9 @@ public BootloaderApp(final Configs configs, final DSLContext configsDslContext, final DSLContext jobsDslContext, final Flyway configsFlyway, - final Flyway jobsFlyway) { + final Flyway jobsFlyway, + final Optional definitionsProvider, + final boolean autoUpgradeConnectors) { this.configs = configs; this.postLoadExecution = postLoadExecution; this.featureFlags = featureFlags; @@ -106,11 +115,16 @@ public BootloaderApp(final Configs configs, this.configsFlyway = configsFlyway; this.jobsDslContext = jobsDslContext; this.jobsFlyway = jobsFlyway; + this.definitionsProvider = definitionsProvider; + this.autoUpgradeConnectors = autoUpgradeConnectors; initPersistences(configsDslContext, jobsDslContext); } + // Temporary duplication of constructor, to remove once Cloud has been migrated to the one above. 
+ @Deprecated(forRemoval = true) public BootloaderApp(final Configs configs, + final Runnable postLoadExecution, final FeatureFlags featureFlags, final SecretMigrator secretMigrator, final DSLContext configsDslContext, @@ -118,18 +132,49 @@ public BootloaderApp(final Configs configs, final Flyway configsFlyway, final Flyway jobsFlyway) { this.configs = configs; + this.postLoadExecution = postLoadExecution; this.featureFlags = featureFlags; this.secretMigrator = secretMigrator; this.configsDslContext = configsDslContext; this.configsFlyway = configsFlyway; this.jobsDslContext = jobsDslContext; this.jobsFlyway = jobsFlyway; + this.autoUpgradeConnectors = false; + + try { + this.definitionsProvider = Optional.of(getLocalDefinitionsProvider()); + } catch (final IOException e) { + LOGGER.error("Unable to initialize persistence.", e); + } + + initPersistences(configsDslContext, jobsDslContext); + } + + public BootloaderApp(final Configs configs, + final FeatureFlags featureFlags, + final SecretMigrator secretMigrator, + final DSLContext configsDslContext, + final DSLContext jobsDslContext, + final Flyway configsFlyway, + final Flyway jobsFlyway, + final DefinitionsProvider definitionsProvider, + final boolean autoUpgradeConnectors) { + this.configs = configs; + this.featureFlags = featureFlags; + this.secretMigrator = secretMigrator; + this.configsDslContext = configsDslContext; + this.configsFlyway = configsFlyway; + this.jobsDslContext = jobsDslContext; + this.jobsFlyway = jobsFlyway; + this.definitionsProvider = Optional.of(definitionsProvider); + this.autoUpgradeConnectors = autoUpgradeConnectors; initPersistences(configsDslContext, jobsDslContext); postLoadExecution = () -> { try { - final ApplyDefinitionsHelper applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, localDefinitionsProvider); + final ApplyDefinitionsHelper applyDefinitionsHelper = + new ApplyDefinitionsHelper(configRepository, this.definitionsProvider.get(), jobPersistence); applyDefinitionsHelper.apply(); if (featureFlags.forceSecretMigration() || !jobPersistence.isSecretMigrated()) { @@ -159,10 +204,9 @@ public void load() throws Exception { final AirbyteVersion currAirbyteVersion = configs.getAirbyteVersion(); assertNonBreakingMigration(jobPersistence, currAirbyteVersion); - final Version airbyteProtocolVersionMax = configs.getAirbyteProtocolVersionMax(); - final Version airbyteProtocolVersionMin = configs.getAirbyteProtocolVersionMin(); - // TODO ProtocolVersion validation should happen here - trackProtocolVersion(airbyteProtocolVersionMin, airbyteProtocolVersionMax); + final ProtocolVersionChecker protocolVersionChecker = + new ProtocolVersionChecker(jobPersistence, configs, configRepository, definitionsProvider); + assertNonBreakingProtocolVersionConstraints(protocolVersionChecker, jobPersistence, autoUpgradeConnectors); // TODO Will be converted to an injected singleton during DI migration final DatabaseMigrator configDbMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); @@ -191,7 +235,7 @@ private static Database getConfigDatabase(final DSLContext dslContext) throws IO return new Database(dslContext); } - private static DefinitionsProvider getLocalDefinitionsProvider() throws IOException { + static DefinitionsProvider getLocalDefinitionsProvider() throws IOException { return new LocalDefinitionsProvider(LocalDefinitionsProvider.DEFAULT_SEED_DEFINITION_RESOURCE_CLASS); } @@ -207,7 +251,6 @@ private void initPersistences(final DSLContext configsDslContext, final DSLConte try { 
configDatabase = getConfigDatabase(configsDslContext); configRepository = new ConfigRepository(configDatabase); - localDefinitionsProvider = getLocalDefinitionsProvider(); jobDatabase = getJobDatabase(jobsDslContext); jobPersistence = getJobPersistence(jobDatabase); } catch (final IOException e) { @@ -249,7 +292,11 @@ public static void main(final String[] args) throws Exception { // Ensure that the database resources are closed on application shutdown CloseableShutdownHook.registerRuntimeShutdownHook(configsDataSource, jobsDataSource, configsDslContext, jobsDslContext); - final var bootloader = new BootloaderApp(configs, featureFlags, secretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + final DefinitionsProvider definitionsProvider = getLocalDefinitionsProvider(); + + final var bootloader = + new BootloaderApp(configs, featureFlags, secretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway, definitionsProvider, + false); bootloader.load(); } } @@ -307,10 +354,24 @@ private static void assertNonBreakingMigration(final JobPersistence jobPersisten } } - private void trackProtocolVersion(final Version airbyteProtocolVersionMin, final Version airbyteProtocolVersionMax) throws IOException { - jobPersistence.setAirbyteProtocolVersionMin(airbyteProtocolVersionMin); - jobPersistence.setAirbyteProtocolVersionMax(airbyteProtocolVersionMax); - LOGGER.info("AirbyteProtocol version support range [{}:{}]", airbyteProtocolVersionMin.serialize(), airbyteProtocolVersionMax.serialize()); + private static void assertNonBreakingProtocolVersionConstraints(final ProtocolVersionChecker protocolVersionChecker, + final JobPersistence jobPersistence, + final boolean autoUpgradeConnectors) + throws Exception { + final Optional newProtocolRange = protocolVersionChecker.validate(autoUpgradeConnectors); + if (newProtocolRange.isEmpty()) { + throw new RuntimeException( + "Aborting bootloader to avoid breaking existing connection after an upgrade. " + + "Please address airbyte protocol version support issues in the connectors before retrying."); + } + trackProtocolVersion(jobPersistence, newProtocolRange.get()); + } + + private static void trackProtocolVersion(final JobPersistence jobPersistence, final AirbyteProtocolVersionRange protocolVersionRange) + throws IOException { + jobPersistence.setAirbyteProtocolVersionMin(protocolVersionRange.min()); + jobPersistence.setAirbyteProtocolVersionMax(protocolVersionRange.max()); + LOGGER.info("AirbyteProtocol version support range [{}:{}]", protocolVersionRange.min().serialize(), protocolVersionRange.max().serialize()); } static boolean isLegalUpgrade(final AirbyteVersion airbyteDatabaseVersion, final AirbyteVersion airbyteVersion) { diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java new file mode 100644 index 0000000000000..e5de49cd3ba0d --- /dev/null +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java @@ -0,0 +1,206 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.bootloader; + +import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; +import io.airbyte.commons.version.AirbyteVersion; +import io.airbyte.commons.version.Version; +import io.airbyte.config.ActorType; +import io.airbyte.config.Configs; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.init.DefinitionsProvider; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.persistence.job.JobPersistence; +import java.io.IOException; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ProtocolVersionChecker { + + private final JobPersistence jobPersistence; + private final Configs configs; + private final ConfigRepository configRepository; + private final Optional definitionsProvider; + + // Dependencies could be simplified once we break some pieces up: + // * JobPersistence for accessing the airbyte_metadata table. + // * Configs for getting the new Airbyte Protocol Range from the env vars. + // * ConfigRepository for accessing ActorDefinitions + public ProtocolVersionChecker(final JobPersistence jobPersistence, + final Configs configs, + final ConfigRepository configRepository, + final Optional definitionsProvider) { + this.jobPersistence = jobPersistence; + this.configs = configs; + this.configRepository = configRepository; + this.definitionsProvider = definitionsProvider; + } + + /** + * Validate the AirbyteProtocolVersion support range between the platform and the connectors. + *

+ * The goal is to make sure that we do not end up disabling existing connections after an upgrade + * that changes the protocol support range. + * + * @param supportAutoUpgrade whether the connectors will be automatically upgraded by the platform + * @return the supported protocol version range if check is successful, Optional.empty() if we would + * break existing connections. + * @throws IOException + */ + public Optional validate(final boolean supportAutoUpgrade) throws IOException { + final Optional currentAirbyteVersion = getCurrentAirbyteVersion(); + final Optional currentRange = jobPersistence.getCurrentProtocolVersionRange(); + final AirbyteProtocolVersionRange targetRange = getTargetProtocolVersionRange(); + + // Checking if there is a pre-existing version of airbyte. + // Without this check, the first run of the validation would fail because we do not have the tables + // set yet + // which means that the actor definitions lookup will throw SQLExceptions. + if (currentAirbyteVersion.isEmpty()) { + log.info("No previous version of Airbyte detected, assuming this is a fresh deploy."); + return Optional.of(targetRange); + } + + if (currentRange.isEmpty() || currentRange.get().equals(targetRange)) { + log.info("Using AirbyteProtocolVersion range [{}:{}]", targetRange.min().serialize(), targetRange.max().serialize()); + return Optional.of(targetRange); + } + + log.info("Detected an AirbyteProtocolVersion range change from [{}:{}] to [{}:{}]", + currentRange.get().min().serialize(), currentRange.get().max().serialize(), + targetRange.min().serialize(), targetRange.max().serialize()); + + final Map> conflicts = getConflictingActorDefinitions(targetRange); + + if (conflicts.isEmpty()) { + log.info("No protocol version conflict detected."); + return Optional.of(targetRange); + } + + final Set destConflicts = conflicts.getOrDefault(ActorType.DESTINATION, new HashSet<>()); + final Set sourceConflicts = conflicts.getOrDefault(ActorType.SOURCE, new HashSet<>()); + + if (!supportAutoUpgrade) { + // If we do not support auto upgrade, any conflict of used connectors must be resolved before being + // able to upgrade the platform. 
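For reference, a caller-side sketch of how the result of `validate(...)` is consumed. It mirrors the `assertNonBreakingProtocolVersionConstraints` / `trackProtocolVersion` pair added to `BootloaderApp` earlier in this diff; the method name and error text here are illustrative.

```java
// Sketch, assuming protocolVersionChecker and jobPersistence are constructed as in BootloaderApp.
static void enforceProtocolRange(final ProtocolVersionChecker protocolVersionChecker,
                                 final JobPersistence jobPersistence,
                                 final boolean autoUpgradeConnectors)
    throws Exception {
  final Optional<AirbyteProtocolVersionRange> newRange = protocolVersionChecker.validate(autoUpgradeConnectors);
  if (newRange.isEmpty()) {
    // Existing connections would break under the new protocol range: stop the upgrade here.
    throw new RuntimeException("Unresolved Airbyte protocol version conflicts; upgrade the listed connectors first.");
  }
  // Persist the accepted range, as trackProtocolVersion does.
  jobPersistence.setAirbyteProtocolVersionMin(newRange.get().min());
  jobPersistence.setAirbyteProtocolVersionMax(newRange.get().max());
}
```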
+ log.warn("The following connectors need to be upgraded before being able to upgrade the platform"); + formatActorDefinitionForLogging(destConflicts, sourceConflicts).forEach(log::warn); + return Optional.empty(); + } + + final Set remainingDestConflicts = + projectRemainingConflictsAfterConnectorUpgrades(targetRange, destConflicts, ActorType.DESTINATION); + final Set remainingSourceConflicts = + projectRemainingConflictsAfterConnectorUpgrades(targetRange, sourceConflicts, ActorType.SOURCE); + + if (!remainingDestConflicts.isEmpty() || !remainingSourceConflicts.isEmpty()) { + // These set of connectors need a manual intervention because there is no compatible version listed + formatActorDefinitionForLogging(remainingDestConflicts, remainingSourceConflicts).forEach(log::warn); + return Optional.empty(); + } + + // These can be auto upgraded + destConflicts.removeAll(remainingDestConflicts); + sourceConflicts.removeAll(remainingSourceConflicts); + log.info("The following connectors will be upgraded"); + formatActorDefinitionForLogging(destConflicts, sourceConflicts).forEach(log::info); + return Optional.of(targetRange); + } + + protected Optional getCurrentAirbyteVersion() throws IOException { + return jobPersistence.getVersion().map(AirbyteVersion::new); + } + + protected AirbyteProtocolVersionRange getTargetProtocolVersionRange() { + return new AirbyteProtocolVersionRange(configs.getAirbyteProtocolVersionMin(), configs.getAirbyteProtocolVersionMax()); + } + + protected Map> getConflictingActorDefinitions(final AirbyteProtocolVersionRange targetRange) throws IOException { + final Map> actorDefIdToProtocolVersion = configRepository.getActorDefinitionToProtocolVersionMap(); + final Map> conflicts = + actorDefIdToProtocolVersion.entrySet().stream() + // Keeping only ActorDefinitionIds that have an unsupported protocol version + .filter(e -> !targetRange.isSupported(e.getValue().getValue())) + // Build the ActorType -> List[ActorDefIds] map + .map(e -> Map.entry(e.getValue().getKey(), e.getKey())) + // Group by ActorType and transform the List> into a Set + .collect(Collectors.groupingBy(Entry::getKey, + Collectors.collectingAndThen(Collectors.toList(), list -> list.stream().map(Entry::getValue).collect(Collectors.toSet())))); + return conflicts; + } + + protected Set projectRemainingConflictsAfterConnectorUpgrades(final AirbyteProtocolVersionRange targetRange, + final Set initialConflicts, + final ActorType actorType) { + if (initialConflicts.isEmpty()) { + return Set.of(); + } + + final Set upgradedSourceDefs = getProtocolVersionsForActorDefinitions(actorType) + // Keep definition ids if the protocol version will fall into the new supported range + .filter(e -> initialConflicts.contains(e.getKey()) && targetRange.isSupported(e.getValue())) + .map(Entry::getKey) + .collect(Collectors.toSet()); + + // Get the set of source definitions that will still have conflict after the connector upgrades + final Set remainingConflicts = new HashSet<>(initialConflicts); + remainingConflicts.removeAll(upgradedSourceDefs); + return remainingConflicts; + } + + protected Stream> getProtocolVersionsForActorDefinitions(final ActorType actorType) { + if (definitionsProvider.isEmpty()) { + return Stream.empty(); + } + + Stream> stream; + if (actorType == ActorType.SOURCE) { + stream = definitionsProvider.get().getSourceDefinitions() + .stream() + .map(def -> Map.entry(def.getSourceDefinitionId(), AirbyteProtocolVersion.getWithDefault(def.getSpec().getProtocolVersion()))); + } else { + stream = 
definitionsProvider.get().getDestinationDefinitions() + .stream() + .map(def -> Map.entry(def.getDestinationDefinitionId(), AirbyteProtocolVersion.getWithDefault(def.getSpec().getProtocolVersion()))); + } + return stream; + } + + private Stream formatActorDefinitionForLogging(final Set remainingDestConflicts, final Set remainingSourceConflicts) { + return Stream.concat( + remainingSourceConflicts.stream().map(defId -> { + final StandardSourceDefinition sourceDef; + try { + sourceDef = configRepository.getStandardSourceDefinition(defId); + return String.format("Source: %s: %s: protocol version: %s", + sourceDef.getSourceDefinitionId(), sourceDef.getName(), sourceDef.getProtocolVersion()); + } catch (Exception e) { + log.info("Failed to getStandardSourceDefinition for {}", defId, e); + return String.format("Source: %s: Failed to fetch details...", defId); + } + }), + remainingDestConflicts.stream().map(defId -> { + try { + final StandardDestinationDefinition destDef = configRepository.getStandardDestinationDefinition(defId); + return String.format("Destination: %s: %s: protocol version: %s", + destDef.getDestinationDefinitionId(), destDef.getName(), destDef.getProtocolVersion()); + } catch (Exception e) { + log.info("Failed to getStandardDestinationDefinition for {}", defId, e); + return String.format("Source: %s: Failed to fetch details...", defId); + } + })); + } + +} diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java index 251daa5101655..fbc113f69063f 100644 --- a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/BootloaderAppTest.java @@ -132,16 +132,17 @@ void testBootloaderAppBlankDb() throws Exception { val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); val bootloader = - new BootloaderApp(mockedConfigs, mockedFeatureFlags, mockedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + new BootloaderApp(mockedConfigs, mockedFeatureFlags, mockedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway, + BootloaderApp.getLocalDefinitionsProvider(), false); bootloader.load(); val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - assertEquals("0.40.14.001", jobsMigrator.getLatestMigration().getVersion().getVersion()); + assertEquals("0.40.18.002", jobsMigrator.getLatestMigration().getVersion().getVersion()); val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); // this line should change with every new migration // to show that you meant to make a new migration to the prod database - assertEquals("0.40.18.002", configsMigrator.getLatestMigration().getVersion().getVersion()); + assertEquals("0.40.18.004", configsMigrator.getLatestMigration().getVersion().getVersion()); val jobsPersistence = new DefaultJobPersistence(jobDatabase); assertEquals(VERSION_0330_ALPHA, jobsPersistence.getVersion().get()); @@ -200,7 +201,8 @@ void testBootloaderAppRunSecretMigration() throws Exception { environmentVariables.set("DATABASE_URL", container.getJdbcUrl()); // Bootstrap the database for the test - val initBootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags, null, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + val initBootloader = new BootloaderApp(mockedConfigs, mockedFeatureFlags, null, configsDslContext, jobsDslContext, configsFlyway, 
jobsFlyway, + BootloaderApp.getLocalDefinitionsProvider(), false); initBootloader.load(); final DefinitionsProvider localDefinitions = new LocalDefinitionsProvider(LocalDefinitionsProvider.DEFAULT_SEED_DEFINITION_RESOURCE_CLASS); @@ -241,7 +243,8 @@ void testBootloaderAppRunSecretMigration() throws Exception { // Perform secrets migration var bootloader = - new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway, + BootloaderApp.getLocalDefinitionsProvider(), false); boolean isMigrated = jobsPersistence.isSecretMigrated(); assertFalse(isMigrated); @@ -261,7 +264,8 @@ void testBootloaderAppRunSecretMigration() throws Exception { reset(spiedSecretMigrator); // We need to re-create the bootloader because it is closing the persistence after running load bootloader = - new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway, + BootloaderApp.getLocalDefinitionsProvider(), false); bootloader.load(); verifyNoInteractions(spiedSecretMigrator); @@ -270,7 +274,8 @@ void testBootloaderAppRunSecretMigration() throws Exception { when(mockedFeatureFlags.forceSecretMigration()).thenReturn(true); // We need to re-create the bootloader because it is closing the persistence after running load bootloader = - new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway); + new BootloaderApp(mockedConfigs, mockedFeatureFlags, spiedSecretMigrator, configsDslContext, jobsDslContext, configsFlyway, jobsFlyway, + BootloaderApp.getLocalDefinitionsProvider(), false); bootloader.load(); verify(spiedSecretMigrator).migrateSecrets(); } diff --git a/airbyte-bootloader/src/test/java/io/airbyte/bootloader/ProtocolVersionCheckerTest.java b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/ProtocolVersionCheckerTest.java new file mode 100644 index 0000000000000..59f290a480cf4 --- /dev/null +++ b/airbyte-bootloader/src/test/java/io/airbyte/bootloader/ProtocolVersionCheckerTest.java @@ -0,0 +1,341 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.bootloader; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import io.airbyte.commons.version.AirbyteProtocolVersionRange; +import io.airbyte.commons.version.Version; +import io.airbyte.config.ActorType; +import io.airbyte.config.Configs; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.init.DefinitionsProvider; +import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.persistence.job.JobPersistence; +import io.airbyte.protocol.models.ConnectorSpecification; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +class ProtocolVersionCheckerTest { + + Configs configs; + ConfigRepository configRepository; + DefinitionsProvider definitionsProvider; + JobPersistence jobPersistence; + ProtocolVersionChecker protocolVersionChecker; + + final Version V0_0_0 = new Version("0.0.0"); + final Version V1_0_0 = new Version("1.0.0"); + final Version V2_0_0 = new Version("2.0.0"); + + @BeforeEach + void beforeEach() throws IOException { + configs = mock(Configs.class); + configRepository = mock(ConfigRepository.class); + definitionsProvider = mock(DefinitionsProvider.class); + jobPersistence = mock(JobPersistence.class); + protocolVersionChecker = new ProtocolVersionChecker(jobPersistence, configs, configRepository, Optional.of(definitionsProvider)); + + when(jobPersistence.getVersion()).thenReturn(Optional.of("1.2.3")); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testFirstInstallCheck(final boolean supportAutoUpgrade) throws IOException { + when(jobPersistence.getVersion()).thenReturn(Optional.empty()); + setTargetProtocolRangeRange(V0_0_0, V1_0_0); + + assertEquals(Optional.of(new AirbyteProtocolVersionRange(V0_0_0, V1_0_0)), protocolVersionChecker.validate(supportAutoUpgrade)); + } + + @Test + void testGetTargetRange() throws IOException { + setTargetProtocolRangeRange(V1_0_0, V2_0_0); + + assertEquals(new AirbyteProtocolVersionRange(V1_0_0, V2_0_0), protocolVersionChecker.getTargetProtocolVersionRange()); + } + + @Test + void testRetrievingCurrentConflicts() throws IOException { + final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); + + final UUID source1 = UUID.randomUUID(); + final UUID source2 = UUID.randomUUID(); + final UUID source3 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + final UUID dest2 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V0_0_0), + source2, Map.entry(ActorType.SOURCE, V1_0_0), + source3, Map.entry(ActorType.SOURCE, V2_0_0), + dest1, Map.entry(ActorType.DESTINATION, V0_0_0), + dest2, Map.entry(ActorType.DESTINATION, V0_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + final Map> conflicts = protocolVersionChecker.getConflictingActorDefinitions(targetRange); + + final Map> expectedConflicts = Map.of( + ActorType.DESTINATION, Set.of(dest1, dest2), + ActorType.SOURCE, Set.of(source1)); + assertEquals(expectedConflicts, conflicts); + 
} + + @Test + void testRetrievingCurrentConflictsWhenNoConflicts() throws IOException { + final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); + + final UUID source1 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V2_0_0), + dest1, Map.entry(ActorType.DESTINATION, V1_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + final Map> conflicts = protocolVersionChecker.getConflictingActorDefinitions(targetRange); + + assertEquals(Map.of(), conflicts); + } + + @Test + void testProjectRemainingSourceConflicts() { + final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); + + final UUID unrelatedSource = UUID.randomUUID(); + final UUID upgradedSource = UUID.randomUUID(); + final UUID notChangedSource = UUID.randomUUID(); + final UUID missingSource = UUID.randomUUID(); + final Set initialConflicts = Set.of(upgradedSource, notChangedSource, missingSource); + + setNewSourceDefinitions(List.of( + Map.entry(unrelatedSource, V2_0_0), + Map.entry(upgradedSource, V1_0_0), + Map.entry(notChangedSource, V0_0_0))); + + final Set actualConflicts = + protocolVersionChecker.projectRemainingConflictsAfterConnectorUpgrades(targetRange, initialConflicts, ActorType.SOURCE); + + final Set expectedConflicts = Set.of(notChangedSource, missingSource); + assertEquals(expectedConflicts, actualConflicts); + } + + @Test + void testProjectRemainingDestinationConflicts() { + final AirbyteProtocolVersionRange targetRange = new AirbyteProtocolVersionRange(V1_0_0, V2_0_0); + + final UUID dest1 = UUID.randomUUID(); + final UUID dest2 = UUID.randomUUID(); + final UUID dest3 = UUID.randomUUID(); + final Set initialConflicts = Set.of(dest1, dest2, dest3); + + setNewDestinationDefinitions(List.of( + Map.entry(dest1, V2_0_0), + Map.entry(dest2, V1_0_0), + Map.entry(dest3, V2_0_0))); + + final Set actualConflicts = + protocolVersionChecker.projectRemainingConflictsAfterConnectorUpgrades(targetRange, initialConflicts, ActorType.DESTINATION); + + final Set expectedConflicts = Set.of(); + assertEquals(expectedConflicts, actualConflicts); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testValidateSameRange(final boolean supportAutoUpgrade) throws Exception { + setCurrentProtocolRangeRange(V0_0_0, V2_0_0); + setTargetProtocolRangeRange(V0_0_0, V2_0_0); + + final Optional range = protocolVersionChecker.validate(supportAutoUpgrade); + assertEquals(Optional.of(new AirbyteProtocolVersionRange(V0_0_0, V2_0_0)), range); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testValidateAllConnectorsAreUpgraded(final boolean supportAutoUpgrade) throws Exception { + setCurrentProtocolRangeRange(V0_0_0, V1_0_0); + setTargetProtocolRangeRange(V1_0_0, V2_0_0); + + final UUID source1 = UUID.randomUUID(); + final UUID source2 = UUID.randomUUID(); + final UUID source3 = UUID.randomUUID(); + final UUID source4 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + final UUID dest2 = UUID.randomUUID(); + final UUID dest3 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V0_0_0), + source2, Map.entry(ActorType.SOURCE, V1_0_0), + source3, Map.entry(ActorType.SOURCE, V0_0_0), + source4, Map.entry(ActorType.SOURCE, V0_0_0), + dest1, Map.entry(ActorType.DESTINATION, V0_0_0), + dest2, 
Map.entry(ActorType.DESTINATION, V1_0_0), + dest3, Map.entry(ActorType.DESTINATION, V2_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + setNewSourceDefinitions(List.of( + Map.entry(source1, V1_0_0), + Map.entry(source2, V1_0_0), + Map.entry(source3, V2_0_0), + Map.entry(source4, V1_0_0))); + setNewDestinationDefinitions(List.of( + Map.entry(dest1, V1_0_0), + Map.entry(dest2, V1_0_0), + Map.entry(dest3, V2_0_0))); + + final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); + + // Without auto upgrade, we will fail the validation because it would require connector automatic + // actor definition + // upgrade for used sources/destinations. + if (supportAutoUpgrade) { + assertEquals(Optional.of(new AirbyteProtocolVersionRange(V1_0_0, V2_0_0)), actualRange); + } else { + assertEquals(Optional.empty(), actualRange); + } + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testValidateBadUpgradeMissingSource(final boolean supportAutoUpgrade) throws Exception { + setCurrentProtocolRangeRange(V0_0_0, V1_0_0); + setTargetProtocolRangeRange(V1_0_0, V2_0_0); + + final UUID source1 = UUID.randomUUID(); + final UUID source2 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + final UUID dest2 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V0_0_0), + source2, Map.entry(ActorType.SOURCE, V0_0_0), + dest1, Map.entry(ActorType.DESTINATION, V0_0_0), + dest2, Map.entry(ActorType.DESTINATION, V0_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + setNewSourceDefinitions(List.of( + Map.entry(source1, V0_0_0), + Map.entry(source2, V1_0_0))); + setNewDestinationDefinitions(List.of( + Map.entry(dest1, V1_0_0), + Map.entry(dest2, V1_0_0))); + + final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); + assertEquals(Optional.empty(), actualRange); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testValidateBadUpgradeMissingDestination(final boolean supportAutoUpgrade) throws Exception { + setCurrentProtocolRangeRange(V0_0_0, V1_0_0); + setTargetProtocolRangeRange(V1_0_0, V2_0_0); + + final UUID source1 = UUID.randomUUID(); + final UUID source2 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + final UUID dest2 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V0_0_0), + source2, Map.entry(ActorType.SOURCE, V0_0_0), + dest1, Map.entry(ActorType.DESTINATION, V0_0_0), + dest2, Map.entry(ActorType.DESTINATION, V0_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + setNewSourceDefinitions(List.of( + Map.entry(source1, V1_0_0), + Map.entry(source2, V1_0_0))); + setNewDestinationDefinitions(List.of( + Map.entry(dest1, V1_0_0), + Map.entry(dest2, V0_0_0))); + + final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); + assertEquals(Optional.empty(), actualRange); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testValidateFailsOnProtocolRangeChangeWithoutDefinitionsProvider(final boolean supportAutoUpgrade) throws Exception { + protocolVersionChecker = new ProtocolVersionChecker(jobPersistence, configs, configRepository, Optional.empty()); + + setCurrentProtocolRangeRange(V0_0_0, V1_0_0); + setTargetProtocolRangeRange(V1_0_0, V2_0_0); + + final UUID 
source1 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V0_0_0), + dest1, Map.entry(ActorType.DESTINATION, V0_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); + assertEquals(Optional.empty(), actualRange); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testValidateSucceedsWhenNoProtocolRangeChangeWithoutDefinitionsProvider(final boolean supportAutoUpgrade) throws Exception { + protocolVersionChecker = new ProtocolVersionChecker(jobPersistence, configs, configRepository, Optional.empty()); + + setCurrentProtocolRangeRange(V0_0_0, V2_0_0); + setTargetProtocolRangeRange(V0_0_0, V2_0_0); + + final UUID source1 = UUID.randomUUID(); + final UUID dest1 = UUID.randomUUID(); + + final Map> initialActorDefinitions = Map.of( + source1, Map.entry(ActorType.SOURCE, V0_0_0), + dest1, Map.entry(ActorType.DESTINATION, V0_0_0)); + when(configRepository.getActorDefinitionToProtocolVersionMap()).thenReturn(initialActorDefinitions); + + final Optional actualRange = protocolVersionChecker.validate(supportAutoUpgrade); + assertEquals(Optional.of(new AirbyteProtocolVersionRange(V0_0_0, V2_0_0)), actualRange); + } + + private void setCurrentProtocolRangeRange(final Version min, final Version max) throws IOException { + when(jobPersistence.getCurrentProtocolVersionRange()).thenReturn(Optional.of(new AirbyteProtocolVersionRange(min, max))); + when(jobPersistence.getAirbyteProtocolVersionMin()).thenReturn(Optional.of(min)); + when(jobPersistence.getAirbyteProtocolVersionMax()).thenReturn(Optional.of(max)); + } + + private void setTargetProtocolRangeRange(final Version min, final Version max) throws IOException { + when(configs.getAirbyteProtocolVersionMin()).thenReturn(min); + when(configs.getAirbyteProtocolVersionMax()).thenReturn(max); + } + + private void setNewDestinationDefinitions(final List> defs) { + final List destDefinitions = defs.stream() + .map(e -> new StandardDestinationDefinition() + .withDestinationDefinitionId(e.getKey()) + .withSpec(new ConnectorSpecification().withProtocolVersion(e.getValue().serialize()))) + .toList(); + when(definitionsProvider.getDestinationDefinitions()).thenReturn(destDefinitions); + } + + private void setNewSourceDefinitions(final List> defs) { + final List sourceDefinitions = defs.stream() + .map(e -> new StandardSourceDefinition() + .withSourceDefinitionId(e.getKey()) + .withSpec(new ConnectorSpecification().withProtocolVersion(e.getValue().serialize()))) + .toList(); + when(definitionsProvider.getSourceDefinitions()).thenReturn(sourceDefinitions); + } + +} diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index fda8cd5846acf..5adba2bc6aec1 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,35 @@ # Changelog +## 0.9.4 +Low-code: Fix reference resolution for connector builder + +## 0.9.3 +Low-code: Avoid duplicate HTTP query in `simple_retriever` + +## 0.9.2 +Low-code: Make `default_paginator.page_token_option` optional + +## 0.9.1 +Low-code: Fix filtering vars in `InterpolatedRequestInputProvider.eval_request_inputs` + +## 0.9.0 +Low-code: Allow `grant_type` to be specified for OAuthAuthenticator + +## 0.8.1 +Low-code: Don't update cursor for non-record messages and fix default loader for connector builder manifests + +## 0.8.0 
+Low-code: Allow for request and response to be emitted as log messages + +## 0.7.1 +Low-code: Decouple yaml manifest parsing from the declarative source implementation + +## 0.7.0 +Low-code: Allow connector specifications to be defined in the manifest + +## 0.6.0 +Low-code: Add support for monthly and yearly incremental updates for `DatetimeStreamSlicer` + ## 0.5.4 Low-code: Get response.json in a safe way diff --git a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py index a4b654310d00b..190442f884696 100644 --- a/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py +++ b/airbyte-cdk/python/airbyte_cdk/models/airbyte_protocol.py @@ -81,6 +81,7 @@ class Config: class TraceType(Enum): ERROR = "ERROR" + ESTIMATE = "ESTIMATE" class FailureType(Enum): @@ -98,6 +99,28 @@ class Config: failure_type: Optional[FailureType] = Field(None, description="The type of error") +class EstimateType(Enum): + STREAM = "STREAM" + SYNC = "SYNC" + + +class AirbyteEstimateTraceMessage(BaseModel): + class Config: + extra = Extra.allow + + name: str = Field(..., description="The name of the stream") + type: EstimateType = Field(..., description="The type of estimate", title="estimate type") + namespace: Optional[str] = Field(None, description="The namespace of the stream") + row_estimate: Optional[int] = Field( + None, + description="The estimated number of rows to be emitted by this sync for this stream", + ) + byte_estimate: Optional[int] = Field( + None, + description="The estimated number of bytes to be emitted by this sync for this stream", + ) + + class OrchestratorType(Enum): CONNECTOR_CONFIG = "CONNECTOR_CONFIG" @@ -213,6 +236,10 @@ class Config: type: TraceType = Field(..., description="the type of trace message", title="trace type") emitted_at: float = Field(..., description="the time in ms that the message was emitted") error: Optional[AirbyteErrorTraceMessage] = Field(None, description="error trace message: the error object") + estimate: Optional[AirbyteEstimateTraceMessage] = Field( + None, + description="Estimate trace message: a guess at how much data will be produced in this sync", + ) class AirbyteControlMessage(BaseModel): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py index c0c2cf3dbc85b..389ffcc19d66a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/abstract_source.py @@ -4,15 +4,12 @@ import logging from abc import ABC, abstractmethod -from datetime import datetime -from functools import lru_cache from typing import Any, Dict, Iterator, List, Mapping, MutableMapping, Optional, Tuple, Union from airbyte_cdk.models import ( AirbyteCatalog, AirbyteConnectionStatus, AirbyteMessage, - AirbyteRecordMessage, AirbyteStateMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, @@ -23,9 +20,10 @@ from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.source import Source from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.streams.http.http import HttpStream +from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message from airbyte_cdk.sources.utils.schema_helpers import InternalConfig, split_config -from airbyte_cdk.sources.utils.transform import TypeTransformer from airbyte_cdk.utils.event_timing import create_timer from 
airbyte_cdk.utils.traced_exception import AirbyteTracedException @@ -241,20 +239,25 @@ def _read_incremental( stream_state=stream_state, cursor_field=configured_stream.cursor_field or None, ) - for record_counter, record_data in enumerate(records, start=1): - yield self._as_airbyte_record(stream_name, record_data) - stream_state = stream_instance.get_updated_state(stream_state, record_data) - checkpoint_interval = stream_instance.state_checkpoint_interval - if checkpoint_interval and record_counter % checkpoint_interval == 0: - yield self._checkpoint_state(stream_instance, stream_state, state_manager) - - total_records_counter += 1 - # This functionality should ideally live outside of this method - # but since state is managed inside this method, we keep track - # of it here. - if self._limit_reached(internal_config, total_records_counter): - # Break from slice loop to save state and exit from _read_incremental function. - break + record_counter = 0 + for message_counter, record_data_or_message in enumerate(records, start=1): + message = self._get_message(record_data_or_message, stream_instance) + yield message + if message.type == MessageType.RECORD: + record = message.record + stream_state = stream_instance.get_updated_state(stream_state, record.data) + checkpoint_interval = stream_instance.state_checkpoint_interval + record_counter += 1 + if checkpoint_interval and record_counter % checkpoint_interval == 0: + yield self._checkpoint_state(stream_instance, stream_state, state_manager) + + total_records_counter += 1 + # This functionality should ideally live outside of this method + # but since state is managed inside this method, we keep track + # of it here. + if self._limit_reached(internal_config, total_records_counter): + # Break from slice loop to save state and exit from _read_incremental function. + break yield self._checkpoint_state(stream_instance, stream_state, state_manager) if self._limit_reached(internal_config, total_records_counter): @@ -277,16 +280,18 @@ def _read_full_refresh( total_records_counter = 0 for _slice in slices: logger.debug("Processing stream slice", extra={"slice": _slice}) - records = stream_instance.read_records( + record_data_or_messages = stream_instance.read_records( stream_slice=_slice, sync_mode=SyncMode.full_refresh, cursor_field=configured_stream.cursor_field, ) - for record in records: - yield self._as_airbyte_record(configured_stream.stream.name, record) - total_records_counter += 1 - if self._limit_reached(internal_config, total_records_counter): - return + for record_data_or_message in record_data_or_messages: + message = self._get_message(record_data_or_message, stream_instance) + yield message + if message.type == MessageType.RECORD: + total_records_counter += 1 + if self._limit_reached(internal_config, total_records_counter): + return def _checkpoint_state(self, stream: Stream, stream_state, state_manager: ConnectorStateManager): # First attempt to retrieve the current state using the stream's state property. We receive an AttributeError if the state @@ -294,33 +299,11 @@ def _checkpoint_state(self, stream: Stream, stream_state, state_manager: Connect # instance's deprecated get_updated_state() method. 
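Circling back to the `ESTIMATE` trace type added to the protocol models above: a source could emit such a message as sketched below. The stream name and counts are invented for illustration.

```python
import time

from airbyte_cdk.models.airbyte_protocol import (
    AirbyteEstimateTraceMessage,
    AirbyteMessage,
    AirbyteTraceMessage,
    EstimateType,
    TraceType,
    Type,
)

# Illustrative values only: "users" and the estimates below are not from this changeset.
estimate_message = AirbyteMessage(
    type=Type.TRACE,
    trace=AirbyteTraceMessage(
        type=TraceType.ESTIMATE,
        emitted_at=time.time() * 1000,  # milliseconds, per the protocol
        estimate=AirbyteEstimateTraceMessage(
            type=EstimateType.STREAM,
            name="users",
            row_estimate=10_000,
            byte_estimate=5_000_000,
        ),
    ),
)
```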
try: state_manager.update_state_for_stream(stream.name, stream.namespace, stream.state) + except AttributeError: state_manager.update_state_for_stream(stream.name, stream.namespace, stream_state) return state_manager.create_state_message(stream.name, stream.namespace, send_per_stream_state=self.per_stream_state_enabled) - @lru_cache(maxsize=None) - def _get_stream_transformer_and_schema(self, stream_name: str) -> Tuple[TypeTransformer, Mapping[str, Any]]: - """ - Lookup stream's transform object and jsonschema based on stream name. - This function would be called a lot so using caching to save on costly - get_json_schema operation. - :param stream_name name of stream from catalog. - :return tuple with stream transformer object and discover json schema. - """ - stream_instance = self._stream_to_instance_map[stream_name] - return stream_instance.transformer, stream_instance.get_json_schema() - - def _as_airbyte_record(self, stream_name: str, data: Mapping[str, Any]): - now_millis = int(datetime.now().timestamp() * 1000) - transformer, schema = self._get_stream_transformer_and_schema(stream_name) - # Transform object fields according to config. Most likely you will - # need it to normalize values against json schema. By default no action - # taken unless configured. See - # docs/connector-development/cdk-python/schemas.md for details. - transformer.transform(data, schema) # type: ignore - message = AirbyteRecordMessage(stream=stream_name, data=data, emitted_at=now_millis) - return AirbyteMessage(type=MessageType.RECORD, record=message) - @staticmethod def _apply_log_level_to_stream_logger(logger: logging.Logger, stream_instance: Stream): """ @@ -329,3 +312,12 @@ def _apply_log_level_to_stream_logger(logger: logging.Logger, stream_instance: S """ if hasattr(logger, "level"): stream_instance.logger.setLevel(logger.level) + + def _get_message(self, record_data_or_message: Union[StreamData, AirbyteMessage], stream: Stream): + """ + Converts the input to an AirbyteMessage if it is a StreamData. 
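The practical effect of this refactor is that `read_records` implementations may now yield either plain record mappings or fully-formed `AirbyteMessage`s (for example request/response log messages), and both flow through the same conversion path. A hypothetical stream method, purely as a sketch:

```python
from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, Type

# Hypothetical read_records body: the log text and record fields are illustrative.
def read_records(self, **kwargs):
    # Passed through unchanged by _get_message because it is already an AirbyteMessage.
    yield AirbyteMessage(
        type=Type.LOG,
        log=AirbyteLogMessage(level=Level.INFO, message="request: GET /v1/users?page=1"),
    )
    # Converted via stream_data_to_airbyte_message(stream.name, data, transformer, schema).
    yield {"id": 1, "name": "example"}
```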
Returns the input as is if it is already an AirbyteMessage + """ + if isinstance(record_data_or_message, AirbyteMessage): + return record_data_or_message + else: + return stream_data_to_airbyte_message(stream.name, record_data_or_message, stream.transformer, stream.get_json_schema()) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/oauth.py index 2446ba131ed76..97869cc9010a5 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/oauth.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/oauth.py @@ -31,6 +31,7 @@ class DeclarativeOauth2Authenticator(AbstractOauth2Authenticator, DeclarativeAut scopes (Optional[List[str]]): The scopes to request token_expiry_date (Optional[Union[InterpolatedString, str]]): The access token expiration date refresh_request_body (Optional[Mapping[str, Any]]): The request body to send in the refresh request + grant_type: The grant_type to request for access_token """ token_refresh_endpoint: Union[InterpolatedString, str] @@ -45,6 +46,7 @@ class DeclarativeOauth2Authenticator(AbstractOauth2Authenticator, DeclarativeAut access_token_name: Union[InterpolatedString, str] = "access_token" expires_in_name: Union[InterpolatedString, str] = "expires_in" refresh_request_body: Optional[Mapping[str, Any]] = None + grant_type: Union[InterpolatedString, str] = "refresh_token" def __post_init__(self, options: Mapping[str, Any]): self.token_refresh_endpoint = InterpolatedString.create(self.token_refresh_endpoint, options=options) @@ -53,6 +55,7 @@ def __post_init__(self, options: Mapping[str, Any]): self.refresh_token = InterpolatedString.create(self.refresh_token, options=options) self.access_token_name = InterpolatedString.create(self.access_token_name, options=options) self.expires_in_name = InterpolatedString.create(self.expires_in_name, options=options) + self.grant_type = InterpolatedString.create(self.grant_type, options=options) self._refresh_request_body = InterpolatedMapping(self.refresh_request_body or {}, options=options) self._token_expiry_date = ( pendulum.parse(InterpolatedString.create(self.token_expiry_date, options=options).eval(self.config)) @@ -82,6 +85,9 @@ def get_access_token_name(self) -> InterpolatedString: def get_expires_in_name(self) -> InterpolatedString: return self.expires_in_name.eval(self.config) + def get_grant_type(self) -> InterpolatedString: + return self.grant_type.eval(self.config) + def get_refresh_request_body(self) -> Mapping[str, Any]: return self._refresh_request_body.eval(self.config) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/config_component_schema.json b/airbyte-cdk/python/airbyte_cdk/sources/declarative/config_component_schema.json index 5ae05dbf5fcf7..8076c6f4ad7f8 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/config_component_schema.json +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/config_component_schema.json @@ -515,6 +515,17 @@ }, "refresh_request_body": { "type": "object" + }, + "grant_type": { + "anyOf": [ + { + "$ref": "#/definitions/InterpolatedString" + }, + { + "type": "string" + } + ], + "default": "refresh_token" } } } diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/manifest_declarative_source.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/manifest_declarative_source.py new file mode 100644 index 0000000000000..240799fe3ad1d --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/manifest_declarative_source.py @@ 
-0,0 +1,207 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import inspect +import json +import logging +import typing +from dataclasses import dataclass, fields +from enum import Enum, EnumMeta +from typing import Any, Iterator, List, Mapping, MutableMapping, Union + +from airbyte_cdk.models import ( + AirbyteConnectionStatus, + AirbyteMessage, + AirbyteStateMessage, + ConfiguredAirbyteCatalog, + ConnectorSpecification, +) +from airbyte_cdk.sources.declarative.checks import CheckStream +from airbyte_cdk.sources.declarative.checks.connection_checker import ConnectionChecker +from airbyte_cdk.sources.declarative.declarative_source import DeclarativeSource +from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream +from airbyte_cdk.sources.declarative.exceptions import InvalidConnectorDefinitionException +from airbyte_cdk.sources.declarative.parsers.factory import DeclarativeComponentFactory +from airbyte_cdk.sources.declarative.parsers.manifest_reference_resolver import ManifestReferenceResolver +from airbyte_cdk.sources.declarative.types import ConnectionDefinition +from airbyte_cdk.sources.streams.core import Stream +from dataclasses_jsonschema import JsonSchemaMixin +from jsonschema.validators import validate + + +@dataclass +class ConcreteDeclarativeSource(JsonSchemaMixin): + version: str + checker: CheckStream + streams: List[DeclarativeStream] + + +class ManifestDeclarativeSource(DeclarativeSource): + """Declarative source defined by a manifest of low-code components that define source connector behavior""" + + VALID_TOP_LEVEL_FIELDS = {"check", "definitions", "spec", "streams", "version"} + + def __init__(self, source_config: ConnectionDefinition, debug: bool = False): + """ + :param source_config(Mapping[str, Any]): The manifest of low-code components that describe the source connector + :param debug(bool): True if debug mode is enabled + """ + self.logger = logging.getLogger(f"airbyte.{self.name}") + + evaluated_manifest = {} + resolved_source_config = ManifestReferenceResolver().preprocess_manifest(source_config, evaluated_manifest, "") + self._source_config = resolved_source_config + self._debug = debug + self._factory = DeclarativeComponentFactory() + + self._validate_source() + + # Stopgap to protect the top-level namespace until it's validated through the schema + unknown_fields = [key for key in self._source_config.keys() if key not in self.VALID_TOP_LEVEL_FIELDS] + if unknown_fields: + raise InvalidConnectorDefinitionException(f"Found unknown top-level fields: {unknown_fields}") + + @property + def connection_checker(self) -> ConnectionChecker: + check = self._source_config["check"] + if "class_name" not in check: + check["class_name"] = "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream" + return self._factory.create_component(check, dict())(source=self) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + self._emit_manifest_debug_message(extra_args={"source_name": self.name, "parsed_config": json.dumps(self._source_config)}) + + source_streams = [self._factory.create_component(stream_config, config, True)() for stream_config in self._stream_configs()] + for stream in source_streams: + # make sure the log level is always applied to the stream's logger + self._apply_log_level_to_stream_logger(self.logger, stream) + return source_streams + + def spec(self, logger: logging.Logger) -> ConnectorSpecification: + """ + Returns the connector specification (spec) as defined in the Airbyte Protocol. 
The spec is an object describing the possible + configurations (e.g: username and password) which can be configured when running this connector. For low-code connectors, this + will first attempt to load the spec from the manifest's spec block, otherwise it will load it from "spec.yaml" or "spec.json" + in the project root. + """ + self._configure_logger_level(logger) + self._emit_manifest_debug_message(extra_args={"source_name": self.name, "parsed_config": json.dumps(self._source_config)}) + + spec = self._source_config.get("spec") + if spec: + if "class_name" not in spec: + spec["class_name"] = "airbyte_cdk.sources.declarative.spec.Spec" + spec_component = self._factory.create_component(spec, dict())() + return spec_component.generate_spec() + else: + return super().spec(logger) + + def check(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + self._configure_logger_level(logger) + return super().check(logger, config) + + def read( + self, + logger: logging.Logger, + config: Mapping[str, Any], + catalog: ConfiguredAirbyteCatalog, + state: Union[List[AirbyteStateMessage], MutableMapping[str, Any]] = None, + ) -> Iterator[AirbyteMessage]: + self._configure_logger_level(logger) + yield from super().read(logger, config, catalog, state) + + def _configure_logger_level(self, logger: logging.Logger): + """ + Set the log level to logging.DEBUG if debug mode is enabled + """ + if self._debug: + logger.setLevel(logging.DEBUG) + + def _validate_source(self): + full_config = {} + if "version" in self._source_config: + full_config["version"] = self._source_config["version"] + if "check" in self._source_config: + full_config["checker"] = self._source_config["check"] + streams = [self._factory.create_component(stream_config, {}, False)() for stream_config in self._stream_configs()] + if len(streams) > 0: + full_config["streams"] = streams + declarative_source_schema = ConcreteDeclarativeSource.json_schema() + validate(full_config, declarative_source_schema) + + def _stream_configs(self): + stream_configs = self._source_config.get("streams", []) + for s in stream_configs: + if "class_name" not in s: + s["class_name"] = "airbyte_cdk.sources.declarative.declarative_stream.DeclarativeStream" + return stream_configs + + @staticmethod + def generate_schema() -> str: + expanded_source_manifest = ManifestDeclarativeSource.expand_schema_interfaces(ConcreteDeclarativeSource, {}) + expanded_schema = expanded_source_manifest.json_schema() + return json.dumps(expanded_schema, cls=SchemaEncoder) + + @staticmethod + def expand_schema_interfaces(expand_class: type, visited: dict) -> type: + """ + Recursive function that takes in class type that will have its interface fields unpacked and expended and then recursively + attempt the same expansion on all the class' underlying fields that are declarative component. It also performs expansion + with respect to interfaces that are contained within generic data types. 
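As a usage sketch of the new `ManifestDeclarativeSource` (assuming `manifest` is an already-parsed, valid low-code manifest dict with `version`, `check` and `streams`, and `config` is the connector configuration; neither is defined in this changeset):

```python
import logging

from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource

# `manifest` and `config` are assumed to exist, e.g. manifest = yaml.safe_load(open("manifest.yaml"))
source = ManifestDeclarativeSource(manifest, debug=True)

spec = source.spec(logging.getLogger("airbyte"))  # served from the manifest's spec block when present
streams = source.streams(config)                  # DeclarativeStream instances built by the component factory
```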
+ :param expand_class: The declarative component class that will have its interface fields expanded + :param visited: cache used to store a record of already visited declarative classes that have already been seen + :return: The expanded declarative component + """ + + # Recursive base case to stop recursion if we have already expanded an interface in case of cyclical components + # like CompositeErrorHandler + if expand_class.__name__ in visited: + return visited[expand_class.__name__] + visited[expand_class.__name__] = expand_class + + next_classes = [] + class_fields = fields(expand_class) + for field in class_fields: + unpacked_field_types = DeclarativeComponentFactory.unpack(field.type) + expand_class.__annotations__[field.name] = unpacked_field_types + next_classes.extend(ManifestDeclarativeSource._get_next_expand_classes(field.type)) + for next_class in next_classes: + ManifestDeclarativeSource.expand_schema_interfaces(next_class, visited) + return expand_class + + @staticmethod + def _get_next_expand_classes(field_type) -> list[type]: + """ + Parses through a given field type and assembles a list of all underlying declarative components. For a concrete declarative class + it will return itself. For a declarative interface it will return its subclasses. For declarative components in a generic type + it will return the unpacked classes. Any non-declarative types will be skipped. + :param field_type: A field type that + :return: + """ + generic_type = typing.get_origin(field_type) + if generic_type is None: + # We can only continue parsing declarative that inherit from the JsonSchemaMixin class because it is used + # to generate the final json schema + if inspect.isclass(field_type) and issubclass(field_type, JsonSchemaMixin) and not isinstance(field_type, EnumMeta): + subclasses = field_type.__subclasses__() + if subclasses: + return subclasses + else: + return [field_type] + elif generic_type == list or generic_type == Union: + next_classes = [] + for underlying_type in typing.get_args(field_type): + next_classes.extend(ManifestDeclarativeSource._get_next_expand_classes(underlying_type)) + return next_classes + return [] + + def _emit_manifest_debug_message(self, extra_args: dict): + self.logger.debug("declarative source created from manifest", extra=extra_args) + + +class SchemaEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, property) or isinstance(obj, Enum): + return str(obj) + return json.JSONEncoder.default(self, obj) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py index 6f18376f2a85b..66d4d6b0bc905 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/class_types_registry.py @@ -33,6 +33,7 @@ from airbyte_cdk.sources.declarative.requesters.paginators.strategies.page_increment import PageIncrement from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever from airbyte_cdk.sources.declarative.schema.json_file_schema_loader import JsonFileSchemaLoader +from airbyte_cdk.sources.declarative.spec import Spec from airbyte_cdk.sources.declarative.stream_slicers.cartesian_product_stream_slicer import CartesianProductStreamSlicer from airbyte_cdk.sources.declarative.stream_slicers.datetime_stream_slicer import DatetimeStreamSlicer from 
airbyte_cdk.sources.declarative.stream_slicers.list_stream_slicer import ListStreamSlicer @@ -75,6 +76,7 @@ "RemoveFields": RemoveFields, "SimpleRetriever": SimpleRetriever, "SingleSlice": SingleSlice, + "Spec": Spec, "SubstreamSlicer": SubstreamSlicer, "WaitUntilTimeFromHeader": WaitUntilTimeFromHeaderBackoffStrategy, "WaitTimeFromHeader": WaitTimeFromHeaderBackoffStrategy, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/config_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/config_parser.py deleted file mode 100644 index 06c61f1215448..0000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/config_parser.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from abc import ABC, abstractmethod - -from airbyte_cdk.sources.declarative.types import ConnectionDefinition - - -class ConnectionDefinitionParser(ABC): - """ - Parses a string to a ConnectionDefinition - """ - - @abstractmethod - def parse(self, config_str: str) -> ConnectionDefinition: - """Parses the config_str to a ConnectionDefinition""" diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py index b499ea756132f..0904fb3bb66a3 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/factory.py @@ -246,7 +246,11 @@ def is_object_definition_with_class_name(definition): @staticmethod def is_object_definition_with_type(definition): - return isinstance(definition, dict) and "type" in definition + # The `type` field is an overloaded term in the context of the low-code manifest. As part of the language, `type` is shorthand + # for convenience to avoid defining the entire classpath. For the connector specification, `type` is a part of the spec schema. + # For spec parsing, as part of this check, when the type is set to object, we want it to remain a mapping. But when type is + # defined any other way, then it should be parsed as a declarative component in the manifest. + return isinstance(definition, dict) and "type" in definition and definition["type"] != "object" @staticmethod def get_default_type(parameter_name, parent_class): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py new file mode 100644 index 0000000000000..04af303c87f1d --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/manifest_reference_resolver.py @@ -0,0 +1,186 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from copy import deepcopy +from typing import Any, Mapping, Tuple, Union + +from airbyte_cdk.sources.declarative.parsers.undefined_reference_exception import UndefinedReferenceException + + +class ManifestReferenceResolver: + """ + An incoming manifest can contain references to values previously defined. + This parser will dereference these values to produce a complete ConnectionDefinition. + + References can be defined using a *ref() string. 
+ ``` + key: 1234 + reference: "*ref(key)" + ``` + will produce the following definition: + ``` + key: 1234 + reference: 1234 + ``` + This also works with objects: + ``` + key_value_pairs: + k1: v1 + k2: v2 + same_key_value_pairs: "*ref(key_value_pairs)" + ``` + will produce the following definition: + ``` + key_value_pairs: + k1: v1 + k2: v2 + same_key_value_pairs: + k1: v1 + k2: v2 + ``` + + The $ref keyword can be used to refer to an object and enhance it with addition key-value pairs + ``` + key_value_pairs: + k1: v1 + k2: v2 + same_key_value_pairs: + $ref: "*ref(key_value_pairs)" + k3: v3 + ``` + will produce the following definition: + ``` + key_value_pairs: + k1: v1 + k2: v2 + same_key_value_pairs: + k1: v1 + k2: v2 + k3: v3 + ``` + + References can also point to nested values. + Nested references are ambiguous because one could define a key containing with `.` + in this example, we want to refer to the limit key in the dict object: + ``` + dict: + limit: 50 + limit_ref: "*ref(dict.limit)" + ``` + will produce the following definition: + ``` + dict + limit: 50 + limit-ref: 50 + ``` + + whereas here we want to access the `nested.path` value. + ``` + nested: + path: "first one" + nested.path: "uh oh" + value: "ref(nested.path) + ``` + will produce the following definition: + ``` + nested: + path: "first one" + nested.path: "uh oh" + value: "uh oh" + ``` + + to resolve the ambiguity, we try looking for the reference key at the top level, and then traverse the structs downward + until we find a key with the given path, or until there is nothing to traverse. + """ + + ref_tag = "$ref" + + def preprocess_manifest(self, manifest: Mapping[str, Any], evaluated_mapping: Mapping[str, Any], path: Union[str, Tuple[str]]): + + """ + :param manifest: incoming manifest that could have references to previously defined components + :param evaluated_mapping: mapping produced by dereferencing the content of input_mapping + :param path: curent path in configuration traversal + :return: + """ + d = {} + if self.ref_tag in manifest: + partial_ref_string = manifest[self.ref_tag] + d = deepcopy(self._preprocess(partial_ref_string, evaluated_mapping, path)) + + for key, value in manifest.items(): + if key == self.ref_tag: + continue + full_path = self._resolve_value(key, path) + if full_path in evaluated_mapping: + raise Exception(f"Databag already contains key={key} with path {full_path}") + processed_value = self._preprocess(value, evaluated_mapping, full_path) + evaluated_mapping[full_path] = processed_value + d[key] = processed_value + + return d + + def _get_ref_key(self, s: str) -> str: + ref_start = s.find("*ref(") + if ref_start == -1: + return None + return s[ref_start + 5 : s.find(")")] + + def _resolve_value(self, value: str, path): + if path: + return *path, value + else: + return (value,) + + def _preprocess(self, value, evaluated_config: Mapping[str, Any], path): + if isinstance(value, str): + ref_key = self._get_ref_key(value) + if ref_key is None: + return value + else: + """ + references are ambiguous because one could define a key containing with `.` + in this example, we want to refer to the limit key in the dict object: + dict: + limit: 50 + limit_ref: "*ref(dict.limit)" + + whereas here we want to access the `nested.path` value. 
+ nested: + path: "first one" + nested.path: "uh oh" + value: "ref(nested.path) + + to resolve the ambiguity, we try looking for the reference key at the top level, and then traverse the structs downward + until we find a key with the given path, or until there is nothing to traverse. + """ + key = (ref_key,) + while key[-1]: + if key in evaluated_config: + return evaluated_config[key] + else: + split = key[-1].split(".") + key = *key[:-1], split[0], ".".join(split[1:]) + raise UndefinedReferenceException(path, ref_key) + elif isinstance(value, dict): + return self.preprocess_manifest(value, evaluated_config, path) + elif type(value) == list: + evaluated_list = [ + # pass in elem's path instead of the list's path + self._preprocess(v, evaluated_config, self._get_path_for_list_item(path, index)) + for index, v in enumerate(value) + ] + # Add the list's element to the evaluated config so they can be referenced + for index, elem in enumerate(evaluated_list): + evaluated_config[self._get_path_for_list_item(path, index)] = elem + return evaluated_list + else: + return value + + def _get_path_for_list_item(self, path, index): + # An elem's path is {path_to_list}[{index}] + if len(path) > 1: + return path[:-1], f"{path[-1]}[{index}]" + else: + return (f"{path[-1]}[{index}]",) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/yaml_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/yaml_parser.py deleted file mode 100644 index 31518c74849b5..0000000000000 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/yaml_parser.py +++ /dev/null @@ -1,202 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from copy import deepcopy -from typing import Any, Mapping, Tuple, Union - -import yaml -from airbyte_cdk.sources.declarative.parsers.config_parser import ConnectionDefinitionParser -from airbyte_cdk.sources.declarative.parsers.undefined_reference_exception import UndefinedReferenceException -from airbyte_cdk.sources.declarative.types import ConnectionDefinition - - -class YamlParser(ConnectionDefinitionParser): - """ - Parses a Yaml string to a ConnectionDefinition - - In addition to standard Yaml parsing, the input_string can contain references to values previously defined. - This parser will dereference these values to produce a complete ConnectionDefinition. - - References can be defined using a *ref() string. - ``` - key: 1234 - reference: "*ref(key)" - ``` - will produce the following definition: - ``` - key: 1234 - reference: 1234 - ``` - This also works with objects: - ``` - key_value_pairs: - k1: v1 - k2: v2 - same_key_value_pairs: "*ref(key_value_pairs)" - ``` - will produce the following definition: - ``` - key_value_pairs: - k1: v1 - k2: v2 - same_key_value_pairs: - k1: v1 - k2: v2 - ``` - - The $ref keyword can be used to refer to an object and enhance it with addition key-value pairs - ``` - key_value_pairs: - k1: v1 - k2: v2 - same_key_value_pairs: - $ref: "*ref(key_value_pairs)" - k3: v3 - ``` - will produce the following definition: - ``` - key_value_pairs: - k1: v1 - k2: v2 - same_key_value_pairs: - k1: v1 - k2: v2 - k3: v3 - ``` - - References can also point to nested values. 
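A small behavioural sketch of the new `ManifestReferenceResolver` added above; the keys and values are invented for illustration, and the empty dict and `""` path mirror how `ManifestDeclarativeSource` invokes it.

```python
from airbyte_cdk.sources.declarative.parsers.manifest_reference_resolver import ManifestReferenceResolver

manifest = {
    "definitions": {"page_size": 50},
    "retriever": {"page_size": "*ref(definitions.page_size)"},
}

resolved = ManifestReferenceResolver().preprocess_manifest(manifest, {}, "")
assert resolved["retriever"]["page_size"] == 50  # the nested reference is dereferenced in place
```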
- Nested references are ambiguous because one could define a key containing with `.` - in this example, we want to refer to the limit key in the dict object: - ``` - dict: - limit: 50 - limit_ref: "*ref(dict.limit)" - ``` - will produce the following definition: - ``` - dict - limit: 50 - limit-ref: 50 - ``` - - whereas here we want to access the `nested.path` value. - ``` - nested: - path: "first one" - nested.path: "uh oh" - value: "ref(nested.path) - ``` - will produce the following definition: - ``` - nested: - path: "first one" - nested.path: "uh oh" - value: "uh oh" - ``` - - to resolve the ambiguity, we try looking for the reference key at the top level, and then traverse the structs downward - until we find a key with the given path, or until there is nothing to traverse. - """ - - ref_tag = "$ref" - - def parse(self, connection_definition_str: str) -> ConnectionDefinition: - """ - Parses a yaml file and dereferences string in the form "*ref({reference)" - to {reference} - :param connection_definition_str: yaml string to parse - :return: The ConnectionDefinition parsed from connection_definition_str - """ - input_mapping = yaml.safe_load(connection_definition_str) - evaluated_definition = {} - return self._preprocess_dict(input_mapping, evaluated_definition, "") - - def _preprocess_dict(self, input_mapping: Mapping[str, Any], evaluated_mapping: Mapping[str, Any], path: Union[str, Tuple[str]]): - - """ - :param input_mapping: mapping produced by parsing yaml - :param evaluated_mapping: mapping produced by dereferencing the content of input_mapping - :param path: curent path in configuration traversal - :return: - """ - d = {} - if self.ref_tag in input_mapping: - partial_ref_string = input_mapping[self.ref_tag] - d = deepcopy(self._preprocess(partial_ref_string, evaluated_mapping, path)) - - for key, value in input_mapping.items(): - if key == self.ref_tag: - continue - full_path = self._resolve_value(key, path) - if full_path in evaluated_mapping: - raise Exception(f"Databag already contains key={key} with path {full_path}") - processed_value = self._preprocess(value, evaluated_mapping, full_path) - evaluated_mapping[full_path] = processed_value - d[key] = processed_value - - return d - - def _get_ref_key(self, s: str) -> str: - ref_start = s.find("*ref(") - if ref_start == -1: - return None - return s[ref_start + 5 : s.find(")")] - - def _resolve_value(self, value: str, path): - if path: - return *path, value - else: - return (value,) - - def _preprocess(self, value, evaluated_config: Mapping[str, Any], path): - if isinstance(value, str): - ref_key = self._get_ref_key(value) - if ref_key is None: - return value - else: - """ - references are ambiguous because one could define a key containing with `.` - in this example, we want to refer to the limit key in the dict object: - dict: - limit: 50 - limit_ref: "*ref(dict.limit)" - - whereas here we want to access the `nested.path` value. - nested: - path: "first one" - nested.path: "uh oh" - value: "ref(nested.path) - - to resolve the ambiguity, we try looking for the reference key at the top level, and then traverse the structs downward - until we find a key with the given path, or until there is nothing to traverse. 
- """ - key = (ref_key,) - while key[-1]: - if key in evaluated_config: - return evaluated_config[key] - else: - split = key[-1].split(".") - key = *key[:-1], split[0], ".".join(split[1:]) - raise UndefinedReferenceException(path, ref_key) - elif isinstance(value, dict): - return self._preprocess_dict(value, evaluated_config, path) - elif type(value) == list: - evaluated_list = [ - # pass in elem's path instead of the list's path - self._preprocess(v, evaluated_config, self._get_path_for_list_item(path, index)) - for index, v in enumerate(value) - ] - # Add the list's element to the evaluated config so they can be referenced - for index, elem in enumerate(evaluated_list): - evaluated_config[self._get_path_for_list_item(path, index)] = elem - return evaluated_list - else: - return value - - def _get_path_for_list_item(self, path, index): - # An elem's path is {path_to_list}[{index}] - if len(path) > 1: - return path[:-1], f"{path[-1]}[{index}]" - else: - return (f"{path[-1]}[{index}]",) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py index b88908aef8950..2ef4fcd9d672a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py @@ -81,22 +81,20 @@ class DefaultPaginator(Paginator, JsonSchemaMixin): decoder (Decoder): decoder to decode the response """ - page_size_option: Optional[RequestOption] - page_token_option: RequestOption pagination_strategy: PaginationStrategy config: Config url_base: Union[InterpolatedString, str] options: InitVar[Mapping[str, Any]] decoder: Decoder = JsonDecoder(options={}) _token: Optional[Any] = field(init=False, repr=False, default=None) + page_size_option: Optional[RequestOption] = None + page_token_option: Optional[RequestOption] = None def __post_init__(self, options: Mapping[str, Any]): if self.page_size_option and self.page_size_option.inject_into == RequestOptionType.path: raise ValueError("page_size_option cannot be set in as path") if self.page_size_option and not self.pagination_strategy.get_page_size(): raise ValueError("page_size_option cannot be set if the pagination strategy does not have a page_size") - if self.pagination_strategy.get_page_size() and not self.page_size_option: - raise ValueError("page_size_option must be set if the pagination strategy has a page_size") if isinstance(self.url_base, str): self.url_base = InterpolatedString(string=self.url_base, options=options) @@ -108,7 +106,7 @@ def next_page_token(self, response: requests.Response, last_records: List[Mappin return None def path(self): - if self._token and self.page_token_option.inject_into == RequestOptionType.path: + if self._token and self.page_token_option and self.page_token_option.inject_into == RequestOptionType.path: # Replace url base to only return the path return str(self._token).replace(self.url_base.eval(self.config), "") else: @@ -155,7 +153,7 @@ def reset(self): def _get_request_options(self, option_type: RequestOptionType) -> Mapping[str, Any]: options = {} - if self.page_token_option.inject_into == option_type: + if self.page_token_option and self.page_token_option.inject_into == option_type: if option_type != RequestOptionType.path and self._token: options[self.page_token_option.field_name] = self._token if self.page_size_option and 
self.pagination_strategy.get_page_size() and self.page_size_option.inject_into == option_type: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py index 370e5d8caeacd..731b7e63e0f35 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/request_options/interpolated_request_input_provider.py @@ -45,6 +45,6 @@ def eval_request_inputs( interpolated_value = self._interpolator.eval(self.config, **kwargs) if isinstance(interpolated_value, dict): - non_null_tokens = {k: v for k, v in interpolated_value.items() if v} + non_null_tokens = {k: v for k, v in interpolated_value.items() if v is not None} return non_null_tokens return interpolated_value diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py index 45252050a6ec1..99fac94af4e2a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/retriever.py @@ -7,7 +7,8 @@ from typing import Iterable, List, Optional from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState +from airbyte_cdk.sources.declarative.types import StreamSlice, StreamState +from airbyte_cdk.sources.streams.core import StreamData from dataclasses_jsonschema import JsonSchemaMixin @@ -24,7 +25,7 @@ def read_records( cursor_field: Optional[List[str]] = None, stream_slice: Optional[StreamSlice] = None, stream_state: Optional[StreamState] = None, - ) -> Iterable[Record]: + ) -> Iterable[StreamData]: """ Fetch a stream's records from an HTTP API source diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py index 27fcd98655ee9..867eb8dd837fb 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py @@ -2,13 +2,17 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
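
The one-character change to `interpolated_request_input_provider.py` above is easy to miss: filtering interpolated values with `if v` silently dropped legitimate falsy results such as `0` or `False`, while `if v is not None` keeps them and only discards missing values. A small self-contained sketch of the difference, using illustrative values:

```
# Interpolation can legitimately produce falsy values (0, False, 0.0).
interpolated_value = {"offset": 0, "include_deleted": False, "cursor": None}

# Old behaviour: every falsy value is dropped, so "offset" and "include_deleted" vanish.
old_tokens = {k: v for k, v in interpolated_value.items() if v}
assert old_tokens == {}

# New behaviour: only None is dropped; falsy-but-valid request parameters survive.
new_tokens = {k: v for k, v in interpolated_value.items() if v is not None}
assert new_tokens == {"offset": 0, "include_deleted": False}
```
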
# +import json +import logging from dataclasses import InitVar, dataclass, field from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union import requests -from airbyte_cdk.models import SyncMode +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Level, SyncMode +from airbyte_cdk.models import Type as MessageType from airbyte_cdk.sources.declarative.exceptions import ReadException from airbyte_cdk.sources.declarative.extractors.http_selector import HttpSelector +from airbyte_cdk.sources.declarative.interpolation import InterpolatedString from airbyte_cdk.sources.declarative.requesters.error_handlers.response_action import ResponseAction from airbyte_cdk.sources.declarative.requesters.paginators.no_pagination import NoPagination from airbyte_cdk.sources.declarative.requesters.paginators.paginator import Paginator @@ -16,8 +20,10 @@ from airbyte_cdk.sources.declarative.retrievers.retriever import Retriever from airbyte_cdk.sources.declarative.stream_slicers.single_slice import SingleSlice from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer -from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState +from airbyte_cdk.sources.declarative.types import Config, Record, StreamSlice, StreamState +from airbyte_cdk.sources.streams.core import StreamData from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.utils.airbyte_secrets_utils import filter_secrets from dataclasses_jsonschema import JsonSchemaMixin @@ -46,9 +52,10 @@ class SimpleRetriever(Retriever, HttpStream, JsonSchemaMixin): requester: Requester record_selector: HttpSelector + config: Config options: InitVar[Mapping[str, Any]] name: str - _name: str = field(init=False, repr=False, default="") + _name: Union[InterpolatedString, str] = field(init=False, repr=False, default="") primary_key: Optional[Union[str, List[str], List[List[str]]]] _primary_key: str = field(init=False, repr=False, default="") paginator: Optional[Paginator] = None @@ -59,13 +66,15 @@ def __post_init__(self, options: Mapping[str, Any]): HttpStream.__init__(self, self.requester.get_authenticator()) self._last_response = None self._last_records = None + self._options = options + self.name = InterpolatedString(self._name, options=options) @property def name(self) -> str: """ :return: Stream name """ - return self._name + return self._name.eval(self.config) @name.setter def name(self, value: str) -> None: @@ -347,20 +356,27 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, def read_records( self, sync_mode: SyncMode, - cursor_field: List[str] = None, + cursor_field: Optional[List[str]] = None, stream_slice: Optional[StreamSlice] = None, stream_state: Optional[StreamState] = None, - ) -> Iterable[Mapping[str, Any]]: + ) -> Iterable[StreamData]: # Warning: use self.state instead of the stream_state passed as argument! 
stream_slice = stream_slice or {} # None-check self.paginator.reset() - records_generator = HttpStream.read_records(self, sync_mode, cursor_field, stream_slice, self.state) - for r in records_generator: - self.stream_slicer.update_cursor(stream_slice, last_record=r) - yield r + records_generator = self._read_pages( + self.parse_records_and_emit_request_and_responses, + stream_slice, + stream_state, + ) + for record in records_generator: + # Only record messages should be parsed to update the cursor which is indicated by the Mapping type + if isinstance(record, Mapping): + self.stream_slicer.update_cursor(stream_slice, last_record=record) + yield record else: last_record = self._last_records[-1] if self._last_records else None - self.stream_slicer.update_cursor(stream_slice, last_record=last_record) + if last_record and isinstance(last_record, Mapping): + self.stream_slicer.update_cursor(stream_slice, last_record=last_record) yield from [] def stream_slices( @@ -385,3 +401,24 @@ def state(self) -> MutableMapping[str, Any]: def state(self, value: StreamState): """State setter, accept state serialized by state getter.""" self.stream_slicer.update_cursor(value) + + def parse_records_and_emit_request_and_responses(self, request, response, stream_slice, stream_state) -> Iterable[StreamData]: + # Only emit requests and responses when running in debug mode + if self.logger.isEnabledFor(logging.DEBUG): + yield self._create_trace_message_from_request(request) + yield self._create_trace_message_from_response(response) + # Not great to need to call _read_pages which is a private method + # A better approach would be to extract the HTTP client from the HttpStream and call it directly from the HttpRequester + yield from self.parse_response(response, stream_slice=stream_slice, stream_state=stream_state) + + def _create_trace_message_from_request(self, request: requests.PreparedRequest): + # FIXME: this should return some sort of trace message + request_dict = {"url": request.url, "headers": dict(request.headers), "body": request.body} + log_message = filter_secrets(f"request:{json.dumps(request_dict)}") + return AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=Level.INFO, message=log_message)) + + def _create_trace_message_from_response(self, response: requests.Response): + # FIXME: this should return some sort of trace message + response_dict = {"body": response.text, "headers": dict(response.headers), "status_code": response.status_code} + log_message = filter_secrets(f"response:{json.dumps(response_dict)}") + return AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=Level.INFO, message=log_message)) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/default_schema_loader.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/default_schema_loader.py index 8e5129ebeab94..9344ffeed684b 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/default_schema_loader.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/default_schema_loader.py @@ -38,7 +38,7 @@ def get_json_schema(self) -> Mapping[str, Any]: try: return self.default_loader.get_json_schema() - except FileNotFoundError: + except OSError: # A slight hack since we don't directly have the stream name. 
However, when building the default filepath we assume the
            # runtime options store the stream name under 'name', so we'll do the same here
            stream_name = self._options.get("name", "")
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/json_file_schema_loader.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/json_file_schema_loader.py
index de3fb380d1bcf..fa4e5a99a0054 100644
--- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/json_file_schema_loader.py
+++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/schema/json_file_schema_loader.py
@@ -15,15 +15,18 @@
 def _default_file_path() -> str:
-    # schema files are always in "source_<connector_name>/schemas/<stream_name>.json
-    # the connector's module name can be inferred by looking at the modules loaded and look for the one starting with source_
+    # Schema files are always in "source_<connector_name>/schemas/<stream_name>.json
+    # The connector's module name can be inferred by looking at the loaded modules and picking the one starting with source_
     source_modules = [
-        k for k, v in sys.modules.items() if "source_" in k  # example: ['source_exchange_rates', 'source_exchange_rates.source']
-    ]
-    if not source_modules:
-        raise RuntimeError("Expected at least one module starting with 'source_'")
-    module = source_modules[0].split(".")[0]
-    return f"./{module}/schemas/{{{{options['name']}}}}.json"
+        k for k, v in sys.modules.items() if "source_" in k
+    ]  # example: ['source_exchange_rates', 'source_exchange_rates.source']
+    if source_modules:
+        module = source_modules[0].split(".")[0]
+        return f"./{module}/schemas/{{{{options['name']}}}}.json"
+
+    # If we are not in a source_ module, the most likely scenario is we're processing a manifest from the connector builder
+    # server which does not require a json schema to be defined.
+    return "./{{options['name']}}.json"
 
 
 @dataclass
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/__init__.py
new file mode 100644
index 0000000000000..fba2f9612ba2e
--- /dev/null
+++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/__init__.py
@@ -0,0 +1,7 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+from airbyte_cdk.sources.declarative.spec.spec import Spec
+
+__all__ = ["Spec"]
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py
new file mode 100644
index 0000000000000..d5ac6a1d586d3
--- /dev/null
+++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/spec/spec.py
@@ -0,0 +1,34 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+from dataclasses import InitVar, dataclass
+from typing import Any, Mapping
+
+from airbyte_cdk.models.airbyte_protocol import ConnectorSpecification
+from dataclasses_jsonschema import JsonSchemaMixin
+
+
+@dataclass
+class Spec(JsonSchemaMixin):
+    """
+    Returns a connection specification made up of information about the connector and how it can be configured
+
+    Attributes:
+        documentation_url (str): The link to the Airbyte documentation about this connector
+        connection_specification (Mapping[str, Any]): information related to how a connector can be configured
+    """
+
+    documentation_url: str
+    connection_specification: Mapping[str, Any]
+    options: InitVar[Mapping[str, Any]]
+
+    def generate_spec(self) -> ConnectorSpecification:
+        """
+        Returns the connector specification according to the spec block defined in the low code connector manifest.
+        """
+
+        # We remap these keys to camel case because that's the existing format expected by the rest of the platform
+        return ConnectorSpecification.parse_obj(
+            {"documentationUrl": self.documentation_url, "connectionSpecification": self.connection_specification}
+        )
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/datetime_stream_slicer.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/datetime_stream_slicer.py
index 3853755796770..181ddc096d99a 100644
--- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/datetime_stream_slicer.py
+++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/datetime_stream_slicer.py
@@ -16,6 +16,7 @@
 from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer
 from airbyte_cdk.sources.declarative.types import Config, Record, StreamSlice, StreamState
 from dataclasses_jsonschema import JsonSchemaMixin
+from dateutil.relativedelta import relativedelta
 
 
 @dataclass
@@ -30,10 +31,12 @@ class DatetimeStreamSlicer(StreamSlicer, JsonSchemaMixin):
         `"<number><unit>"`
 
         where unit can be one of
+        - years, y
+        - months, m
         - weeks, w
         - days, d
 
-        For example, "1d" will produce windows of 1 day, and 2weeks windows of 2 weeks.
+        For example, "1d" will produce windows of 1 day, and "2w" windows of 2 weeks.
 
     The timestamp format accepts the same format codes as datetime.strptime, which are all the format codes required by the 1989 C standard.
 
@@ -68,7 +71,9 @@ class DatetimeStreamSlicer(StreamSlicer, JsonSchemaMixin):
     stream_state_field_end: Optional[str] = None
     lookback_window: Optional[Union[InterpolatedString, str]] = None
 
-    timedelta_regex = re.compile(r"((?P<weeks>[\.\d]+?)w)?" r"((?P<days>[\.\d]+?)d)?$")
+    timedelta_regex = re.compile(
+        r"((?P<years>[\.\d]+?)y)?" r"((?P<months>[\.\d]+?)m)?" r"((?P<weeks>[\.\d]+?)w)?" r"((?P<days>[\.\d]+?)d)?$"
+    )
 
     def __post_init__(self, options: Mapping[str, Any]):
         if not isinstance(self.start_datetime, MinMaxDatetime):
@@ -188,14 +193,14 @@ def _parse_timedelta(cls, time_str):
         Parse a time string e.g. (2h13m) into a timedelta object.
         Modified from virhilo's answer at https://stackoverflow.com/a/4628148/851699
         :param time_str: A string identifying a duration. (eg.
2h13m) - :return datetime.timedelta: A datetime.timedelta object + :return relativedelta: A relativedelta object """ parts = cls.timedelta_regex.match(time_str) assert parts is not None time_params = {name: float(param) for name, param in parts.groupdict().items() if param} - return datetime.timedelta(**time_params) + return relativedelta(**time_params) def get_request_params( self, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/substream_slicer.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/substream_slicer.py index dbe46d9242694..081b8d0674338 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/substream_slicer.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/stream_slicers/substream_slicer.py @@ -5,7 +5,7 @@ from dataclasses import InitVar, dataclass from typing import Any, Iterable, List, Mapping, Optional -from airbyte_cdk.models import SyncMode +from airbyte_cdk.models import AirbyteMessage, SyncMode, Type from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, RequestOptionType from airbyte_cdk.sources.declarative.stream_slicers.stream_slicer import StreamSlicer from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState @@ -138,6 +138,12 @@ def stream_slices(self, sync_mode: SyncMode, stream_state: StreamState) -> Itera for parent_record in parent_stream.read_records( sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=parent_stream_slice, stream_state=None ): + # Skip non-records (eg AirbyteLogMessage) + if isinstance(parent_record, AirbyteMessage): + if parent_record.type == Type.RECORD: + parent_record = parent_record.record.data + else: + continue empty_parent_slice = False stream_state_value = parent_record.get(parent_field) yield {stream_state_field: stream_state_value, "parent_slice": parent_slice} diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py index c9e31fd47ba92..88d449192fe6f 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/yaml_declarative_source.py @@ -2,161 +2,41 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
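
Because `_parse_timedelta` above now returns a `dateutil` `relativedelta`, step sizes expressed in months and years become possible. A standalone sketch of the same parsing idea, with the regex group names written out explicitly (they are reconstructed here; `relativedelta(**time_params)` requires them to be `years`/`months`/`weeks`/`days`):

```
import re

from dateutil.relativedelta import relativedelta

# Mirrors the pattern added to DatetimeStreamSlicer: optional year/month/week/day parts.
timedelta_regex = re.compile(
    r"((?P<years>[\.\d]+?)y)?" r"((?P<months>[\.\d]+?)m)?" r"((?P<weeks>[\.\d]+?)w)?" r"((?P<days>[\.\d]+?)d)?$"
)


def parse_step(time_str: str) -> relativedelta:
    parts = timedelta_regex.match(time_str)
    assert parts is not None
    # Keep only the groups that actually matched, then pass them to relativedelta as kwargs.
    time_params = {name: float(param) for name, param in parts.groupdict().items() if param}
    return relativedelta(**time_params)


assert parse_step("1m") == relativedelta(months=1)
assert parse_step("2w") == relativedelta(weeks=2.0)
```
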
# -import inspect -import json -import logging import pkgutil -import typing -from dataclasses import dataclass, fields -from enum import Enum, EnumMeta -from typing import Any, List, Mapping, Union -from airbyte_cdk.sources.declarative.checks import CheckStream -from airbyte_cdk.sources.declarative.checks.connection_checker import ConnectionChecker -from airbyte_cdk.sources.declarative.declarative_source import DeclarativeSource -from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream -from airbyte_cdk.sources.declarative.exceptions import InvalidConnectorDefinitionException -from airbyte_cdk.sources.declarative.parsers.factory import DeclarativeComponentFactory -from airbyte_cdk.sources.declarative.parsers.yaml_parser import YamlParser -from airbyte_cdk.sources.streams.core import Stream -from dataclasses_jsonschema import JsonSchemaMixin -from jsonschema.validators import validate +import yaml +from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource +from airbyte_cdk.sources.declarative.types import ConnectionDefinition -@dataclass -class ConcreteDeclarativeSource(JsonSchemaMixin): - version: str - checker: CheckStream - streams: List[DeclarativeStream] - - -class YamlDeclarativeSource(DeclarativeSource): +class YamlDeclarativeSource(ManifestDeclarativeSource): """Declarative source defined by a yaml file""" - VALID_TOP_LEVEL_FIELDS = {"definitions", "streams", "check", "version"} - - def __init__(self, path_to_yaml): + def __init__(self, path_to_yaml, debug: bool = False): """ :param path_to_yaml: Path to the yaml file describing the source """ - self.logger = logging.getLogger(f"airbyte.{self.name}") - self._factory = DeclarativeComponentFactory() self._path_to_yaml = path_to_yaml - self._source_config = self._read_and_parse_yaml_file(path_to_yaml) - - self._validate_source() - - # Stopgap to protect the top-level namespace until it's validated through the schema - unknown_fields = [key for key in self._source_config.keys() if key not in self.VALID_TOP_LEVEL_FIELDS] - if unknown_fields: - raise InvalidConnectorDefinitionException(f"Found unknown top-level fields: {unknown_fields}") + source_config = self._read_and_parse_yaml_file(path_to_yaml) + super().__init__(source_config, debug) - @property - def connection_checker(self) -> ConnectionChecker: - check = self._source_config["check"] - if "class_name" not in check: - check["class_name"] = "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream" - return self._factory.create_component(check, dict())(source=self) - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - self.logger.debug( - "parsed YAML into declarative source", - extra={"path_to_yaml_file": self._path_to_yaml, "source_name": self.name, "parsed_config": json.dumps(self._source_config)}, - ) - source_streams = [self._factory.create_component(stream_config, config, True)() for stream_config in self._stream_configs()] - for stream in source_streams: - # make sure the log level is always appied to the stream's logger - self._apply_log_level_to_stream_logger(self.logger, stream) - return source_streams - - def _read_and_parse_yaml_file(self, path_to_yaml_file): + def _read_and_parse_yaml_file(self, path_to_yaml_file) -> ConnectionDefinition: package = self.__class__.__module__.split(".")[0] yaml_config = pkgutil.get_data(package, path_to_yaml_file) decoded_yaml = yaml_config.decode() - return YamlParser().parse(decoded_yaml) - - def _validate_source(self): - full_config = {} - if 
"version" in self._source_config: - full_config["version"] = self._source_config["version"] - if "check" in self._source_config: - full_config["checker"] = self._source_config["check"] - streams = [self._factory.create_component(stream_config, {}, False)() for stream_config in self._stream_configs()] - if len(streams) > 0: - full_config["streams"] = streams - declarative_source_schema = ConcreteDeclarativeSource.json_schema() - validate(full_config, declarative_source_schema) - - def _stream_configs(self): - stream_configs = self._source_config.get("streams", []) - for s in stream_configs: - if "class_name" not in s: - s["class_name"] = "airbyte_cdk.sources.declarative.declarative_stream.DeclarativeStream" - return stream_configs + return self._parse(decoded_yaml) - @staticmethod - def generate_schema() -> str: - expanded_source_definition = YamlDeclarativeSource.expand_schema_interfaces(ConcreteDeclarativeSource, {}) - expanded_schema = expanded_source_definition.json_schema() - return json.dumps(expanded_schema, cls=SchemaEncoder) + def _emit_manifest_debug_message(self, extra_args: dict): + extra_args["path_to_yaml"] = self._path_to_yaml + self.logger.debug("declarative source created from parsed YAML manifest", extra=extra_args) @staticmethod - def expand_schema_interfaces(expand_class: type, visited: dict) -> type: + def _parse(connection_definition_str: str) -> ConnectionDefinition: """ - Recursive function that takes in class type that will have its interface fields unpacked and expended and then recursively - attempt the same expansion on all the class' underlying fields that are declarative component. It also performs expansion - with respect to interfaces that are contained within generic data types. - :param expand_class: The declarative component class that will have its interface fields expanded - :param visited: cache used to store a record of already visited declarative classes that have already been seen - :return: The expanded declarative component + Parses a yaml file into a manifest. Component references still exist in the manifest which will be + resolved during the creating of the DeclarativeSource. + :param connection_definition_str: yaml string to parse + :return: The ConnectionDefinition parsed from connection_definition_str """ - - # Recursive base case to stop recursion if we have already expanded an interface in case of cyclical components - # like CompositeErrorHandler - if expand_class.__name__ in visited: - return visited[expand_class.__name__] - visited[expand_class.__name__] = expand_class - - next_classes = [] - class_fields = fields(expand_class) - for field in class_fields: - unpacked_field_types = DeclarativeComponentFactory.unpack(field.type) - expand_class.__annotations__[field.name] = unpacked_field_types - next_classes.extend(YamlDeclarativeSource._get_next_expand_classes(field.type)) - for next_class in next_classes: - YamlDeclarativeSource.expand_schema_interfaces(next_class, visited) - return expand_class - - @staticmethod - def _get_next_expand_classes(field_type) -> list[type]: - """ - Parses through a given field type and assembles a list of all underlying declarative components. For a concrete declarative class - it will return itself. For a declarative interface it will return its subclasses. For declarative components in a generic type - it will return the unpacked classes. Any non-declarative types will be skipped. 
- :param field_type: A field type that - :return: - """ - generic_type = typing.get_origin(field_type) - if generic_type is None: - # We can only continue parsing declarative that inherit from the JsonSchemaMixin class because it is used - # to generate the final json schema - if inspect.isclass(field_type) and issubclass(field_type, JsonSchemaMixin) and not isinstance(field_type, EnumMeta): - subclasses = field_type.__subclasses__() - if subclasses: - return subclasses - else: - return [field_type] - elif generic_type == list or generic_type == Union: - next_classes = [] - for underlying_type in typing.get_args(field_type): - next_classes.extend(YamlDeclarativeSource._get_next_expand_classes(underlying_type)) - return next_classes - return [] - - -class SchemaEncoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, property) or isinstance(obj, Enum): - return str(obj) - return json.JSONEncoder.default(self, obj) + return yaml.safe_load(connection_definition_str) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py index 02199df40c31f..d39c706eb9aaf 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/core.py @@ -6,14 +6,24 @@ import inspect import logging from abc import ABC, abstractmethod +from functools import lru_cache from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union import airbyte_cdk.sources.utils.casing as casing -from airbyte_cdk.models import AirbyteStream, SyncMode +from airbyte_cdk.models import AirbyteLogMessage, AirbyteStream, AirbyteTraceMessage, SyncMode + +# list of all possible HTTP methods which can be used for sending of request bodies from airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer from deprecated.classic import deprecated +# A stream's read method can return one of the following types: +# Mapping[str, Any]: The content of an AirbyteRecordMessage +# AirbyteRecordMessage: An AirbyteRecordMessage +# AirbyteLogMessage: A log message +# AirbyteTraceMessage: A trace message +StreamData = Union[Mapping[str, Any], AirbyteLogMessage, AirbyteTraceMessage] + def package_name_from_class(cls: object) -> str: """Find the package name given a class name""" @@ -94,11 +104,12 @@ def read_records( cursor_field: List[str] = None, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: + ) -> Iterable[StreamData]: """ This method should be overridden by subclasses to read records based on the inputs """ + @lru_cache(maxsize=None) def get_json_schema(self) -> Mapping[str, Any]: """ :return: A dict of the JSON schema representing this stream. 
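
With the `StreamData` union above, `read_records` may now yield plain record mappings alongside Airbyte log and trace messages, so consumers have to dispatch on the member type. The sketch below only illustrates that dispatch; the `record_helper.stream_data_to_airbyte_message` helper added further down in this change performs the conversion in the CDK itself:

```
import time
from typing import Iterable

from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, AirbyteTraceMessage
from airbyte_cdk.models import Type as MessageType
from airbyte_cdk.sources.streams.core import StreamData


def to_airbyte_messages(stream_name: str, stream_data: Iterable[StreamData]) -> Iterable[AirbyteMessage]:
    # Dispatch on the members of the StreamData union: log and trace payloads are
    # wrapped as-is, anything else is treated as a record mapping.
    for item in stream_data:
        if isinstance(item, AirbyteLogMessage):
            yield AirbyteMessage(type=MessageType.LOG, log=item)
        elif isinstance(item, AirbyteTraceMessage):
            yield AirbyteMessage(type=MessageType.TRACE, trace=item)
        else:
            record = AirbyteRecordMessage(stream=stream_name, data=dict(item), emitted_at=int(time.time() * 1000))
            yield AirbyteMessage(type=MessageType.RECORD, record=record)
```
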
diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py index a0faa46108999..28ea3f506606a 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/http.py @@ -7,13 +7,13 @@ import os from abc import ABC, abstractmethod from contextlib import suppress -from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Union +from typing import Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union from urllib.parse import urljoin import requests import requests_cache from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources.streams.core import Stream +from airbyte_cdk.sources.streams.core import Stream, StreamData from requests.auth import AuthBase from requests_cache.session import CachedSession @@ -408,24 +408,25 @@ def read_records( cursor_field: List[str] = None, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, - ) -> Iterable[Mapping[str, Any]]: + ) -> Iterable[StreamData]: + yield from self._read_pages( + lambda req, res, state, _slice: self.parse_response(res, stream_slice=_slice, stream_state=state), stream_slice, stream_state + ) + + def _read_pages( + self, + records_generator_fn: Callable[ + [requests.PreparedRequest, requests.Response, Mapping[str, Any], Mapping[str, Any]], Iterable[StreamData] + ], + stream_slice: Mapping[str, Any] = None, + stream_state: Mapping[str, Any] = None, + ) -> Iterable[StreamData]: stream_state = stream_state or {} pagination_complete = False - next_page_token = None while not pagination_complete: - request_headers = self.request_headers(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - request = self._create_prepared_request( - path=self.path(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - headers=dict(request_headers, **self.authenticator.get_auth_header()), - params=self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - json=self.request_body_json(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - data=self.request_body_data(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), - ) - request_kwargs = self.request_kwargs(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - - response = self._send_request(request, request_kwargs) - yield from self.parse_response(response, stream_state=stream_state, stream_slice=stream_slice) + request, response = self._fetch_next_page(stream_slice, stream_state, next_page_token) + yield from records_generator_fn(request, response, stream_state, stream_slice) next_page_token = self.next_page_token(response) if not next_page_token: @@ -434,6 +435,22 @@ def read_records( # Always return an empty generator just in case no records were ever yielded yield from [] + def _fetch_next_page( + self, stream_slice: Mapping[str, Any] = None, stream_state: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Tuple[requests.PreparedRequest, requests.Response]: + request_headers = self.request_headers(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) + request = self._create_prepared_request( + path=self.path(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + 
headers=dict(request_headers, **self.authenticator.get_auth_header()), + params=self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + json=self.request_body_json(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + data=self.request_body_data(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token), + ) + request_kwargs = self.request_kwargs(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) + + response = self._send_request(request, request_kwargs) + return request, response + class HttpSubStream(HttpStream, ABC): def __init__(self, parent: HttpStream, **kwargs): diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py index 2a8bd72833715..e7e0ce397e809 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/abstract_oauth.py @@ -47,7 +47,7 @@ def build_refresh_request_body(self) -> Mapping[str, Any]: Override to define additional parameters """ payload: MutableMapping[str, Any] = { - "grant_type": "refresh_token", + "grant_type": self.get_grant_type(), "client_id": self.get_client_id(), "client_secret": self.get_client_secret(), "refresh_token": self.get_refresh_token(), @@ -118,6 +118,10 @@ def get_expires_in_name(self) -> str: def get_refresh_request_body(self) -> Mapping[str, Any]: """Returns the request body to set on the refresh request""" + @abstractmethod + def get_grant_type(self) -> str: + """Returns grant_type specified for requesting access_token""" + @property @abstractmethod def access_token(self) -> str: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py index d479652f78b8f..5f2e21df88414 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py @@ -25,6 +25,7 @@ def __init__( access_token_name: str = "access_token", expires_in_name: str = "expires_in", refresh_request_body: Mapping[str, Any] = None, + grant_type: str = "refresh_token", ): self._token_refresh_endpoint = token_refresh_endpoint self._client_secret = client_secret @@ -34,6 +35,7 @@ def __init__( self._access_token_name = access_token_name self._expires_in_name = expires_in_name self._refresh_request_body = refresh_request_body + self._grant_type = grant_type self._token_expiry_date = token_expiry_date or pendulum.now().subtract(days=1) self._access_token = None @@ -62,6 +64,9 @@ def get_expires_in_name(self) -> str: def get_refresh_request_body(self) -> Mapping[str, Any]: return self._refresh_request_body + def get_grant_type(self) -> str: + return self._grant_type + def get_token_expiry_date(self) -> pendulum.DateTime: return self._token_expiry_date diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/__init__.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/__init__.py index 5adf292dff0cb..8edf89696e665 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/utils/__init__.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/utils/__init__.py @@ -1,5 +1,7 @@ # -# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
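
The `grant_type` plumbing above lets the token refresh request use a grant other than the default `refresh_token`. A hedged usage sketch follows; the `client_id` and `refresh_token` keyword arguments are assumed from the existing constructor (they are not shown in this hunk), and `client_credentials` is just an illustrative value:

```
from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import Oauth2Authenticator

authenticator = Oauth2Authenticator(
    token_refresh_endpoint="https://example.com/oauth/token",
    client_id="my_client_id",
    client_secret="my_client_secret",
    refresh_token="",  # unused by a client_credentials grant, but still accepted by the constructor
    grant_type="client_credentials",
)

# build_refresh_request_body() now reads the grant through get_grant_type(), so the
# payload carries the configured value instead of the hard-coded "refresh_token".
body = authenticator.build_refresh_request_body()
assert body["grant_type"] == "client_credentials"
```
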
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved. # # Initialize Utils Package + +__all__ = ["record_helper"] diff --git a/airbyte-cdk/python/airbyte_cdk/sources/utils/record_helper.py b/airbyte-cdk/python/airbyte_cdk/sources/utils/record_helper.py new file mode 100644 index 0000000000000..482596230f04b --- /dev/null +++ b/airbyte-cdk/python/airbyte_cdk/sources/utils/record_helper.py @@ -0,0 +1,38 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import datetime +from typing import Any, Mapping + +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, AirbyteTraceMessage +from airbyte_cdk.models import Type as MessageType +from airbyte_cdk.sources.streams.core import StreamData +from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer + + +def stream_data_to_airbyte_message( + stream_name: str, + data_or_message: StreamData, + transformer: TypeTransformer = TypeTransformer(TransformConfig.NoTransform), + schema: Mapping[str, Any] = None, +) -> AirbyteMessage: + if schema is None: + schema = {} + + if isinstance(data_or_message, dict): + data = data_or_message + now_millis = int(datetime.datetime.now().timestamp() * 1000) + # Transform object fields according to config. Most likely you will + # need it to normalize values against json schema. By default no action + # taken unless configured. See + # docs/connector-development/cdk-python/schemas.md for details. + transformer.transform(data, schema) # type: ignore + message = AirbyteRecordMessage(stream=stream_name, data=data, emitted_at=now_millis) + return AirbyteMessage(type=MessageType.RECORD, record=message) + elif isinstance(data_or_message, AirbyteTraceMessage): + return AirbyteMessage(type=MessageType.TRACE, trace=data_or_message) + elif isinstance(data_or_message, AirbyteLogMessage): + return AirbyteMessage(type=MessageType.LOG, log=data_or_message) + else: + raise ValueError(f"Unexpected type for data_or_message: {type(data_or_message)}: {data_or_message}") diff --git a/airbyte-cdk/python/setup.py b/airbyte-cdk/python/setup.py index d133785a6ec9f..92bab1beefcfe 100644 --- a/airbyte-cdk/python/setup.py +++ b/airbyte-cdk/python/setup.py @@ -15,7 +15,7 @@ setup( name="airbyte-cdk", - version="0.5.4", + version="0.9.4", description="A framework for writing Airbyte Connectors.", long_description=README, long_description_content_type="text/markdown", @@ -52,6 +52,7 @@ "jsonref~=0.2", "pendulum", "pydantic~=1.9.2", + "python-dateutil", "PyYAML~=5.4", "requests", "requests_cache", diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_oauth.py b/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_oauth.py index 12cb353de5c04..fe3ea518611da 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_oauth.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/auth/test_oauth.py @@ -20,6 +20,7 @@ "token_expiry_date": pendulum.now().subtract(days=2).to_rfc3339_string(), "custom_field": "in_outbound_request", "another_field": "exists_in_body", + "grant_type": "some_grant_type", } options = {"refresh_token": "some_refresh_token"} @@ -48,10 +49,11 @@ def test_refresh_request_body(self): "scopes": ["no_override"], }, options=options, + grant_type="{{ config['grant_type'] }}" ) body = oauth.build_refresh_request_body() expected = { - "grant_type": "refresh_token", + "grant_type": "some_grant_type", "client_id": "some_client_id", "client_secret": "some_client_secret", "refresh_token": "some_refresh_token", diff --git 
a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_manifest_reference_resolver.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_manifest_reference_resolver.py new file mode 100644 index 0000000000000..59f7412ef1a1c --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_manifest_reference_resolver.py @@ -0,0 +1,143 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import pytest +from airbyte_cdk.sources.declarative.parsers.manifest_reference_resolver import ManifestReferenceResolver +from airbyte_cdk.sources.declarative.parsers.undefined_reference_exception import UndefinedReferenceException + +resolver = ManifestReferenceResolver() + + +def test_get_ref(): + s = "*ref(limit)" + ref_key = resolver._get_ref_key(s) + assert ref_key == "limit" + + +def test_get_ref_no_ref(): + s = "limit: 50" + + ref_key = resolver._get_ref_key(s) + assert ref_key is None + + +def test_refer(): + content = { + "limit": 50, + "limit_ref": "*ref(limit)" + } + config = resolver.preprocess_manifest(content, {}, "") + assert config["limit_ref"] == 50 + + +def test_refer_to_inner(): + content = { + "dict": { + "limit": 50 + }, + "limit_ref": "*ref(dict.limit)" + } + config = resolver.preprocess_manifest(content, {}, "") + assert config["limit_ref"] == 50 + + +def test_refer_to_non_existant_struct(): + content = { + "dict": { + "limit": 50 + }, + "limit_ref": "*ref(not_dict)" + } + with pytest.raises(UndefinedReferenceException): + resolver.preprocess_manifest(content, {}, "") + + +def test_refer_in_dict(): + content = { + "limit": 50, + "offset_request_parameters": { + "offset": "{{ next_page_token['offset'] }}", + "limit": "*ref(limit)" + } + } + config = resolver.preprocess_manifest(content, {}, "") + assert config["offset_request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" + assert config["offset_request_parameters"]["limit"] == 50 + + +def test_refer_to_dict(): + content = { + "limit": 50, + "offset_request_parameters": { + "offset": "{{ next_page_token['offset'] }}", + "limit": "*ref(limit)" + }, + "offset_pagination_request_parameters": { + "class": "InterpolatedRequestParameterProvider", + "request_parameters": "*ref(offset_request_parameters)" + } + } + config = resolver.preprocess_manifest(content, {}, "") + assert config["limit"] == 50 + assert config["offset_request_parameters"]["limit"] == 50 + assert len(config["offset_pagination_request_parameters"]) == 2 + assert config["offset_pagination_request_parameters"]["request_parameters"]["limit"] == 50 + assert config["offset_pagination_request_parameters"]["request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" + + +def test_refer_and_overwrite(): + content = { + "limit": 50, + "custom_limit": 25, + "offset_request_parameters": { + "offset": "{{ next_page_token['offset'] }}", + "limit": "*ref(limit)" + }, + "custom_request_parameters": { + "$ref": "*ref(offset_request_parameters)", + "limit": "*ref(custom_limit)" + } + } + config = resolver.preprocess_manifest(content, {}, "") + assert config["offset_request_parameters"]["limit"] == 50 + assert config["custom_request_parameters"]["limit"] == 25 + + assert config["offset_request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" + assert config["custom_request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" + + +def test_collision(): + content = { + "example": { + "nested":{ + "path": "first one", + "more_nested": { + "value": "found it!" 
+ } + }, + "nested.path": "uh oh", + }, + "reference_to_nested_path": { + "$ref": "*ref(example.nested.path)" + }, + "reference_to_nested_nested_value": { + "$ref": "*ref(example.nested.more_nested.value)" + } + } + config = resolver.preprocess_manifest(content, {}, "") + assert config["example"]["nested"]["path"] == "first one" + assert config["example"]["nested.path"] == "uh oh" + assert config["reference_to_nested_path"] == "uh oh" + assert config["example"]["nested"]["more_nested"]["value"] == "found it!" + assert config["reference_to_nested_nested_value"] == "found it!" + + +def test_list(): + content = { + "list": ["A", "B"], + "elem_ref": "*ref(list[0])" + } + config = resolver.preprocess_manifest(content, {}, "") + elem_ref = config["elem_ref"] + assert elem_ref == "A" diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_yaml_parser.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_yaml_parser.py deleted file mode 100644 index bbc9104ab1b7f..0000000000000 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_yaml_parser.py +++ /dev/null @@ -1,144 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -import pytest -from airbyte_cdk.sources.declarative.parsers.undefined_reference_exception import UndefinedReferenceException -from airbyte_cdk.sources.declarative.parsers.yaml_parser import YamlParser - -parser = YamlParser() - - -def test(): - content = """ - limit: 50 - """ - config = parser.parse(content) - assert config["limit"] == 50 - - -def test_get_ref(): - s = """ - limit_ref: "*ref(limit)" - """ - ref_key = parser._get_ref_key(s) - assert ref_key == "limit" - - -def test_get_ref_no_ref(): - s = """ - limit: 50 - """ - ref_key = parser._get_ref_key(s) - assert ref_key is None - - -def test_refer(): - content = """ - limit: 50 - limit_ref: "*ref(limit)" - """ - config = parser.parse(content) - assert config["limit_ref"] == 50 - - -def test_refer_to_inner(): - content = """ - dict: - limit: 50 - limit_ref: "*ref(dict.limit)" - """ - config = parser.parse(content) - assert config["limit_ref"] == 50 - - -def test_refer_to_non_existant_struct(): - content = """ - dict: - limit: 50 - limit_ref: "*ref(not_dict)" - """ - with pytest.raises(UndefinedReferenceException): - parser.parse(content) - - -def test_refer_in_dict(): - content = """ - limit: 50 - offset_request_parameters: - offset: "{{ next_page_token['offset'] }}" - limit: "*ref(limit)" - """ - config = parser.parse(content) - assert config["offset_request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" - assert config["offset_request_parameters"]["limit"] == 50 - - -def test_refer_to_dict(): - content = """ - limit: 50 - offset_request_parameters: - offset: "{{ next_page_token['offset'] }}" - limit: "*ref(limit)" - offset_pagination_request_parameters: - class: InterpolatedRequestParameterProvider - request_parameters: "*ref(offset_request_parameters)" - """ - config = parser.parse(content) - assert config["limit"] == 50 - assert config["offset_request_parameters"]["limit"] == 50 - assert len(config["offset_pagination_request_parameters"]) == 2 - assert config["offset_pagination_request_parameters"]["request_parameters"]["limit"] == 50 - assert config["offset_pagination_request_parameters"]["request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" - - -def test_refer_and_overwrite(): - content = """ - limit: 50 - custom_limit: 25 - offset_request_parameters: - offset: "{{ next_page_token['offset'] }}" - limit: "*ref(limit)" - 
custom_request_parameters: - $ref: "*ref(offset_request_parameters)" - limit: "*ref(custom_limit)" - """ - config = parser.parse(content) - assert config["offset_request_parameters"]["limit"] == 50 - assert config["custom_request_parameters"]["limit"] == 25 - - assert config["offset_request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" - assert config["custom_request_parameters"]["offset"] == "{{ next_page_token['offset'] }}" - - -def test_collision(): - content = """ -example: - nested: - path: "first one" - more_nested: - value: "found it!" - nested.path: "uh oh" -reference_to_nested_path: - $ref: "*ref(example.nested.path)" -reference_to_nested_nested_value: - $ref: "*ref(example.nested.more_nested.value)" - """ - config = parser.parse(content) - assert config["example"]["nested"]["path"] == "first one" - assert config["example"]["nested.path"] == "uh oh" - assert config["reference_to_nested_path"] == "uh oh" - assert config["example"]["nested"]["more_nested"]["value"] == "found it!" - assert config["reference_to_nested_nested_value"] == "found it!" - - -def test_list(): - content = """ - list: - - "A" - - "B" - elem_ref: "*ref(list[0])" - """ - config = parser.parse(content) - elem_ref = config["elem_ref"] - assert elem_ref == "A" diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py index f6ec277d0a836..8aec567318bbf 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/paginators/test_default_paginator.py @@ -194,33 +194,11 @@ def test_page_size_option_cannot_be_set_if_strategy_has_no_limit(): pass -def test_page_size_option_must_be_set_if_strategy_has_limit(): - page_size_request_option = None - page_token_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, field_name="offset", options={}) - cursor_value = "{{ response.next }}" - url_base = "https://airbyte.io" - config = {} - options = {} - strategy = CursorPaginationStrategy(page_size=5, cursor_value=cursor_value, config=config, options=options) - try: - DefaultPaginator( - page_size_option=page_size_request_option, - page_token_option=page_token_request_option, - pagination_strategy=strategy, - config=config, - url_base=url_base, - options={}, - ) - assert False - except ValueError: - pass - - def test_reset(): page_size_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, field_name="limit", options={}) page_token_request_option = RequestOption(inject_into=RequestOptionType.request_parameter, field_name="offset", options={}) url_base = "https://airbyte.io" config = {} strategy = MagicMock() - DefaultPaginator(page_size_request_option, page_token_request_option, strategy, config, url_base, options={}).reset() + DefaultPaginator(strategy, config, url_base, options={}, page_size_option=page_size_request_option, page_token_option=page_token_request_option).reset() assert strategy.reset.called diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py index 457ddc9a22d8b..59cbcd100c7b4 100644 --- 
a/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/requesters/request_options/test_interpolated_request_options_provider.py @@ -21,12 +21,17 @@ ("test_value_depends_on_stream_slice", {"read_from_slice": "{{ stream_slice['start_date'] }}"}, {"read_from_slice": "2020-01-01"}), ("test_value_depends_on_next_page_token", {"read_from_token": "{{ next_page_token['offset'] }}"}, {"read_from_token": 12345}), ("test_value_depends_on_config", {"read_from_config": "{{ config['option'] }}"}, {"read_from_config": "OPTION"}), - ("test_none_value", {"missing_param": "{{ fake_path['date'] }}"}, {}), + ("test_missing_value", {"missing_param": "{{ fake_path['date'] }}"}, {}), ( "test_parameter_is_interpolated", {"{{ stream_state['date'] }} - {{stream_slice['start_date']}} - {{next_page_token['offset']}} - {{config['option']}}": "ABC"}, {"2021-01-01 - 2020-01-01 - 12345 - OPTION": "ABC"}, ), + ("test_boolean_false_value", {"boolean_false": "{{ False }}"}, {"boolean_false": False}), + ("test_integer_falsy_value", {"integer_falsy": "{{ 0 }}"}, {"integer_falsy": 0}), + ("test_number_falsy_value", {"number_falsy": "{{ 0.0 }}"}, {"number_falsy": 0.0}), + ("test_string_falsy_value", {"string_falsy": "{{ '' }}"}, {}), + ("test_none_value", {"none_value": "{{ None }}"}, {}), ], ) def test_interpolated_request_params(test_name, input_request_params, expected_request_params): @@ -45,12 +50,17 @@ def test_interpolated_request_params(test_name, input_request_params, expected_r ("test_value_depends_on_stream_slice", {"read_from_slice": "{{ stream_slice['start_date'] }}"}, {"read_from_slice": "2020-01-01"}), ("test_value_depends_on_next_page_token", {"read_from_token": "{{ next_page_token['offset'] }}"}, {"read_from_token": 12345}), ("test_value_depends_on_config", {"read_from_config": "{{ config['option'] }}"}, {"read_from_config": "OPTION"}), - ("test_none_value", {"missing_json": "{{ fake_path['date'] }}"}, {}), + ("test_missing_value", {"missing_json": "{{ fake_path['date'] }}"}, {}), ( "test_interpolated_keys", {"{{ stream_state['date'] }}": 123, "{{ config['option'] }}": "ABC"}, {"2021-01-01": 123, "OPTION": "ABC"}, ), + ("test_boolean_false_value", {"boolean_false": "{{ False }}"}, {"boolean_false": False}), + ("test_integer_falsy_value", {"integer_falsy": "{{ 0 }}"}, {"integer_falsy": 0}), + ("test_number_falsy_value", {"number_falsy": "{{ 0.0 }}"}, {"number_falsy": 0.0}), + ("test_string_falsy_value", {"string_falsy": "{{ '' }}"}, {}), + ("test_none_value", {"none_value": "{{ None }}"}, {}), ], ) def test_interpolated_request_json(test_name, input_request_json, expected_request_json): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py index ba801f8bcdd5b..9f8059cb8e4fe 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/retrievers/test_simple_retriever.py @@ -7,22 +7,27 @@ import airbyte_cdk.sources.declarative.requesters.error_handlers.response_status as response_status import pytest import requests -from airbyte_cdk.models import SyncMode +from airbyte_cdk.models import AirbyteLogMessage, Level, SyncMode from airbyte_cdk.sources.declarative.exceptions import ReadException from airbyte_cdk.sources.declarative.requesters.error_handlers.response_action 
import ResponseAction from airbyte_cdk.sources.declarative.requesters.error_handlers.response_status import ResponseStatus from airbyte_cdk.sources.declarative.requesters.request_option import RequestOptionType from airbyte_cdk.sources.declarative.requesters.requester import HttpMethod from airbyte_cdk.sources.declarative.retrievers.simple_retriever import SimpleRetriever +from airbyte_cdk.sources.declarative.stream_slicers import DatetimeStreamSlicer from airbyte_cdk.sources.streams.http.auth import NoAuth from airbyte_cdk.sources.streams.http.http import HttpStream primary_key = "pk" records = [{"id": 1}, {"id": 2}] +request_response_logs = [ + AirbyteLogMessage(level=Level.INFO, message="request:{}"), + AirbyteLogMessage(level=Level.INFO, message="response{}"), +] config = {} -@patch.object(HttpStream, "read_records", return_value=[]) +@patch.object(HttpStream, "_read_pages", return_value=[]) def test_simple_retriever_full(mock_http_stream): requester = MagicMock() request_params = {"param": "value"} @@ -45,7 +50,7 @@ def test_simple_retriever_full(mock_http_stream): underlying_state = {"date": "2021-01-01"} iterator.get_stream_state.return_value = underlying_state - requester.get_authenticator.return_value = NoAuth + requester.get_authenticator.return_value = NoAuth() url_base = "https://airbyte.io" requester.get_url_base.return_value = url_base path = "/v1" @@ -77,6 +82,7 @@ def test_simple_retriever_full(mock_http_stream): record_selector=record_selector, stream_slicer=iterator, options={}, + config={}, ) assert retriever.primary_key == primary_key @@ -89,7 +95,7 @@ def test_simple_retriever_full(mock_http_stream): assert retriever._last_response is None assert retriever._last_records is None - assert retriever.parse_response(response, stream_state=None) == records + assert retriever.parse_response(response, stream_state={}) == records assert retriever._last_response == response assert retriever._last_records == records @@ -106,6 +112,67 @@ def test_simple_retriever_full(mock_http_stream): paginator.reset.assert_called() +@patch.object(HttpStream, "_read_pages", return_value=[*request_response_logs, *records]) +def test_simple_retriever_with_request_response_logs(mock_http_stream): + requester = MagicMock() + paginator = MagicMock() + record_selector = MagicMock() + iterator = DatetimeStreamSlicer( + start_datetime="", end_datetime="", step="1d", cursor_field="id", datetime_format="", config={}, options={} + ) + + retriever = SimpleRetriever( + name="stream_name", + primary_key=primary_key, + requester=requester, + paginator=paginator, + record_selector=record_selector, + stream_slicer=iterator, + options={}, + config={}, + ) + + actual_messages = [r for r in retriever.read_records(SyncMode.full_refresh)] + paginator.reset.assert_called() + + assert isinstance(actual_messages[0], AirbyteLogMessage) + assert isinstance(actual_messages[1], AirbyteLogMessage) + assert actual_messages[2] == records[0] + assert actual_messages[3] == records[1] + + +@patch.object(HttpStream, "_read_pages", return_value=[]) +def test_simple_retriever_with_request_response_log_last_records(mock_http_stream): + requester = MagicMock() + paginator = MagicMock() + record_selector = MagicMock() + record_selector.select_records.return_value = request_response_logs + response = requests.Response() + iterator = DatetimeStreamSlicer( + start_datetime="", end_datetime="", step="1d", cursor_field="id", datetime_format="", config={}, options={} + ) + + retriever = SimpleRetriever( + name="stream_name", + 
primary_key=primary_key, + requester=requester, + paginator=paginator, + record_selector=record_selector, + stream_slicer=iterator, + options={}, + config={}, + ) + + assert retriever._last_response is None + assert retriever._last_records is None + assert retriever.parse_response(response, stream_state={}) == request_response_logs + assert retriever._last_response == response + assert retriever._last_records == request_response_logs + + [r for r in retriever.read_records(SyncMode.full_refresh)] + paginator.reset.assert_called() + + @pytest.mark.parametrize( "test_name, requester_response, expected_should_retry, expected_backoff_time", [ @@ -116,7 +183,9 @@ def test_simple_retriever_full(mock_http_stream): ) def test_should_retry(test_name, requester_response, expected_should_retry, expected_backoff_time): requester = MagicMock(use_cache=False) - retriever = SimpleRetriever(name="stream_name", primary_key=primary_key, requester=requester, record_selector=MagicMock(), options={}) + retriever = SimpleRetriever( + name="stream_name", primary_key=primary_key, requester=requester, record_selector=MagicMock(), options={}, config={} + ) requester.interpret_response_status.return_value = requester_response assert retriever.should_retry(requests.Response()) == expected_should_retry if requester_response.action == ResponseAction.RETRY: @@ -149,7 +218,7 @@ def test_parse_response(test_name, status_code, response_status, len_expected_re record_selector = MagicMock() record_selector.select_records.return_value = [{"id": 100}] retriever = SimpleRetriever( - name="stream_name", primary_key=primary_key, requester=requester, record_selector=record_selector, options={} + name="stream_name", primary_key=primary_key, requester=requester, record_selector=record_selector, options={}, config={} ) response = requests.Response() response.status_code = status_code @@ -180,7 +249,7 @@ def test_backoff_time(test_name, response_action, retry_in, expected_backoff_tim record_selector.select_records.return_value = [{"id": 100}] response = requests.Response() retriever = SimpleRetriever( - name="stream_name", primary_key=primary_key, requester=requester, record_selector=record_selector, options={} + name="stream_name", primary_key=primary_key, requester=requester, record_selector=record_selector, options={}, config={} ) if expected_backoff_time: requester.interpret_response_status.return_value = ResponseStatus(response_action, retry_in) @@ -232,6 +301,7 @@ def test_get_request_options_from_pagination(test_name, paginator_mapping, strea paginator=paginator, stream_slicer=stream_slicer, options={}, + config={}, ) request_option_type_to_method = { @@ -271,7 +341,13 @@ def test_get_request_headers(test_name, paginator_mapping, expected_mapping): record_selector = MagicMock() retriever = SimpleRetriever( - name="stream_name", primary_key=primary_key, requester=requester, record_selector=record_selector, paginator=paginator, options={} + name="stream_name", + primary_key=primary_key, + requester=requester, + record_selector=record_selector, + paginator=paginator, + options={}, + config={}, ) request_option_type_to_method = { @@ -315,6 +391,7 @@ def test_request_body_data(test_name, requester_body_data, paginator_body_data, record_selector=record_selector, paginator=paginator, options={}, + config={}, ) if expected_body_data: @@ -350,6 +427,7 @@ def test_path(test_name, requester_path, paginator_path, expected_path): record_selector=record_selector, paginator=paginator, options={}, + config={}, ) actual_path = 
retriever.path(stream_state=None, stream_slice=None, next_page_token=None) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_datetime_stream_slicer.py b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_datetime_stream_slicer.py index ea83a06ad4499..1bee16057667b 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_datetime_stream_slicer.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/stream_slicers/test_datetime_stream_slicer.py @@ -70,6 +70,56 @@ def mock_datetime_now(monkeypatch): {"start_time": "2021-01-09T00:00:00.000000+0000", "end_time": "2021-01-10T00:00:00.000000+0000"}, ], ), + ( + "test_1_week", + None, + MinMaxDatetime(datetime="{{ config['start_date'] }}", options={}), + MinMaxDatetime(datetime="2021-02-10T00:00:00.000000+0000", options={}), + "1w", + cursor_field, + None, + datetime_format, + [ + {"start_time": "2021-01-01T00:00:00.000000+0000", "end_time": "2021-01-07T00:00:00.000000+0000"}, + {"start_time": "2021-01-08T00:00:00.000000+0000", "end_time": "2021-01-14T00:00:00.000000+0000"}, + {"start_time": "2021-01-15T00:00:00.000000+0000", "end_time": "2021-01-21T00:00:00.000000+0000"}, + {"start_time": "2021-01-22T00:00:00.000000+0000", "end_time": "2021-01-28T00:00:00.000000+0000"}, + {"start_time": "2021-01-29T00:00:00.000000+0000", "end_time": "2021-02-04T00:00:00.000000+0000"}, + {"start_time": "2021-02-05T00:00:00.000000+0000", "end_time": "2021-02-10T00:00:00.000000+0000"}, + ], + ), + ( + "test_1_month", + None, + MinMaxDatetime(datetime="{{ config['start_date'] }}", options={}), + MinMaxDatetime(datetime="2021-06-10T00:00:00.000000+0000", options={}), + "1m", + cursor_field, + None, + datetime_format, + [ + {"start_time": "2021-01-01T00:00:00.000000+0000", "end_time": "2021-01-31T00:00:00.000000+0000"}, + {"start_time": "2021-02-01T00:00:00.000000+0000", "end_time": "2021-02-28T00:00:00.000000+0000"}, + {"start_time": "2021-03-01T00:00:00.000000+0000", "end_time": "2021-03-31T00:00:00.000000+0000"}, + {"start_time": "2021-04-01T00:00:00.000000+0000", "end_time": "2021-04-30T00:00:00.000000+0000"}, + {"start_time": "2021-05-01T00:00:00.000000+0000", "end_time": "2021-05-31T00:00:00.000000+0000"}, + {"start_time": "2021-06-01T00:00:00.000000+0000", "end_time": "2021-06-10T00:00:00.000000+0000"}, + ], + ), + ( + "test_1_year", + None, + MinMaxDatetime(datetime="{{ config['start_date'] }}", options={}), + MinMaxDatetime(datetime="2022-06-10T00:00:00.000000+0000", options={}), + "1y", + cursor_field, + None, + datetime_format, + [ + {"start_time": "2021-01-01T00:00:00.000000+0000", "end_time": "2021-12-31T00:00:00.000000+0000"}, + {"start_time": "2022-01-01T00:00:00.000000+0000", "end_time": "2022-01-01T00:00:00.000000+0000"}, + ], + ), ( "test_from_stream_state", {"date": "2021-01-05T00:00:00.000000+0000"}, diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py index 98f5a4563504a..7ff319b9ec74e 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_declarative_stream.py @@ -5,12 +5,10 @@ from unittest import mock from unittest.mock import MagicMock, call -from airbyte_cdk.models import SyncMode +from airbyte_cdk.models import AirbyteLogMessage, AirbyteTraceMessage, Level, SyncMode, TraceType from airbyte_cdk.sources.declarative.declarative_stream import 
DeclarativeStream from airbyte_cdk.sources.declarative.transformations import RecordTransformation -from .schema.source_test import SourceTest # noqa #pylint: disable=unused-import - def test_declarative_stream(): name = "stream" @@ -22,8 +20,17 @@ def test_declarative_stream(): schema_loader.get_json_schema.return_value = json_schema state = MagicMock() - records = [{"pk": 1234, "field": "value"}, {"pk": 4567, "field": "different_value"}] - stream_slices = [{"date": "2021-01-01"}, {"date": "2021-01-02"}, {"date": "2021-01-03"}] + records = [ + {"pk": 1234, "field": "value"}, + {"pk": 4567, "field": "different_value"}, + AirbyteLogMessage(level=Level.INFO, message="This is a log message"), + AirbyteTraceMessage(type=TraceType.ERROR, emitted_at=12345), + ] + stream_slices = [ + {"date": "2021-01-01"}, + {"date": "2021-01-02"}, + {"date": "2021-01-03"}, + ] checkpoint_interval = 1000 retriever = MagicMock() @@ -60,5 +67,7 @@ def test_declarative_stream(): assert stream.state_checkpoint_interval == checkpoint_interval for transformation in transformations: assert len(transformation.transform.call_args_list) == len(records) - expected_calls = [call(record, config=config, stream_slice=input_slice, stream_state=state) for record in records] + expected_calls = [ + call(record, config=config, stream_slice=input_slice, stream_state=state) for record in records if isinstance(record, dict) + ] transformation.transform.assert_has_calls(expected_calls, any_order=False) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py index 8b496c5c0ffdf..bb47c97097fc5 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_factory.py @@ -15,7 +15,7 @@ from airbyte_cdk.sources.declarative.extractors.record_selector import RecordSelector from airbyte_cdk.sources.declarative.interpolation import InterpolatedString from airbyte_cdk.sources.declarative.parsers.factory import DeclarativeComponentFactory -from airbyte_cdk.sources.declarative.parsers.yaml_parser import YamlParser +from airbyte_cdk.sources.declarative.parsers.manifest_reference_resolver import ManifestReferenceResolver from airbyte_cdk.sources.declarative.requesters.error_handlers import BackoffStrategy from airbyte_cdk.sources.declarative.requesters.error_handlers.backoff_strategies import ( ConstantBackoffStrategy, @@ -40,11 +40,13 @@ from airbyte_cdk.sources.declarative.stream_slicers.list_stream_slicer import ListStreamSlicer from airbyte_cdk.sources.declarative.transformations import AddFields, RemoveFields from airbyte_cdk.sources.declarative.transformations.add_fields import AddedFieldDefinition +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource +from dateutil.relativedelta import relativedelta from jsonschema import ValidationError factory = DeclarativeComponentFactory() -parser = YamlParser() +resolver = ManifestReferenceResolver() input_config = {"apikey": "verysecrettoken", "repos": ["airbyte", "airbyte-cloud"]} @@ -63,7 +65,7 @@ def test_factory(): request_body_json: body_offset: "{{ next_page_token['offset'] }}" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["request_options"], input_config, False) @@ -88,7 +90,7 @@ def test_interpolate_config(): body_field: "yoyoyo" interpolated_body_field: "{{ config['apikey'] }}" """ - 
config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["authenticator"], input_config, False) @@ -110,7 +112,7 @@ def test_list_based_stream_slicer_with_values_refd(): slice_values: "*ref(repositories)" cursor_field: repository """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["stream_slicer"], input_config, False) @@ -128,7 +130,7 @@ def test_list_based_stream_slicer_with_values_defined_in_config(): inject_into: header field_name: repository """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["stream_slicer"], input_config, False) @@ -180,7 +182,7 @@ def test_create_substream_slicer(): parent_key: someid stream_slice_field: word_id """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") stream_slicer = factory.create_component(config["stream_slicer"], input_config)() parent_stream_configs = stream_slicer.parent_stream_configs @@ -215,7 +217,7 @@ def test_create_cartesian_stream_slicer(): - "*ref(stream_slicer_A)" - "*ref(stream_slicer_B)" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["stream_slicer"], input_config, False) @@ -247,7 +249,7 @@ def test_datetime_stream_slicer(): field_name: created[gte] """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["stream_slicer"], input_config, False) @@ -261,7 +263,7 @@ def test_datetime_stream_slicer(): assert stream_slicer.start_datetime.datetime.string == "{{ config['start_time'] }}" assert stream_slicer.start_datetime.min_datetime.string == "{{ config['start_time'] + day_delta(2) }}" assert stream_slicer.end_datetime.datetime.string == "{{ config['end_time'] }}" - assert stream_slicer._step == datetime.timedelta(days=10) + assert stream_slicer._step == relativedelta(days=10) assert stream_slicer.cursor_field.string == "created" assert stream_slicer.lookback_window.string == "5d" assert stream_slicer.start_time_option.inject_into == RequestOptionType.request_parameter @@ -344,8 +346,24 @@ def test_full_config(): check: class_name: airbyte_cdk.sources.declarative.checks.check_stream.CheckStream stream_names: ["list_stream"] +spec: + class_name: airbyte_cdk.sources.declarative.spec.Spec + documentation_url: https://airbyte.com/#yaml-from-manifest + connection_specification: + title: Test Spec + type: object + required: + - api_key + additionalProperties: false + properties: + api_key: + type: string + airbyte_secret: true + title: API Key + description: Test API Key + order: 0 """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["list_stream"], input_config, False) @@ -376,6 +394,20 @@ def test_full_config(): assert len(streams_to_check) == 1 assert list(streams_to_check)[0] == "list_stream" + spec = factory.create_component(config["spec"], input_config)() + documentation_url = spec.documentation_url + connection_specification = spec.connection_specification + assert documentation_url == "https://airbyte.com/#yaml-from-manifest" + assert connection_specification["title"] 
== "Test Spec" + assert connection_specification["required"] == ["api_key"] + assert connection_specification["properties"]["api_key"] == { + "type": "string", + "airbyte_secret": True, + "title": "API Key", + "description": "Test API Key", + "order": 0, + } + assert stream.retriever.requester.path.default == "marketing/lists" @@ -398,7 +430,7 @@ def test_create_record_selector(test_name, record_selector, expected_runtime_sel $ref: "*ref(extractor)" field_pointer: ["{record_selector}"] """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["selector"], input_config, False) @@ -445,7 +477,7 @@ def test_create_record_selector(test_name, record_selector, expected_runtime_sel ], ) def test_options_propagation(test_name, content, expected_field_pointer_value): - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") selector = factory.create_component(config["selector"], input_config, True)() assert selector.extractor.field_pointer[0].eval(input_config) == expected_field_pointer_value @@ -515,7 +547,7 @@ def test_create_requester(test_name, error_handler): header: header_value {error_handler} """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["requester"], input_config, False) @@ -545,7 +577,7 @@ def test_create_composite_error_handler(): - http_codes: [ 403 ] action: RETRY """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["error_handler"], input_config, False) @@ -594,7 +626,7 @@ def test_config_with_defaults(): streams: - "*ref(lists_stream)" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["lists_stream"], input_config, False) @@ -631,7 +663,7 @@ def test_create_default_paginator(): page_size: 50 cursor_value: "{{ response._metadata.next }}" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["paginator"], input_config, False) @@ -670,7 +702,7 @@ def test_no_transformations(self): $options: {self.base_options} """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["the_stream"], input_config, False) @@ -690,7 +722,7 @@ def test_remove_fields(self): - ["path", "to", "field1"] - ["path2"] """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["the_stream"], input_config, False) @@ -711,7 +743,7 @@ def test_add_fields(self): - path: ["field1"] value: "static_value" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["the_stream"], input_config, False) @@ -743,7 +775,7 @@ def test_validation_wrong_input_type(): $ref: "*ref(extractor)" field_pointer: 408 """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") with pytest.raises(ValidationError): factory.create_component(config["selector"], input_config, False) @@ -766,7 +798,7 @@ def 
test_validation_type_missing_required_fields(): field_name: created[gte] """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") with pytest.raises(ValidationError): factory.create_component(config["stream_slicer"], input_config, False) @@ -786,7 +818,7 @@ def test_validation_wrong_interface_type(): type: "MinMaxDatetime" datetime: "{{ response._metadata.next }}" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") with pytest.raises(ValidationError): factory.create_component(config["paginator"], input_config, False) @@ -802,7 +834,7 @@ def test_validation_create_composite_error_handler(): - response_filters: - http_codes: [ 403 ] """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") with pytest.raises(ValidationError): factory.create_component(config["error_handler"], input_config, False) @@ -827,7 +859,7 @@ def test_validation_wrong_object_type(): type: "MinMaxDatetime" datetime: "{{ response._metadata.next }}" """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["paginator"], input_config, False) @@ -841,7 +873,7 @@ def test_validate_types_nested_in_list(): - type: DpathExtractor field_pointer: ["result"] """ - config = parser.parse(content) + config = resolver.preprocess_manifest(YamlDeclarativeSource._parse(content), {}, "") factory.create_component(config["error_handler"], input_config, False) diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py new file mode 100644 index 0000000000000..c7abafc4a9b36 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_manifest_declarative_source.py @@ -0,0 +1,601 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json +import logging +import os +import sys + +import pytest +import yaml +from airbyte_cdk.sources.declarative.exceptions import InvalidConnectorDefinitionException +from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource +from jsonschema.exceptions import ValidationError + +logger = logging.getLogger("airbyte") + +EXTERNAL_CONNECTION_SPECIFICATION = { + "type": "object", + "required": ["api_token"], + "additionalProperties": False, + "properties": {"api_token": {"type": "string"}}, +} + + +class MockManifestDeclarativeSource(ManifestDeclarativeSource): + """ + Mock test class that is needed to monkey patch how we read from various files that make up a declarative source because of how our + tests write configuration files during testing. It is also used to properly namespace where files get written in specific + cases like when we temporarily write files like spec.yaml to the package unit_tests, which is the directory where it will + be read in during the tests. + """ + + +class TestYamlDeclarativeSource: + @pytest.fixture + def use_external_yaml_spec(self): + # Our way of resolving the absolute path to root of the airbyte-cdk unit test directory where spec.yaml files should + # be written to (i.e. ~/airbyte/airbyte-cdk/python/unit-tests) because that is where they are read from during testing. 
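# Illustrative sketch, not part of this patch: the use_external_yaml_spec fixture
# (continued just below) writes a temporary spec.yaml next to the unit tests so a
# manifest with no in-YAML "spec" block can still resolve an external spec file.
# The snippet here only mirrors that write/read/cleanup round trip in isolation;
# the directory, file name, and spec contents are hypothetical placeholders.
import os
import tempfile

import yaml

example_spec = {
    "documentationUrl": "https://example.com/docs",
    "connectionSpecification": {
        "type": "object",
        "required": ["api_token"],
        "additionalProperties": False,
        "properties": {"api_token": {"type": "string"}},
    },
}

with tempfile.TemporaryDirectory() as spec_root:
    yaml_path = os.path.join(spec_root, "spec.yaml")
    with open(yaml_path, "w") as f:
        f.write(yaml.dump(example_spec))  # same write pattern the fixture uses
    with open(yaml_path) as f:
        loaded = yaml.safe_load(f)  # what a source reading the external spec would see
    assert loaded == example_spec
# The temporary directory (and its spec.yaml) is removed on exit, mirroring the
# fixture's os.remove() teardown.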
+ module = sys.modules[__name__] + module_path = os.path.abspath(module.__file__) + test_path = os.path.dirname(module_path) + spec_root = test_path.split("/sources/declarative")[0] + + spec = {"documentationUrl": "https://airbyte.com/#yaml-from-external", "connectionSpecification": EXTERNAL_CONNECTION_SPECIFICATION} + + yaml_path = os.path.join(spec_root, "spec.yaml") + with open(yaml_path, "w") as f: + f.write(yaml.dump(spec)) + yield + os.remove(yaml_path) + + def test_valid_manifest(self): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + } + ManifestDeclarativeSource(source_config=manifest) + + def test_manifest_with_spec(self): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": 
{"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + "spec": { + "type": "Spec", + "documentation_url": "https://airbyte.com/#yaml-from-manifest", + "connection_specification": { + "title": "Test Spec", + "type": "object", + "required": ["api_key"], + "additionalProperties": False, + "properties": { + "api_key": {"type": "string", "airbyte_secret": True, "title": "API Key", "description": "Test API Key", "order": 0} + }, + }, + }, + } + source = ManifestDeclarativeSource(source_config=manifest) + connector_specification = source.spec(logger) + assert connector_specification is not None + assert connector_specification.documentationUrl == "https://airbyte.com/#yaml-from-manifest" + assert connector_specification.connectionSpecification["title"] == "Test Spec" + assert connector_specification.connectionSpecification["required"][0] == "api_key" + assert connector_specification.connectionSpecification["additionalProperties"] is False + assert connector_specification.connectionSpecification["properties"]["api_key"] == { + "type": "string", + "airbyte_secret": True, + "title": "API Key", + "description": "Test API Key", + "order": 0, + } + + def test_manifest_with_external_spec(self, use_external_yaml_spec): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + } + source = MockManifestDeclarativeSource(source_config=manifest) + + connector_specification = source.spec(logger) + + assert connector_specification.documentationUrl == 
"https://airbyte.com/#yaml-from-external" + assert connector_specification.connectionSpecification == EXTERNAL_CONNECTION_SPECIFICATION + + def test_source_is_not_created_if_toplevel_fields_are_unknown(self): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + "not_a_valid_field": "error", + } + with pytest.raises(InvalidConnectorDefinitionException): + ManifestDeclarativeSource(manifest) + + def test_source_missing_checker_fails_validation(self): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": 
{"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream"}, + } + with pytest.raises(ValidationError): + ManifestDeclarativeSource(source_config=manifest) + + def test_source_with_missing_streams_fails(self): + manifest = {"version": "version", "definitions": None, "check": {"type": "CheckStream", "stream_names": ["lists"]}} + with pytest.raises(ValidationError): + ManifestDeclarativeSource(manifest) + + def test_source_with_missing_version_fails(self): + manifest = { + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + } + with pytest.raises(ValidationError): + ManifestDeclarativeSource(manifest) + + def test_source_with_invalid_stream_config_fails_validation(self): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"} + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + } + with pytest.raises(ValidationError): + ManifestDeclarativeSource(manifest) + + def test_source_with_no_external_spec_and_no_in_yaml_spec_fails(self): + manifest = { + "version": "version", + "definitions": { + "schema_loader": {"name": "{{ options.stream_name }}", "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml"}, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": 
"path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + }, + "streams": [ + { + "type": "DeclarativeStream", + "$options": {"name": "lists", "primary_key": "id", "url_base": "https://api.sendgrid.com"}, + "schema_loader": { + "name": "{{ options.stream_name }}", + "file_path": "./source_sendgrid/schemas/{{ options.name }}.yaml", + }, + "retriever": { + "paginator": { + "type": "DefaultPaginator", + "page_size": 10, + "page_size_option": {"inject_into": "request_parameter", "field_name": "page_size"}, + "page_token_option": {"inject_into": "path"}, + "pagination_strategy": {"type": "CursorPagination", "cursor_value": "{{ response._metadata.next }}"}, + }, + "requester": { + "path": "/v3/marketing/lists", + "authenticator": {"type": "BearerAuthenticator", "api_token": "{{ config.apikey }}"}, + "request_parameters": {"page_size": 10}, + }, + "record_selector": {"extractor": {"field_pointer": ["result"]}}, + }, + } + ], + "check": {"type": "CheckStream", "stream_names": ["lists"]}, + } + source = ManifestDeclarativeSource(source_config=manifest) + + # We expect to fail here because we have not created a temporary spec.yaml file + with pytest.raises(FileNotFoundError): + source.spec(logger) + + +def test_generate_schema(): + schema_str = ManifestDeclarativeSource.generate_schema() + schema = json.loads(schema_str) + + assert "version" in schema["required"] + assert "checker" in schema["required"] + assert "streams" in schema["required"] + assert schema["properties"]["checker"]["$ref"] == "#/definitions/CheckStream" + assert schema["properties"]["streams"]["items"]["$ref"] == "#/definitions/DeclarativeStream" + + check_stream = schema["definitions"]["CheckStream"] + assert {"stream_names"}.issubset(check_stream["required"]) + assert check_stream["properties"]["stream_names"]["type"] == "array" + assert check_stream["properties"]["stream_names"]["items"]["type"] == "string" + + declarative_stream = schema["definitions"]["DeclarativeStream"] + assert {"retriever", "config"}.issubset(declarative_stream["required"]) + assert {"$ref": "#/definitions/DefaultSchemaLoader"} in declarative_stream["properties"]["schema_loader"]["anyOf"] + assert {"$ref": "#/definitions/JsonFileSchemaLoader"} in declarative_stream["properties"]["schema_loader"]["anyOf"] + assert declarative_stream["properties"]["retriever"]["$ref"] == "#/definitions/SimpleRetriever" + assert declarative_stream["properties"]["name"]["type"] == "string" + assert {"type": "array", "items": {"type": "string"}} in declarative_stream["properties"]["primary_key"]["anyOf"] + assert {"type": "array", "items": {"type": "array", "items": {"type": "string"}}} in declarative_stream["properties"]["primary_key"][ + "anyOf" + ] + assert {"type": "string"} in declarative_stream["properties"]["primary_key"]["anyOf"] + assert {"type": "array", "items": {"type": "string"}} in declarative_stream["properties"]["stream_cursor_field"]["anyOf"] + assert {"type": "string"} in declarative_stream["properties"]["stream_cursor_field"]["anyOf"] + assert declarative_stream["properties"]["transformations"]["type"] == "array" + assert {"$ref": "#/definitions/AddFields"} in declarative_stream["properties"]["transformations"]["items"]["anyOf"] + assert 
{"$ref": "#/definitions/RemoveFields"} in declarative_stream["properties"]["transformations"]["items"]["anyOf"] + assert declarative_stream["properties"]["checkpoint_interval"]["type"] == "integer" + + simple_retriever = schema["definitions"]["SimpleRetriever"]["allOf"][1] + assert {"requester", "record_selector"}.issubset(simple_retriever["required"]) + assert simple_retriever["properties"]["requester"]["$ref"] == "#/definitions/HttpRequester" + assert simple_retriever["properties"]["record_selector"]["$ref"] == "#/definitions/RecordSelector" + assert simple_retriever["properties"]["name"]["type"] == "string" + assert {"type": "array", "items": {"type": "string"}} in declarative_stream["properties"]["primary_key"]["anyOf"] + assert {"type": "array", "items": {"type": "array", "items": {"type": "string"}}} in declarative_stream["properties"]["primary_key"][ + "anyOf" + ] + assert {"type": "string"} in declarative_stream["properties"]["primary_key"]["anyOf"] + assert {"$ref": "#/definitions/DefaultPaginator"} in simple_retriever["properties"]["paginator"]["anyOf"] + assert {"$ref": "#/definitions/NoPagination"} in simple_retriever["properties"]["paginator"]["anyOf"] + assert {"$ref": "#/definitions/CartesianProductStreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] + assert {"$ref": "#/definitions/DatetimeStreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] + assert {"$ref": "#/definitions/ListStreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] + assert {"$ref": "#/definitions/SingleSlice"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] + assert {"$ref": "#/definitions/SubstreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] + + http_requester = schema["definitions"]["HttpRequester"]["allOf"][1] + assert {"name", "url_base", "path", "config"}.issubset(http_requester["required"]) + assert http_requester["properties"]["name"]["type"] == "string" + assert {"$ref": "#/definitions/InterpolatedString"} in http_requester["properties"]["url_base"]["anyOf"] + assert {"type": "string"} in http_requester["properties"]["path"]["anyOf"] + assert {"$ref": "#/definitions/InterpolatedString"} in http_requester["properties"]["url_base"]["anyOf"] + assert {"type": "string"} in http_requester["properties"]["path"]["anyOf"] + assert {"type": "string"} in http_requester["properties"]["http_method"]["anyOf"] + assert {"type": "string", "enum": ["GET", "POST"]} in http_requester["properties"]["http_method"]["anyOf"] + assert http_requester["properties"]["request_options_provider"]["$ref"] == "#/definitions/InterpolatedRequestOptionsProvider" + assert {"$ref": "#/definitions/DeclarativeOauth2Authenticator"} in http_requester["properties"]["authenticator"]["anyOf"] + assert {"$ref": "#/definitions/ApiKeyAuthenticator"} in http_requester["properties"]["authenticator"]["anyOf"] + assert {"$ref": "#/definitions/BearerAuthenticator"} in http_requester["properties"]["authenticator"]["anyOf"] + assert {"$ref": "#/definitions/BasicHttpAuthenticator"} in http_requester["properties"]["authenticator"]["anyOf"] + assert {"$ref": "#/definitions/CompositeErrorHandler"} in http_requester["properties"]["error_handler"]["anyOf"] + assert {"$ref": "#/definitions/DefaultErrorHandler"} in http_requester["properties"]["error_handler"]["anyOf"] + + api_key_authenticator = schema["definitions"]["ApiKeyAuthenticator"]["allOf"][1] + assert {"header", "api_token", "config"}.issubset(api_key_authenticator["required"]) + assert {"$ref": 
"#/definitions/InterpolatedString"} in api_key_authenticator["properties"]["header"]["anyOf"] + assert {"type": "string"} in api_key_authenticator["properties"]["header"]["anyOf"] + assert {"$ref": "#/definitions/InterpolatedString"} in api_key_authenticator["properties"]["api_token"]["anyOf"] + assert {"type": "string"} in api_key_authenticator["properties"]["api_token"]["anyOf"] + + default_error_handler = schema["definitions"]["DefaultErrorHandler"]["allOf"][1] + assert default_error_handler["properties"]["response_filters"]["type"] == "array" + assert default_error_handler["properties"]["response_filters"]["items"]["$ref"] == "#/definitions/HttpResponseFilter" + assert default_error_handler["properties"]["max_retries"]["type"] == "integer" + assert default_error_handler["properties"]["backoff_strategies"]["type"] == "array" + + default_paginator = schema["definitions"]["DefaultPaginator"]["allOf"][1] + assert {"pagination_strategy", "config", "url_base"}.issubset(default_paginator["required"]) + assert default_paginator["properties"]["page_size_option"]["$ref"] == "#/definitions/RequestOption" + assert default_paginator["properties"]["page_token_option"]["$ref"] == "#/definitions/RequestOption" + assert {"$ref": "#/definitions/CursorPaginationStrategy"} in default_paginator["properties"]["pagination_strategy"]["anyOf"] + assert {"$ref": "#/definitions/OffsetIncrement"} in default_paginator["properties"]["pagination_strategy"]["anyOf"] + assert {"$ref": "#/definitions/PageIncrement"} in default_paginator["properties"]["pagination_strategy"]["anyOf"] + assert default_paginator["properties"]["decoder"]["$ref"] == "#/definitions/JsonDecoder" + assert {"$ref": "#/definitions/InterpolatedString"} in http_requester["properties"]["url_base"]["anyOf"] + assert {"type": "string"} in http_requester["properties"]["path"]["anyOf"] + + cursor_pagination_strategy = schema["definitions"]["CursorPaginationStrategy"]["allOf"][1] + assert {"cursor_value", "config"}.issubset(cursor_pagination_strategy["required"]) + assert {"$ref": "#/definitions/InterpolatedString"} in cursor_pagination_strategy["properties"]["cursor_value"]["anyOf"] + assert {"type": "string"} in cursor_pagination_strategy["properties"]["cursor_value"]["anyOf"] + assert {"$ref": "#/definitions/InterpolatedBoolean"} in cursor_pagination_strategy["properties"]["stop_condition"]["anyOf"] + assert {"type": "string"} in cursor_pagination_strategy["properties"]["stop_condition"]["anyOf"] + assert cursor_pagination_strategy["properties"]["decoder"]["$ref"] == "#/definitions/JsonDecoder" + + list_stream_slicer = schema["definitions"]["ListStreamSlicer"]["allOf"][1] + assert {"slice_values", "cursor_field", "config"}.issubset(list_stream_slicer["required"]) + assert {"type": "array", "items": {"type": "string"}} in list_stream_slicer["properties"]["slice_values"]["anyOf"] + assert {"type": "string"} in list_stream_slicer["properties"]["slice_values"]["anyOf"] + assert {"$ref": "#/definitions/InterpolatedString"} in list_stream_slicer["properties"]["cursor_field"]["anyOf"] + assert {"type": "string"} in list_stream_slicer["properties"]["cursor_field"]["anyOf"] + assert list_stream_slicer["properties"]["request_option"]["$ref"] == "#/definitions/RequestOption" + + added_field_definition = schema["definitions"]["AddedFieldDefinition"] + assert {"path", "value"}.issubset(added_field_definition["required"]) + assert added_field_definition["properties"]["path"]["type"] == "array" + assert added_field_definition["properties"]["path"]["items"]["type"] 
== "string" + assert {"$ref": "#/definitions/InterpolatedString"} in added_field_definition["properties"]["value"]["anyOf"] + assert {"type": "string"} in added_field_definition["properties"]["value"]["anyOf"] + + # There is something very strange about JsonSchemaMixin.json_schema(). For some reason, when this test is called independently + # it will pass. However, when it is invoked with the entire test file, certain components won't get generated in the schema. Since + # the generate_schema() method is invoked by independently so this doesn't happen under normal circumstance when we generate the + # complete schema. It only happens when the tests are all called together. + # One way to replicate this is to add DefaultErrorHandler.json_schema() to the start of this test and uncomment the assertions below + + # assert {"$ref": "#/definitions/ConstantBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"]["items"]["anyOf"] + # assert {"$ref": "#/definitions/ExponentialBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"]["items"][ + # "anyOf" + # ] + # assert {"$ref": "#/definitions/WaitTimeFromHeaderBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"][ + # "items" + # ]["anyOf"] + # assert {"$ref": "#/definitions/WaitUntilTimeFromHeaderBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"][ + # "items" + # ]["anyOf"] + # + # exponential_backoff_strategy = schema["definitions"]["ExponentialBackoffStrategy"]["allOf"][1] + # assert exponential_backoff_strategy["properties"]["factor"]["type"] == "number" diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/test_yaml_declarative_source.py b/airbyte-cdk/python/unit_tests/sources/declarative/test_yaml_declarative_source.py index 340d765a52ffb..ba6af18094f4d 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/test_yaml_declarative_source.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/test_yaml_declarative_source.py @@ -2,386 +2,147 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# -import json +import logging +import os +import tempfile +import pytest +from airbyte_cdk.sources.declarative.parsers.undefined_reference_exception import UndefinedReferenceException from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource - -# import pytest -# from airbyte_cdk.sources.declarative.exceptions import InvalidConnectorDefinitionException - -# import os -# import tempfile -# import unittest - - -# from jsonschema import ValidationError - - -# brianjlai: Commenting these out for the moment because I can't figure out why the temp file is unreadable at runtime during testing -# its more urgent to fix the connectors -# class TestYamlDeclarativeSource(unittest.TestCase): -# def test_source_is_created_if_toplevel_fields_are_known(self): -# content = """ -# version: "version" -# definitions: -# schema_loader: -# name: "{{ options.stream_name }}" -# file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" -# retriever: -# paginator: -# type: "DefaultPaginator" -# page_size: 10 -# page_size_option: -# inject_into: request_parameter -# field_name: page_size -# page_token_option: -# inject_into: path -# pagination_strategy: -# type: "CursorPagination" -# cursor_value: "{{ response._metadata.next }}" -# requester: -# path: "/v3/marketing/lists" -# authenticator: -# type: "BearerAuthenticator" -# api_token: "{{ config.apikey }}" -# request_parameters: -# page_size: 10 -# record_selector: -# extractor: -# field_pointer: ["result"] -# streams: -# - type: DeclarativeStream -# $options: -# name: "lists" -# primary_key: id -# url_base: "https://api.sendgrid.com" -# schema_loader: "*ref(definitions.schema_loader)" -# retriever: "*ref(definitions.retriever)" -# check: -# type: CheckStream -# stream_names: ["lists"] -# """ -# temporary_file = TestFileContent(content) -# YamlDeclarativeSource(temporary_file.filename) -# -# def test_source_is_not_created_if_toplevel_fields_are_unknown(self): -# content = """ -# version: "version" -# definitions: -# schema_loader: -# name: "{{ options.stream_name }}" -# file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" -# retriever: -# paginator: -# type: "DefaultPaginator" -# page_size: 10 -# page_size_option: -# inject_into: request_parameter -# field_name: page_size -# page_token_option: -# inject_into: path -# pagination_strategy: -# type: "CursorPagination" -# cursor_value: "{{ response._metadata.next }}" -# requester: -# path: "/v3/marketing/lists" -# authenticator: -# type: "BearerAuthenticator" -# api_token: "{{ config.apikey }}" -# request_parameters: -# page_size: 10 -# record_selector: -# extractor: -# field_pointer: ["result"] -# streams: -# - type: DeclarativeStream -# $options: -# name: "lists" -# primary_key: id -# url_base: "https://api.sendgrid.com" -# schema_loader: "*ref(definitions.schema_loader)" -# retriever: "*ref(definitions.retriever)" -# check: -# type: CheckStream -# stream_names: ["lists"] -# not_a_valid_field: "error" -# """ -# temporary_file = TestFileContent(content) -# with self.assertRaises(InvalidConnectorDefinitionException): -# YamlDeclarativeSource(temporary_file.filename) -# -# def test_source_missing_checker_fails_validation(self): -# content = """ -# version: "version" -# definitions: -# schema_loader: -# name: "{{ options.stream_name }}" -# file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" -# retriever: -# paginator: -# type: "DefaultPaginator" -# page_size: 10 -# page_size_option: -# inject_into: request_parameter -# field_name: page_size -# page_token_option: 
-# inject_into: path -# pagination_strategy: -# type: "CursorPagination" -# cursor_value: "{{ response._metadata.next }}" -# requester: -# path: "/v3/marketing/lists" -# authenticator: -# type: "BearerAuthenticator" -# api_token: "{{ config.apikey }}" -# request_parameters: -# page_size: 10 -# record_selector: -# extractor: -# field_pointer: ["result"] -# streams: -# - type: DeclarativeStream -# $options: -# name: "lists" -# primary_key: id -# url_base: "https://api.sendgrid.com" -# schema_loader: "*ref(definitions.schema_loader)" -# retriever: "*ref(definitions.retriever)" -# check: -# type: CheckStream -# """ -# temporary_file = TestFileContent(content) -# with pytest.raises(ValidationError): -# YamlDeclarativeSource(temporary_file.filename) -# -# def test_source_with_missing_streams_fails(self): -# content = """ -# version: "version" -# definitions: -# check: -# type: CheckStream -# stream_names: ["lists"] -# """ -# temporary_file = TestFileContent(content) -# with pytest.raises(ValidationError): -# YamlDeclarativeSource(temporary_file.filename) -# -# def test_source_with_missing_version_fails(self): -# content = """ -# definitions: -# schema_loader: -# name: "{{ options.stream_name }}" -# file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" -# retriever: -# paginator: -# type: "DefaultPaginator" -# page_size: 10 -# page_size_option: -# inject_into: request_parameter -# field_name: page_size -# page_token_option: -# inject_into: path -# pagination_strategy: -# type: "CursorPagination" -# cursor_value: "{{ response._metadata.next }}" -# requester: -# path: "/v3/marketing/lists" -# authenticator: -# type: "BearerAuthenticator" -# api_token: "{{ config.apikey }}" -# request_parameters: -# page_size: 10 -# record_selector: -# extractor: -# field_pointer: ["result"] -# streams: -# - type: DeclarativeStream -# $options: -# name: "lists" -# primary_key: id -# url_base: "https://api.sendgrid.com" -# schema_loader: "*ref(definitions.schema_loader)" -# retriever: "*ref(definitions.retriever)" -# check: -# type: CheckStream -# stream_names: ["lists"] -# """ -# temporary_file = TestFileContent(content) -# with pytest.raises(ValidationError): -# YamlDeclarativeSource(temporary_file.filename) -# -# def test_source_with_invalid_stream_config_fails_validation(self): -# content = """ -# version: "version" -# definitions: -# schema_loader: -# name: "{{ options.stream_name }}" -# file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" -# streams: -# - type: DeclarativeStream -# $options: -# name: "lists" -# primary_key: id -# url_base: "https://api.sendgrid.com" -# schema_loader: "*ref(definitions.schema_loader)" -# check: -# type: CheckStream -# stream_names: ["lists"] -# """ -# temporary_file = TestFileContent(content) -# with pytest.raises(ValidationError): -# YamlDeclarativeSource(temporary_file.filename) -# -# -# class TestFileContent: -# def __init__(self, content): -# self.file = tempfile.NamedTemporaryFile(mode="w", delete=False) -# -# with self.file as f: -# f.write(content) -# -# @property -# def filename(self): -# return self.file.name -# -# def __enter__(self): -# return self -# -# def __exit__(self, type, value, traceback): -# os.unlink(self.filename) - - -def test_generate_schema(): - schema_str = YamlDeclarativeSource.generate_schema() - schema = json.loads(schema_str) - - assert "version" in schema["required"] - assert "checker" in schema["required"] - assert "streams" in schema["required"] - assert schema["properties"]["checker"]["$ref"] == "#/definitions/CheckStream" - 
assert schema["properties"]["streams"]["items"]["$ref"] == "#/definitions/DeclarativeStream" - - check_stream = schema["definitions"]["CheckStream"] - assert {"stream_names"}.issubset(check_stream["required"]) - assert check_stream["properties"]["stream_names"]["type"] == "array" - assert check_stream["properties"]["stream_names"]["items"]["type"] == "string" - - declarative_stream = schema["definitions"]["DeclarativeStream"] - assert {"retriever", "config"}.issubset(declarative_stream["required"]) - assert {"$ref": "#/definitions/DefaultSchemaLoader"} in declarative_stream["properties"]["schema_loader"]["anyOf"] - assert {"$ref": "#/definitions/JsonFileSchemaLoader"} in declarative_stream["properties"]["schema_loader"]["anyOf"] - assert declarative_stream["properties"]["retriever"]["$ref"] == "#/definitions/SimpleRetriever" - assert declarative_stream["properties"]["name"]["type"] == "string" - assert {"type": "array", "items": {"type": "string"}} in declarative_stream["properties"]["primary_key"]["anyOf"] - assert {"type": "array", "items": {"type": "array", "items": {"type": "string"}}} in declarative_stream["properties"]["primary_key"][ - "anyOf" - ] - assert {"type": "string"} in declarative_stream["properties"]["primary_key"]["anyOf"] - assert {"type": "array", "items": {"type": "string"}} in declarative_stream["properties"]["stream_cursor_field"]["anyOf"] - assert {"type": "string"} in declarative_stream["properties"]["stream_cursor_field"]["anyOf"] - assert declarative_stream["properties"]["transformations"]["type"] == "array" - assert {"$ref": "#/definitions/AddFields"} in declarative_stream["properties"]["transformations"]["items"]["anyOf"] - assert {"$ref": "#/definitions/RemoveFields"} in declarative_stream["properties"]["transformations"]["items"]["anyOf"] - assert declarative_stream["properties"]["checkpoint_interval"]["type"] == "integer" - - simple_retriever = schema["definitions"]["SimpleRetriever"]["allOf"][1] - assert {"requester", "record_selector"}.issubset(simple_retriever["required"]) - assert simple_retriever["properties"]["requester"]["$ref"] == "#/definitions/HttpRequester" - assert simple_retriever["properties"]["record_selector"]["$ref"] == "#/definitions/RecordSelector" - assert simple_retriever["properties"]["name"]["type"] == "string" - assert {"type": "array", "items": {"type": "string"}} in declarative_stream["properties"]["primary_key"]["anyOf"] - assert {"type": "array", "items": {"type": "array", "items": {"type": "string"}}} in declarative_stream["properties"]["primary_key"][ - "anyOf" - ] - assert {"type": "string"} in declarative_stream["properties"]["primary_key"]["anyOf"] - assert {"$ref": "#/definitions/DefaultPaginator"} in simple_retriever["properties"]["paginator"]["anyOf"] - assert {"$ref": "#/definitions/NoPagination"} in simple_retriever["properties"]["paginator"]["anyOf"] - assert {"$ref": "#/definitions/CartesianProductStreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] - assert {"$ref": "#/definitions/DatetimeStreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] - assert {"$ref": "#/definitions/ListStreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] - assert {"$ref": "#/definitions/SingleSlice"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] - assert {"$ref": "#/definitions/SubstreamSlicer"} in simple_retriever["properties"]["stream_slicer"]["anyOf"] - - http_requester = schema["definitions"]["HttpRequester"]["allOf"][1] - assert {"name", "url_base", "path", 
"config"}.issubset(http_requester["required"]) - assert http_requester["properties"]["name"]["type"] == "string" - assert {"$ref": "#/definitions/InterpolatedString"} in http_requester["properties"]["url_base"]["anyOf"] - assert {"type": "string"} in http_requester["properties"]["path"]["anyOf"] - assert {"$ref": "#/definitions/InterpolatedString"} in http_requester["properties"]["url_base"]["anyOf"] - assert {"type": "string"} in http_requester["properties"]["path"]["anyOf"] - assert {"type": "string"} in http_requester["properties"]["http_method"]["anyOf"] - assert {"type": "string", "enum": ["GET", "POST"]} in http_requester["properties"]["http_method"]["anyOf"] - assert http_requester["properties"]["request_options_provider"]["$ref"] == "#/definitions/InterpolatedRequestOptionsProvider" - assert {"$ref": "#/definitions/DeclarativeOauth2Authenticator"} in http_requester["properties"]["authenticator"]["anyOf"] - assert {"$ref": "#/definitions/ApiKeyAuthenticator"} in http_requester["properties"]["authenticator"]["anyOf"] - assert {"$ref": "#/definitions/BearerAuthenticator"} in http_requester["properties"]["authenticator"]["anyOf"] - assert {"$ref": "#/definitions/BasicHttpAuthenticator"} in http_requester["properties"]["authenticator"]["anyOf"] - assert {"$ref": "#/definitions/CompositeErrorHandler"} in http_requester["properties"]["error_handler"]["anyOf"] - assert {"$ref": "#/definitions/DefaultErrorHandler"} in http_requester["properties"]["error_handler"]["anyOf"] - - api_key_authenticator = schema["definitions"]["ApiKeyAuthenticator"]["allOf"][1] - assert {"header", "api_token", "config"}.issubset(api_key_authenticator["required"]) - assert {"$ref": "#/definitions/InterpolatedString"} in api_key_authenticator["properties"]["header"]["anyOf"] - assert {"type": "string"} in api_key_authenticator["properties"]["header"]["anyOf"] - assert {"$ref": "#/definitions/InterpolatedString"} in api_key_authenticator["properties"]["api_token"]["anyOf"] - assert {"type": "string"} in api_key_authenticator["properties"]["api_token"]["anyOf"] - - default_error_handler = schema["definitions"]["DefaultErrorHandler"]["allOf"][1] - assert default_error_handler["properties"]["response_filters"]["type"] == "array" - assert default_error_handler["properties"]["response_filters"]["items"]["$ref"] == "#/definitions/HttpResponseFilter" - assert default_error_handler["properties"]["max_retries"]["type"] == "integer" - assert default_error_handler["properties"]["backoff_strategies"]["type"] == "array" - - default_paginator = schema["definitions"]["DefaultPaginator"]["allOf"][1] - assert {"page_token_option", "pagination_strategy", "config", "url_base"}.issubset(default_paginator["required"]) - assert default_paginator["properties"]["page_size_option"]["$ref"] == "#/definitions/RequestOption" - assert default_paginator["properties"]["page_token_option"]["$ref"] == "#/definitions/RequestOption" - assert {"$ref": "#/definitions/CursorPaginationStrategy"} in default_paginator["properties"]["pagination_strategy"]["anyOf"] - assert {"$ref": "#/definitions/OffsetIncrement"} in default_paginator["properties"]["pagination_strategy"]["anyOf"] - assert {"$ref": "#/definitions/PageIncrement"} in default_paginator["properties"]["pagination_strategy"]["anyOf"] - assert default_paginator["properties"]["decoder"]["$ref"] == "#/definitions/JsonDecoder" - assert {"$ref": "#/definitions/InterpolatedString"} in http_requester["properties"]["url_base"]["anyOf"] - assert {"type": "string"} in 
http_requester["properties"]["path"]["anyOf"] - - cursor_pagination_strategy = schema["definitions"]["CursorPaginationStrategy"]["allOf"][1] - assert {"cursor_value", "config"}.issubset(cursor_pagination_strategy["required"]) - assert {"$ref": "#/definitions/InterpolatedString"} in cursor_pagination_strategy["properties"]["cursor_value"]["anyOf"] - assert {"type": "string"} in cursor_pagination_strategy["properties"]["cursor_value"]["anyOf"] - assert {"$ref": "#/definitions/InterpolatedBoolean"} in cursor_pagination_strategy["properties"]["stop_condition"]["anyOf"] - assert {"type": "string"} in cursor_pagination_strategy["properties"]["stop_condition"]["anyOf"] - assert cursor_pagination_strategy["properties"]["decoder"]["$ref"] == "#/definitions/JsonDecoder" - - list_stream_slicer = schema["definitions"]["ListStreamSlicer"]["allOf"][1] - assert {"slice_values", "cursor_field", "config"}.issubset(list_stream_slicer["required"]) - assert {"type": "array", "items": {"type": "string"}} in list_stream_slicer["properties"]["slice_values"]["anyOf"] - assert {"type": "string"} in list_stream_slicer["properties"]["slice_values"]["anyOf"] - assert {"$ref": "#/definitions/InterpolatedString"} in list_stream_slicer["properties"]["cursor_field"]["anyOf"] - assert {"type": "string"} in list_stream_slicer["properties"]["cursor_field"]["anyOf"] - assert list_stream_slicer["properties"]["request_option"]["$ref"] == "#/definitions/RequestOption" - - added_field_definition = schema["definitions"]["AddedFieldDefinition"] - assert {"path", "value"}.issubset(added_field_definition["required"]) - assert added_field_definition["properties"]["path"]["type"] == "array" - assert added_field_definition["properties"]["path"]["items"]["type"] == "string" - assert {"$ref": "#/definitions/InterpolatedString"} in added_field_definition["properties"]["value"]["anyOf"] - assert {"type": "string"} in added_field_definition["properties"]["value"]["anyOf"] - - # There is something very strange about JsonSchemaMixin.json_schema(). For some reason, when this test is called independently - # it will pass. However, when it is invoked with the entire test file, certain components won't get generated in the schema. Since - # the generate_schema() method is invoked by independently so this doesn't happen under normal circumstance when we generate the - # complete schema. It only happens when the tests are all called together. 
- # One way to replicate this is to add DefaultErrorHandler.json_schema() to the start of this test and uncomment the assertions below - - # assert {"$ref": "#/definitions/ConstantBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"]["items"]["anyOf"] - # assert {"$ref": "#/definitions/ExponentialBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"]["items"][ - # "anyOf" - # ] - # assert {"$ref": "#/definitions/WaitTimeFromHeaderBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"][ - # "items" - # ]["anyOf"] - # assert {"$ref": "#/definitions/WaitUntilTimeFromHeaderBackoffStrategy"} in default_error_handler["properties"]["backoff_strategies"][ - # "items" - # ]["anyOf"] - # - # exponential_backoff_strategy = schema["definitions"]["ExponentialBackoffStrategy"]["allOf"][1] - # assert exponential_backoff_strategy["properties"]["factor"]["type"] == "number" +from yaml.parser import ParserError + +logger = logging.getLogger("airbyte") + + +EXTERNAL_CONNECTION_SPECIFICATION = { + "type": "object", + "required": ["api_token"], + "additionalProperties": False, + "properties": {"api_token": {"type": "string"}}, +} + + +class MockYamlDeclarativeSource(YamlDeclarativeSource): + """ + Mock test class needed to monkey patch how we read the various files that make up a declarative source, because our + tests write configuration files during testing. It is also used to properly namespace where files get written in specific + cases, such as when we temporarily write files like spec.yaml to the package unit_tests, which is the directory where they will + be read from during the tests. + """ + + def _read_and_parse_yaml_file(self, path_to_yaml_file): + """ + We override the default behavior because we use tempfile to write the yaml manifest to a temporary directory, which is + not mounted at runtime and therefore prevents pkgutil.get_data() from finding the yaml file needed to generate + the declarative source. For tests we use open(), which supports an absolute path.
+ """ + with open(path_to_yaml_file, "r") as f: + config_content = f.read() + parsed_config = YamlDeclarativeSource._parse(config_content) + return parsed_config + + +class TestYamlDeclarativeSource: + def test_source_is_created_if_toplevel_fields_are_known(self): + content = """ + version: "version" + definitions: + schema_loader: + name: "{{ options.stream_name }}" + file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" + retriever: + paginator: + type: "DefaultPaginator" + page_size: 10 + page_size_option: + inject_into: request_parameter + field_name: page_size + page_token_option: + inject_into: path + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response._metadata.next }}" + requester: + path: "/v3/marketing/lists" + authenticator: + type: "BearerAuthenticator" + api_token: "{{ config.apikey }}" + request_parameters: + page_size: 10 + record_selector: + extractor: + field_pointer: ["result"] + streams: + - type: DeclarativeStream + $options: + name: "lists" + primary_key: id + url_base: "https://api.sendgrid.com" + schema_loader: "*ref(definitions.schema_loader)" + retriever: "*ref(definitions.retriever)" + check: + type: CheckStream + stream_names: ["lists"] + """ + temporary_file = TestFileContent(content) + MockYamlDeclarativeSource(temporary_file.filename) + + def test_source_fails_for_invalid_yaml(self): + content = """ + version: "version" + definitions: + this is not parsable yaml: " at all + streams: + - type: DeclarativeStream + $options: + name: "lists" + primary_key: id + url_base: "https://api.sendgrid.com" + check: + type: CheckStream + stream_names: ["lists"] + """ + temporary_file = TestFileContent(content) + with pytest.raises(ParserError): + MockYamlDeclarativeSource(temporary_file.filename) + + def test_source_with_missing_reference_fails(self): + content = """ + version: "version" + definitions: + schema_loader: + name: "{{ options.stream_name }}" + file_path: "./source_sendgrid/schemas/{{ options.name }}.yaml" + streams: + - type: DeclarativeStream + $options: + name: "lists" + primary_key: id + url_base: "https://api.sendgrid.com" + schema_loader: "*ref(definitions.schema_loader)" + retriever: "*ref(definitions.retriever)" + check: + type: CheckStream + stream_names: ["lists"] + """ + temporary_file = TestFileContent(content) + with pytest.raises(UndefinedReferenceException): + MockYamlDeclarativeSource(temporary_file.filename) + + +class TestFileContent: + def __init__(self, content): + self.file = tempfile.NamedTemporaryFile(mode="w", delete=False) + + with self.file as f: + f.write(content) + + @property + def filename(self): + return self.file.name + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + os.unlink(self.filename) diff --git a/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py b/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py index 36386d2143d66..97fc2d9e283dc 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/http/requests_native_auth/test_requests_native_auth.py @@ -126,11 +126,12 @@ def test_refresh_request_body(self): refresh_token="some_refresh_token", scopes=["scope1", "scope2"], token_expiry_date=pendulum.now().add(days=3), + grant_type="some_grant_type", refresh_request_body={"custom_field": "in_outbound_request", "another_field": "exists_in_body", "scopes": 
["no_override"]}, ) body = oauth.build_refresh_request_body() expected = { - "grant_type": "refresh_token", + "grant_type": "some_grant_type", "client_id": "some_client_id", "client_secret": "some_client_secret", "refresh_token": "some_refresh_token", diff --git a/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py b/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py index 82fe96d412c23..1ae9214079c94 100644 --- a/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py +++ b/airbyte-cdk/python/unit_tests/sources/streams/test_streams_core.py @@ -4,6 +4,7 @@ from typing import Any, Iterable, List, Mapping +from unittest import mock import pytest from airbyte_cdk.models import AirbyteStream, SyncMode @@ -173,3 +174,11 @@ def test_wrapped_primary_key_various_argument(test_input, expected): wrapped = Stream._wrapped_primary_key(test_input) assert wrapped == expected + + +@mock.patch("airbyte_cdk.sources.utils.schema_helpers.ResourceSchemaLoader.get_schema") +def test_get_json_schema_is_cached(mocked_method): + stream = StreamStubFullRefresh() + for i in range(5): + stream.get_json_schema() + assert mocked_method.call_count == 1 diff --git a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py index 7696a058383fe..bbb808a0ec747 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_abstract_source.py @@ -2,6 +2,7 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # +import copy import logging from collections import defaultdict from typing import Any, Callable, Dict, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union @@ -11,6 +12,7 @@ from airbyte_cdk.models import ( AirbyteCatalog, AirbyteConnectionStatus, + AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, AirbyteStateBlob, @@ -21,6 +23,7 @@ ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode, + Level, Status, StreamDescriptor, SyncMode, @@ -29,6 +32,7 @@ from airbyte_cdk.sources import AbstractSource from airbyte_cdk.sources.connector_state_manager import ConnectorStateManager from airbyte_cdk.sources.streams import IncrementalMixin, Stream +from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message from airbyte_cdk.utils.traced_exception import AirbyteTracedException logger = logging.getLogger("airbyte") @@ -149,6 +153,31 @@ def state(self, value): pass +class MockStreamEmittingAirbyteMessages(MockStreamWithState): + def __init__( + self, inputs_and_mocked_outputs: List[Tuple[Mapping[str, Any], Iterable[AirbyteMessage]]] = None, name: str = None, state=None + ): + super().__init__(inputs_and_mocked_outputs, name, state) + self._inputs_and_mocked_outputs = inputs_and_mocked_outputs + self._name = name + + @property + def name(self): + return self._name + + @property + def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: + return "pk" + + @property + def state(self) -> MutableMapping[str, Any]: + return {self.cursor_field: self._cursor_value} if self._cursor_value else {} + + @state.setter + def state(self, value: MutableMapping[str, Any]): + self._cursor_value = value.get(self.cursor_field, self.start_date) + + def test_discover(mocker): """Tests that the appropriate AirbyteCatalog is returned from the discover method""" airbyte_stream1 = AirbyteStream( @@ -783,6 +812,115 @@ def test_with_slices_and_interval(self, mocker, use_legacy, per_stream_enabled): assert expected 
== messages + @pytest.mark.parametrize( + "per_stream_enabled", + [ + pytest.param(False, id="test_source_emits_state_as_per_stream_format"), + ], + ) + def test_emit_non_records(self, mocker, per_stream_enabled): + """ + Tests that an incremental read which uses slices and a checkpoint interval: + 1. outputs all records + 2. outputs a state message every N records (N=checkpoint_interval) + 3. outputs a state message after reading the entire slice + """ + + input_state = [] + slices = [{"1": "1"}, {"2": "2"}] + stream_output = [ + {"k1": "v1"}, + AirbyteLogMessage(level=Level.INFO, message="HELLO"), + {"k2": "v2"}, + {"k3": "v3"}, + ] + stream_1 = MockStreamEmittingAirbyteMessages( + [ + ( + { + "sync_mode": SyncMode.incremental, + "stream_slice": s, + "stream_state": mocker.ANY, + }, + stream_output, + ) + for s in slices + ], + name="s1", + state=copy.deepcopy(input_state), + ) + stream_2 = MockStreamEmittingAirbyteMessages( + [ + ( + { + "sync_mode": SyncMode.incremental, + "stream_slice": s, + "stream_state": mocker.ANY, + }, + stream_output, + ) + for s in slices + ], + name="s2", + state=copy.deepcopy(input_state), + ) + state = {"cursor": "value"} + mocker.patch.object(MockStream, "get_updated_state", return_value=state) + mocker.patch.object(MockStream, "supports_incremental", return_value=True) + mocker.patch.object(MockStream, "get_json_schema", return_value={}) + mocker.patch.object(MockStream, "stream_slices", return_value=slices) + mocker.patch.object( + MockStream, + "state_checkpoint_interval", + new_callable=mocker.PropertyMock, + return_value=2, + ) + + src = MockSource(streams=[stream_1, stream_2], per_stream=per_stream_enabled) + catalog = ConfiguredAirbyteCatalog( + streams=[ + _configured_stream(stream_1, SyncMode.incremental), + _configured_stream(stream_2, SyncMode.incremental), + ] + ) + + expected = _fix_emitted_at( + [ + # stream 1 slice 1 + stream_data_to_airbyte_message("s1", stream_output[0]), + stream_data_to_airbyte_message("s1", stream_output[1]), + stream_data_to_airbyte_message("s1", stream_output[2]), + _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + stream_data_to_airbyte_message("s1", stream_output[3]), + _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + # stream 1 slice 2 + stream_data_to_airbyte_message("s1", stream_output[0]), + stream_data_to_airbyte_message("s1", stream_output[1]), + stream_data_to_airbyte_message("s1", stream_output[2]), + _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + stream_data_to_airbyte_message("s1", stream_output[3]), + _as_state({"s1": state}, "s1", state) if per_stream_enabled else _as_state({"s1": state}), + # stream 2 slice 1 + stream_data_to_airbyte_message("s2", stream_output[0]), + stream_data_to_airbyte_message("s2", stream_output[1]), + stream_data_to_airbyte_message("s2", stream_output[2]), + _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + stream_data_to_airbyte_message("s2", stream_output[3]), + _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + # stream 2 slice 2 + stream_data_to_airbyte_message("s2", stream_output[0]), + stream_data_to_airbyte_message("s2", stream_output[1]), + stream_data_to_airbyte_message("s2", stream_output[2]), + _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": 
state}), + stream_data_to_airbyte_message("s2", stream_output[3]), + _as_state({"s1": state, "s2": state}, "s2", state) if per_stream_enabled else _as_state({"s1": state, "s2": state}), + ] + ) + + messages = _fix_emitted_at(list(src.read(logger, {}, catalog, state=input_state))) + + assert expected == messages + def test_checkpoint_state_from_stream_instance(): teams_stream = MockStreamOverridesStateMethod() diff --git a/airbyte-cdk/python/unit_tests/sources/test_source.py b/airbyte-cdk/python/unit_tests/sources/test_source.py index c81b794e5af63..1034975c1892c 100644 --- a/airbyte-cdk/python/unit_tests/sources/test_source.py +++ b/airbyte-cdk/python/unit_tests/sources/test_source.py @@ -425,8 +425,8 @@ def test_source_config_no_transform(abstract_source, catalog): records = [r for r in abstract_source.read(logger=logger_mock, config={}, catalog=catalog, state={})] assert len(records) == 2 * 5 assert [r.record.data for r in records] == [{"value": 23}] * 2 * 5 - assert http_stream.get_json_schema.call_count == 1 - assert non_http_stream.get_json_schema.call_count == 1 + assert http_stream.get_json_schema.call_count == 5 + assert non_http_stream.get_json_schema.call_count == 5 def test_source_config_transform(abstract_source, catalog): diff --git a/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py b/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py new file mode 100644 index 0000000000000..e1db000716f68 --- /dev/null +++ b/airbyte-cdk/python/unit_tests/sources/utils/test_record_helper.py @@ -0,0 +1,85 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +import pytest +from airbyte_cdk.models import ( + AirbyteLogMessage, + AirbyteMessage, + AirbyteRecordMessage, + AirbyteStateMessage, + AirbyteStateType, + AirbyteTraceMessage, + Level, + TraceType, +) +from airbyte_cdk.models import Type as MessageType +from airbyte_cdk.sources.utils.record_helper import stream_data_to_airbyte_message + +NOW = 1234567 +STREAM_NAME = "my_stream" + + +@pytest.mark.parametrize( + "test_name, data, expected_message", + [ + ( + "test_data_to_airbyte_record", + {"id": 0, "field_A": 1.0, "field_B": "airbyte"}, + AirbyteMessage( + type=MessageType.RECORD, + record=AirbyteRecordMessage(stream="my_stream", data={"id": 0, "field_A": 1.0, "field_B": "airbyte"}, emitted_at=NOW), + ), + ), + ], +) +def test_data_or_record_to_airbyte_record(test_name, data, expected_message): + transformer = MagicMock() + schema = {} + message = stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema) + message.record.emitted_at = NOW + + if isinstance(data, dict): + transformer.transform.assert_called_with(data, schema) + else: + assert not transformer.transform.called + assert expected_message == message + + +@pytest.mark.parametrize( + "test_name, data, expected_message", + [ + ( + "test_log_message_to_airbyte_record", + AirbyteLogMessage(level=Level.INFO, message="Hello, this is a log message"), + AirbyteMessage(type=MessageType.LOG, log=AirbyteLogMessage(level=Level.INFO, message="Hello, this is a log message")), + ), + ( + "test_trace_message_to_airbyte_record", + AirbyteTraceMessage(type=TraceType.ERROR, emitted_at=101), + AirbyteMessage(type=MessageType.TRACE, trace=AirbyteTraceMessage(type=TraceType.ERROR, emitted_at=101)), + ), + ], +) +def test_log_or_trace_to_message(test_name, data, expected_message): + transformer = MagicMock() + schema = {} + message = stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema) + 
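(Editor's aside, not part of the diff; the remaining assertions of this hunk continue just below.) The new unit_tests/sources/utils/test_record_helper.py introduced above exercises stream_data_to_airbyte_message, which wraps raw dicts as RECORD messages, passes log and trace objects through under their own message type, and rejects state objects. The sketch below is not the CDK's implementation, only a simplified illustration of that dispatch with made-up stand-in classes.

```python
from collections.abc import Mapping
from dataclasses import dataclass
from typing import Any, Union


# Stand-in message types; the real tests use airbyte_cdk.models instead.
@dataclass
class LogMessage:
    level: str
    message: str


@dataclass
class TraceMessage:
    type: str
    emitted_at: int


@dataclass
class Envelope:
    type: str  # "RECORD", "LOG" or "TRACE"
    payload: Any


def to_envelope(stream_name: str, data: Union[Mapping, LogMessage, TraceMessage]) -> Envelope:
    # Dispatch on the runtime type of `data`, mirroring the behaviour the tests above check:
    # raw mappings become RECORD envelopes, log/trace objects pass through, anything else is rejected.
    if isinstance(data, LogMessage):
        return Envelope(type="LOG", payload=data)
    if isinstance(data, TraceMessage):
        return Envelope(type="TRACE", payload=data)
    if isinstance(data, Mapping):
        return Envelope(type="RECORD", payload={"stream": stream_name, "data": dict(data)})
    raise ValueError(f"Unsupported data type: {type(data)!r}")


print(to_envelope("my_stream", {"id": 0, "field_A": 1.0}))
print(to_envelope("my_stream", LogMessage(level="INFO", message="hello")))
```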
+ assert not transformer.transform.called + assert expected == message + + +@pytest.mark.parametrize( + "test_name, data", + [ + ("test_log_message_to_airbyte_record", AirbyteStateMessage(type=AirbyteStateType.STREAM)), + ], +) +def test_state_message_to_message(test_name, data): + transformer = MagicMock() + schema = {} + with pytest.raises(ValueError): + stream_data_to_airbyte_message(STREAM_NAME, data, transformer, schema) diff --git a/airbyte-commons-temporal/build.gradle b/airbyte-commons-temporal/build.gradle index 1f01642c9ab7c..7e553f1c7e943 100644 --- a/airbyte-commons-temporal/build.gradle +++ b/airbyte-commons-temporal/build.gradle @@ -19,6 +19,8 @@ dependencies { implementation project(':airbyte-persistence:job-persistence') implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-worker-models') + implementation project(':airbyte-api') + implementation project(':airbyte-json-validation') testImplementation libs.temporal.testing // Needed to be able to mock final class diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java index 47ccaa36ab1ff..3a5b3628e8e38 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java @@ -30,6 +30,22 @@ @Slf4j public class ConnectionManagerUtils { + /** + * Sends a cancellation to the workflow. It swallows any exception and does not check whether the + * workflow has already been deleted when it is cancelled. + */ + public void deleteWorkflowIfItExist(final WorkflowClient client, + final UUID connectionId) { + try { + final ConnectionManagerWorkflow connectionManagerWorkflow = + client.newWorkflowStub(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); + connectionManagerWorkflow.deleteConnection(); + } catch (final Exception e) { + log.warn("The workflow is not reachable when trying to cancel it", e); + } + + } + /** * Attempts to send a signal to the existing ConnectionManagerWorkflow for the provided connection.
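(Editor's aside, not part of the diff.) The new deleteWorkflowIfItExist helper added to ConnectionManagerUtils above deliberately swallows any exception, so deleting a connection never fails just because its Temporal workflow is already gone or unreachable. That best-effort pattern is language-agnostic; a rough Python sketch, with a made-up client object rather than Temporal's SDK, might look like this:

```python
import logging
from uuid import UUID, uuid4

logger = logging.getLogger("force-delete-sketch")


class WorkflowGone(Exception):
    """Stand-in for whatever the workflow client raises when the workflow no longer exists."""


class FakeWorkflowClient:
    """Illustrative client only; the real code talks to Temporal."""

    def delete_connection(self, connection_id: UUID) -> None:
        raise WorkflowGone(f"no workflow for {connection_id}")


def force_delete_workflow(client: FakeWorkflowClient, connection_id: UUID) -> None:
    # Best effort: log and carry on if the workflow is unreachable or already deleted.
    try:
        client.delete_connection(connection_id)
    except Exception:
        logger.warning("The workflow is not reachable when trying to cancel it", exc_info=True)


force_delete_workflow(FakeWorkflowClient(), uuid4())  # does not raise
```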
* @@ -197,16 +213,21 @@ public ConnectionManagerWorkflow getConnectionManagerWorkflow(final WorkflowClie return connectionManagerWorkflow; } - boolean isWorkflowStateRunning(final WorkflowClient client, final UUID connectionId) { + Optional getWorkflowState(final WorkflowClient client, final UUID connectionId) { try { final ConnectionManagerWorkflow connectionManagerWorkflow = client.newWorkflowStub(ConnectionManagerWorkflow.class, getConnectionManagerName(connectionId)); - return connectionManagerWorkflow.getState().isRunning(); + return Optional.of(connectionManagerWorkflow.getState()); } catch (final Exception e) { - return false; + log.error("Exception thrown while checking workflow state for connection id {}", connectionId, e); + return Optional.empty(); } } + boolean isWorkflowStateRunning(final WorkflowClient client, final UUID connectionId) { + return getWorkflowState(client, connectionId).map(WorkflowState::isRunning).orElse(false); + } + public WorkflowExecutionStatus getConnectionManagerWorkflowStatus(final WorkflowClient workflowClient, final UUID connectionId) { final DescribeWorkflowExecutionRequest describeWorkflowExecutionRequest = DescribeWorkflowExecutionRequest.newBuilder() .setExecution(WorkflowExecution.newBuilder() diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java index db543d0b11d04..9abceab23c756 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java @@ -16,6 +16,7 @@ import io.airbyte.commons.temporal.scheduling.DiscoverCatalogWorkflow; import io.airbyte.commons.temporal.scheduling.SpecWorkflow; import io.airbyte.commons.temporal.scheduling.SyncWorkflow; +import io.airbyte.commons.temporal.scheduling.state.WorkflowState; import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.JobCheckConnectionConfig; import io.airbyte.config.JobDiscoverCatalogConfig; @@ -191,6 +192,10 @@ public static class ManualOperationResult { } + public Optional getWorkflowState(final UUID connectionId) { + return connectionManagerUtils.getWorkflowState(client, connectionId); + } + public ManualOperationResult startNewManualSync(final UUID connectionId) { log.info("Manual sync request"); @@ -476,13 +481,13 @@ public ConnectionManagerWorkflow submitConnectionUpdaterAsync(final UUID connect return connectionManagerWorkflow; } - public void deleteConnection(final UUID connectionId) { - try { - connectionManagerUtils.signalWorkflowAndRepairIfNecessary(client, connectionId, - connectionManagerWorkflow -> connectionManagerWorkflow::deleteConnection); - } catch (final DeletedWorkflowException e) { - log.info("Connection {} has already been deleted.", connectionId); - } + /** + * This will cancel a workflow even if the connection is deleted already + * + * @param connectionId - connectionId to cancel + */ + public void forceDeleteWorkflow(final UUID connectionId) { + connectionManagerUtils.deleteWorkflowIfItExist(client, connectionId); } public void update(final UUID connectionId) { diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java index cb8f66f3f6306..40c0a5451ae4c 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java +++ 
b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalJobType.java @@ -11,5 +11,6 @@ public enum TemporalJobType { SYNC, RESET_CONNECTION, CONNECTION_UPDATER, - REPLICATE + REPLICATE, + NOTIFY } diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionNotificationWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionNotificationWorkflow.java new file mode 100644 index 0000000000000..9aeb45634aedf --- /dev/null +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionNotificationWorkflow.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.temporal.scheduling; + +import io.airbyte.api.client.invoker.generated.ApiException; +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.validation.json.JsonValidationException; +import io.temporal.workflow.WorkflowInterface; +import io.temporal.workflow.WorkflowMethod; +import java.io.IOException; +import java.util.UUID; + +@WorkflowInterface +public interface ConnectionNotificationWorkflow { + + @WorkflowMethod + boolean sendSchemaChangeNotification(UUID connectionId) + throws IOException, InterruptedException, ApiException, ConfigNotFoundException, JsonValidationException; + +} diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java index 82b161e5eb05f..08e7fd9865520 100644 --- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java +++ b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java @@ -326,12 +326,12 @@ void migrateCalled() { @Nested @DisplayName("Test delete connection method.") - class DeleteConnection { + class ForceCancelConnection { @Test @SuppressWarnings(UNCHECKED) @DisplayName("Test delete connection method when workflow is in a running state.") - void testDeleteConnection() { + void testforceCancelConnection() { final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); final WorkflowState mWorkflowState = mock(WorkflowState.class); when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); @@ -349,54 +349,9 @@ void testDeleteConnection() { .withConfiguredAirbyteCatalog(new ConfiguredAirbyteCatalog()); temporalClient.submitSync(JOB_ID, ATTEMPT_ID, syncConfig, CONNECTION_ID); - temporalClient.deleteConnection(CONNECTION_ID); + temporalClient.forceDeleteWorkflow(CONNECTION_ID); - verify(workflowClient, Mockito.never()).newSignalWithStartRequest(); - verify(mConnectionManagerWorkflow).deleteConnection(); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test delete connection method when workflow is in an unexpected state") - void testDeleteConnectionInUnexpectedState() { - final ConnectionManagerWorkflow mTerminatedConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(mTerminatedConnectionManagerWorkflow.getState()) - .thenThrow(new IllegalStateException(EXCEPTION_MESSAGE)); - when(workflowClient.newWorkflowStub(any(Class.class), any(String.class))).thenReturn(mTerminatedConnectionManagerWorkflow); - - final ConnectionManagerWorkflow mNewConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - when(workflowClient.newWorkflowStub(any(Class.class), 
any(WorkflowOptions.class))).thenReturn(mNewConnectionManagerWorkflow); - final BatchRequest mBatchRequest = mock(BatchRequest.class); - when(workflowClient.newSignalWithStartRequest()).thenReturn(mBatchRequest); - - temporalClient.deleteConnection(CONNECTION_ID); - verify(workflowClient).signalWithStart(mBatchRequest); - - // Verify that the deleteConnection signal was passed to the batch request by capturing the - // argument, - // executing the signal, and verifying that the desired signal was executed - final ArgumentCaptor batchRequestAddArgCaptor = ArgumentCaptor.forClass(Proc.class); - verify(mBatchRequest).add(batchRequestAddArgCaptor.capture()); - final Proc signal = batchRequestAddArgCaptor.getValue(); - signal.apply(); - verify(mNewConnectionManagerWorkflow).deleteConnection(); - } - - @Test - @SuppressWarnings(UNCHECKED) - @DisplayName("Test delete connection method when workflow has already been deleted") - void testDeleteConnectionOnDeletedWorkflow() { - final ConnectionManagerWorkflow mConnectionManagerWorkflow = mock(ConnectionManagerWorkflow.class); - final WorkflowState mWorkflowState = mock(WorkflowState.class); - when(mConnectionManagerWorkflow.getState()).thenReturn(mWorkflowState); - when(mWorkflowState.isDeleted()).thenReturn(true); - when(workflowClient.newWorkflowStub(any(), anyString())).thenReturn(mConnectionManagerWorkflow); - mockWorkflowStatus(WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED); - - temporalClient.deleteConnection(CONNECTION_ID); - - verify(temporalClient).deleteConnection(CONNECTION_ID); - verifyNoMoreInteractions(temporalClient); + verify(connectionManagerUtils).deleteWorkflowIfItExist(workflowClient, CONNECTION_ID); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java index a4364b0c08488..0232234de3ddc 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/RecordSchemaValidator.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.validation.json.JsonValidationException; import io.airbyte.workers.exception.RecordSchemaValidationException; @@ -22,9 +23,9 @@ public class RecordSchemaValidator { - private final Map streams; + private final Map streams; - public RecordSchemaValidator(final Map streamNamesToSchemas) { + public RecordSchemaValidator(final Map streamNamesToSchemas) { // streams is Map of a stream source namespace + name mapped to the stream schema // for easy access when we check each record's schema this.streams = streamNamesToSchemas; @@ -37,7 +38,8 @@ public RecordSchemaValidator(final Map streamNamesToSchemas) { * @param message * @throws RecordSchemaValidationException */ - public void validateSchema(final AirbyteRecordMessage message, final String messageStream) throws RecordSchemaValidationException { + public void validateSchema(final AirbyteRecordMessage message, final AirbyteStreamNameNamespacePair messageStream) + throws RecordSchemaValidationException { final JsonNode messageData = message.getData(); final JsonNode matchingSchema = streams.get(messageStream); diff --git 
a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java index 44a1ae1073247..072bf6a08397b 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerMetricReporter.java @@ -7,6 +7,7 @@ import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClient; import io.airbyte.metrics.lib.OssMetricsRegistry; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; public class WorkerMetricReporter { @@ -21,9 +22,9 @@ public WorkerMetricReporter(final MetricClient metricClient, final String docker this.metricClient = metricClient; } - public void trackSchemaValidationError(final String stream) { + public void trackSchemaValidationError(final AirbyteStreamNameNamespacePair stream) { metricClient.count(OssMetricsRegistry.NUM_SOURCE_STREAMS_WITH_RECORD_SCHEMA_VALIDATION_ERRORS, 1, new MetricAttribute("docker_repo", dockerRepo), - new MetricAttribute("docker_version", dockerVersion), new MetricAttribute("stream", stream)); + new MetricAttribute("docker_version", dockerVersion), new MetricAttribute("stream", stream.toString())); } public void trackStateMetricTrackerError() { diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java index 81da1aab53ec6..b704ade5f6a43 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/WorkerUtils.java @@ -13,6 +13,7 @@ import io.airbyte.config.WorkerSourceConfig; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.helper.FailureHelper; @@ -22,11 +23,9 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -132,18 +131,12 @@ public static ConnectorJobOutput getJobFailureOutputOrThrow(final OutputType out throw new WorkerException(defaultErrorMessage); } - public static Map mapStreamNamesToSchemas(final StandardSyncInput syncInput) { + public static Map mapStreamNamesToSchemas(final StandardSyncInput syncInput) { return syncInput.getCatalog().getStreams().stream().collect( Collectors.toMap( - k -> { - return streamNameWithNamespace(k.getStream().getNamespace(), k.getStream().getName()); - }, + k -> AirbyteStreamNameNamespacePair.fromAirbyteStream(k.getStream()), v -> v.getStream().getJsonSchema())); } - public static String streamNameWithNamespace(final @Nullable String namespace, final String streamName) { - return Objects.toString(namespace, "").trim() + streamName.trim(); - } - } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java index 6ab4ef51bab77..ec52aeadd94eb 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java +++ 
b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/DefaultReplicationWorker.java @@ -9,6 +9,7 @@ import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.JOB_ROOT_KEY; import static io.airbyte.metrics.lib.ApmTraceConstants.WORKER_OPERATION_NAME; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import datadog.trace.api.Trace; import io.airbyte.commons.io.LineGobbler; @@ -26,6 +27,7 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.WorkerMetricReporter; import io.airbyte.workers.WorkerUtils; @@ -139,7 +141,6 @@ public final ReplicationOutput run(final StandardSyncInput syncInput, final Path destinationConfig.setCatalog(mapper.mapCatalog(destinationConfig.getCatalog())); final ThreadedTimeTracker timeTracker = new ThreadedTimeTracker(); - final long startTime = System.currentTimeMillis(); timeTracker.trackReplicationStartTime(); final AtomicReference replicationRunnableFailureRef = new AtomicReference<>(); @@ -152,207 +153,142 @@ public final ReplicationOutput run(final StandardSyncInput syncInput, final Path s -> String.format("%s - %s", s.getSyncMode(), s.getDestinationSyncMode())))); final WorkerSourceConfig sourceConfig = WorkerUtils.syncToWorkerSourceConfig(syncInput); - final Map mdc = MDC.getCopyOfContextMap(); - ApmTraceUtils.addTagsToTrace(generateTraceTags(destinationConfig, jobRoot)); - - // note: resources are closed in the opposite order in which they are declared. thus source will be - // closed first (which is what we want). 
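(Editor's aside, not part of the diff.) The WorkerUtils change above replaces the old streamNameWithNamespace string concatenation with an AirbyteStreamNameNamespacePair as the map key. Concatenation is lossy: two distinct streams can collide once namespace and name are glued together, while a proper pair keeps them apart. A small Python sketch of the difference, with illustrative keys only:

```python
from typing import NamedTuple, Optional


def concatenated_key(namespace: Optional[str], name: str) -> str:
    # Old-style key: namespace and name glued into one string (mirrors the removed helper).
    return (namespace or "").strip() + name.strip()


class StreamKey(NamedTuple):
    # New-style key: namespace and name kept as separate fields.
    namespace: Optional[str]
    name: str


# Two different streams that collide under concatenation...
print(concatenated_key("ab", "c") == concatenated_key("a", "bc"))  # True  (collision)
# ...but stay distinct when keyed as a pair.
print(StreamKey("ab", "c") == StreamKey("a", "bc"))                # False (no collision)

schemas = {StreamKey("public", "users"): {"type": "object"}}
print(schemas[StreamKey("public", "users")])
```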
- try (destination; source) { - destination.start(destinationConfig, jobRoot); - timeTracker.trackSourceReadStartTime(); - source.start(sourceConfig, jobRoot); - timeTracker.trackDestinationWriteStartTime(); - - // note: `whenComplete` is used instead of `exceptionally` so that the original exception is still - // thrown - final CompletableFuture destinationOutputThreadFuture = CompletableFuture.runAsync( - getDestinationOutputRunnable(destination, cancelled, messageTracker, mdc, timeTracker), - executors) - .whenComplete((msg, ex) -> { - if (ex != null) { - if (ex.getCause() instanceof DestinationException) { - destinationRunnableFailureRef.set(FailureHelper.destinationFailure(ex, Long.valueOf(jobId), attempt)); - } else { - destinationRunnableFailureRef.set(FailureHelper.replicationFailure(ex, Long.valueOf(jobId), attempt)); - } - } - }); - - final CompletableFuture replicationThreadFuture = CompletableFuture.runAsync( - getReplicationRunnable(source, destination, cancelled, mapper, messageTracker, mdc, recordSchemaValidator, metricReporter, timeTracker), - executors) - .whenComplete((msg, ex) -> { - if (ex != null) { - if (ex.getCause() instanceof SourceException) { - replicationRunnableFailureRef.set(FailureHelper.sourceFailure(ex, Long.valueOf(jobId), attempt)); - } else if (ex.getCause() instanceof DestinationException) { - replicationRunnableFailureRef.set(FailureHelper.destinationFailure(ex, Long.valueOf(jobId), attempt)); - } else { - replicationRunnableFailureRef.set(FailureHelper.replicationFailure(ex, Long.valueOf(jobId), attempt)); - } - } - }); - - LOGGER.info("Waiting for source and destination threads to complete."); - // CompletableFuture#allOf waits until all futures finish before returning, even if one throws an - // exception. So in order to handle exceptions from a future immediately without needing to wait for - // the other future to finish, we first call CompletableFuture#anyOf. - CompletableFuture.anyOf(replicationThreadFuture, destinationOutputThreadFuture).get(); - LOGGER.info("One of source or destination thread complete. Waiting on the other."); - CompletableFuture.allOf(replicationThreadFuture, destinationOutputThreadFuture).get(); - LOGGER.info("Source and destination threads complete."); - - } catch (final Exception e) { - hasFailed.set(true); - ApmTraceUtils.addExceptionToTrace(e); - LOGGER.error("Sync worker failed.", e); - } finally { - executors.shutdownNow(); - } - - final ReplicationStatus outputStatus; - // First check if the process was cancelled. Cancellation takes precedence over failures. 
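(Editor's aside, not part of the diff.) The orchestration code removed here, and re-introduced further down as replicate(), waits with CompletableFuture.anyOf first so that an exception from either thread surfaces immediately, then with allOf so both threads are fully drained. The same wait-for-first-then-wait-for-all idea, sketched with Python's concurrent.futures purely as an illustration:

```python
import concurrent.futures as cf
import time


def source_to_destination() -> str:
    time.sleep(0.1)
    return "replication done"


def destination_output() -> str:
    time.sleep(0.2)
    return "destination drained"


with cf.ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(source_to_destination), pool.submit(destination_output)]

    # First wait until *any* future finishes so a failure can be noticed immediately...
    done, _pending = cf.wait(futures, return_when=cf.FIRST_COMPLETED)
    for f in done:
        f.result()  # re-raises if that task already failed

    # ...then wait for the rest so nothing is left running.
    cf.wait(futures, return_when=cf.ALL_COMPLETED)
    for f in futures:
        print(f.result())  # re-raises any remaining failure
```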
- if (cancelled.get()) { - outputStatus = ReplicationStatus.CANCELLED; - } - // if the process was not cancelled but still failed, then it's an actual failure - else if (hasFailed.get()) { - outputStatus = ReplicationStatus.FAILED; - } else { - outputStatus = ReplicationStatus.COMPLETED; - } - + replicate(jobRoot, destinationConfig, timeTracker, replicationRunnableFailureRef, destinationRunnableFailureRef, sourceConfig); timeTracker.trackReplicationEndTime(); - final SyncStats totalSyncStats = new SyncStats() - .withRecordsEmitted(messageTracker.getTotalRecordsEmitted()) - .withBytesEmitted(messageTracker.getTotalBytesEmitted()) - .withSourceStateMessagesEmitted(messageTracker.getTotalSourceStateMessagesEmitted()) - .withDestinationStateMessagesEmitted(messageTracker.getTotalDestinationStateMessagesEmitted()) - .withMaxSecondsBeforeSourceStateMessageEmitted(messageTracker.getMaxSecondsToReceiveSourceStateMessage()) - .withMeanSecondsBeforeSourceStateMessageEmitted(messageTracker.getMeanSecondsToReceiveSourceStateMessage()) - .withMaxSecondsBetweenStateMessageEmittedandCommitted(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted().orElse(null)) - .withMeanSecondsBetweenStateMessageEmittedandCommitted(messageTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted().orElse(null)) - .withReplicationStartTime(timeTracker.getReplicationStartTime()) - .withReplicationEndTime(timeTracker.getReplicationEndTime()) - .withSourceReadStartTime(timeTracker.getSourceReadStartTime()) - .withSourceReadEndTime(timeTracker.getSourceReadEndTime()) - .withDestinationWriteStartTime(timeTracker.getDestinationWriteStartTime()) - .withDestinationWriteEndTime(timeTracker.getDestinationWriteEndTime()); - - if (outputStatus == ReplicationStatus.COMPLETED) { - totalSyncStats.setRecordsCommitted(totalSyncStats.getRecordsEmitted()); - } else if (messageTracker.getTotalRecordsCommitted().isPresent()) { - totalSyncStats.setRecordsCommitted(messageTracker.getTotalRecordsCommitted().get()); - } else { - LOGGER.warn("Could not reliably determine committed record counts, committed record stats will be set to null"); - totalSyncStats.setRecordsCommitted(null); - } - - // assume every stream with stats is in streamToEmittedRecords map - final List streamSyncStats = messageTracker.getStreamToEmittedRecords().keySet().stream().map(stream -> { - final SyncStats syncStats = new SyncStats() - .withRecordsEmitted(messageTracker.getStreamToEmittedRecords().get(stream)) - .withBytesEmitted(messageTracker.getStreamToEmittedBytes().get(stream)) - .withSourceStateMessagesEmitted(null) - .withDestinationStateMessagesEmitted(null); - - if (outputStatus == ReplicationStatus.COMPLETED) { - syncStats.setRecordsCommitted(messageTracker.getStreamToEmittedRecords().get(stream)); - } else if (messageTracker.getStreamToCommittedRecords().isPresent()) { - syncStats.setRecordsCommitted(messageTracker.getStreamToCommittedRecords().get().get(stream)); - } else { - syncStats.setRecordsCommitted(null); - } - return new StreamSyncStats() - .withStreamName(stream) - .withStats(syncStats); - }).collect(Collectors.toList()); - - final ReplicationAttemptSummary summary = new ReplicationAttemptSummary() - .withStatus(outputStatus) - .withRecordsSynced(messageTracker.getTotalRecordsEmitted()) // TODO (parker) remove in favor of totalRecordsEmitted - .withBytesSynced(messageTracker.getTotalBytesEmitted()) // TODO (parker) remove in favor of totalBytesEmitted - .withTotalStats(totalSyncStats) - .withStreamStats(streamSyncStats) - 
.withStartTime(startTime) - .withEndTime(System.currentTimeMillis()); - - final ReplicationOutput output = new ReplicationOutput() - .withReplicationAttemptSummary(summary) - .withOutputCatalog(destinationConfig.getCatalog()); - - // only .setFailures() if a failure occurred or if there is an AirbyteErrorTraceMessage - final FailureReason sourceFailure = replicationRunnableFailureRef.get(); - final FailureReason destinationFailure = destinationRunnableFailureRef.get(); - final FailureReason traceMessageFailure = messageTracker.errorTraceMessageFailure(Long.valueOf(jobId), attempt); - - final List failures = new ArrayList<>(); - - if (traceMessageFailure != null) { - failures.add(traceMessageFailure); - } - - if (sourceFailure != null) { - failures.add(sourceFailure); - } - if (destinationFailure != null) { - failures.add(destinationFailure); - } - if (!failures.isEmpty()) { - output.setFailures(failures); - } + return getReplicationOutput(syncInput, destinationConfig, replicationRunnableFailureRef, destinationRunnableFailureRef, timeTracker); + } catch (final Exception e) { + ApmTraceUtils.addExceptionToTrace(e); + throw new WorkerException("Sync failed", e); + } - if (messageTracker.getSourceOutputState().isPresent()) { - LOGGER.info("Source output at least one state message"); - } else { - LOGGER.info("Source did not output any state messages"); - } + } - if (messageTracker.getDestinationOutputState().isPresent()) { - LOGGER.info("State capture: Updated state to: {}", messageTracker.getDestinationOutputState()); - final State state = messageTracker.getDestinationOutputState().get(); - output.withState(state); - } else if (syncInput.getState() != null) { - LOGGER.warn("State capture: No new state, falling back on input state: {}", syncInput.getState()); - output.withState(syncInput.getState()); - } else { - LOGGER.warn("State capture: No state retained."); - } + private void replicate(Path jobRoot, + WorkerDestinationConfig destinationConfig, + ThreadedTimeTracker timeTracker, + AtomicReference replicationRunnableFailureRef, + AtomicReference destinationRunnableFailureRef, + WorkerSourceConfig sourceConfig) { + final Map mdc = MDC.getCopyOfContextMap(); + + // note: resources are closed in the opposite order in which they are declared. thus source will be + // closed first (which is what we want). 
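(Editor's aside, not part of the diff.) The comment above relies on try-with-resources closing resources in reverse declaration order, so the source is closed before the destination. Python's contextlib.ExitStack gives the same last-in-first-out guarantee; a tiny sketch:

```python
from contextlib import ExitStack, contextmanager


@contextmanager
def resource(name: str):
    print(f"open {name}")
    try:
        yield name
    finally:
        print(f"close {name}")


# Resources registered first are closed last, so "source" (registered last) closes first,
# mirroring `try (destination; source)` in the Java code around this note.
with ExitStack() as stack:
    stack.enter_context(resource("destination"))
    stack.enter_context(resource("source"))
    print("replicating...")
# Prints: open destination, open source, replicating..., close source, close destination
```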
+ try (destination; source) { + destination.start(destinationConfig, jobRoot); + timeTracker.trackSourceReadStartTime(); + source.start(sourceConfig, jobRoot); + timeTracker.trackDestinationWriteStartTime(); + + // note: `whenComplete` is used instead of `exceptionally` so that the original exception is still + // thrown + final CompletableFuture readFromDstThread = CompletableFuture.runAsync( + readFromDstRunnable(destination, cancelled, messageTracker, mdc, timeTracker), + executors) + .whenComplete((msg, ex) -> { + if (ex != null) { + if (ex.getCause() instanceof DestinationException) { + destinationRunnableFailureRef.set(FailureHelper.destinationFailure(ex, Long.valueOf(jobId), attempt)); + } else { + destinationRunnableFailureRef.set(FailureHelper.replicationFailure(ex, Long.valueOf(jobId), attempt)); + } + } + }); - if (messageTracker.getUnreliableStateTimingMetrics()) { - metricReporter.trackStateMetricTrackerError(); - } + final CompletableFuture readSrcAndWriteDstThread = CompletableFuture.runAsync( + readFromSrcAndWriteToDstRunnable(source, destination, cancelled, mapper, messageTracker, mdc, recordSchemaValidator, metricReporter, + timeTracker), + executors) + .whenComplete((msg, ex) -> { + if (ex != null) { + if (ex.getCause() instanceof SourceException) { + replicationRunnableFailureRef.set(FailureHelper.sourceFailure(ex, Long.valueOf(jobId), attempt)); + } else if (ex.getCause() instanceof DestinationException) { + replicationRunnableFailureRef.set(FailureHelper.destinationFailure(ex, Long.valueOf(jobId), attempt)); + } else { + replicationRunnableFailureRef.set(FailureHelper.replicationFailure(ex, Long.valueOf(jobId), attempt)); + } + } + }); - final ObjectMapper mapper = new ObjectMapper(); - LOGGER.info("sync summary: {}", mapper.writerWithDefaultPrettyPrinter().writeValueAsString(summary)); - LOGGER.info("failures: {}", mapper.writerWithDefaultPrettyPrinter().writeValueAsString(failures)); + LOGGER.info("Waiting for source and destination threads to complete."); + // CompletableFuture#allOf waits until all futures finish before returning, even if one throws an + // exception. So in order to handle exceptions from a future immediately without needing to wait for + // the other future to finish, we first call CompletableFuture#anyOf. + CompletableFuture.anyOf(readSrcAndWriteDstThread, readFromDstThread).get(); + LOGGER.info("One of source or destination thread complete. 
Waiting on the other."); + CompletableFuture.allOf(readSrcAndWriteDstThread, readFromDstThread).get(); + LOGGER.info("Source and destination threads complete."); - LineGobbler.endSection("REPLICATION"); - return output; } catch (final Exception e) { + hasFailed.set(true); ApmTraceUtils.addExceptionToTrace(e); - throw new WorkerException("Sync failed", e); + LOGGER.error("Sync worker failed.", e); + } finally { + executors.shutdownNow(); } + } + @SuppressWarnings("PMD.AvoidInstanceofChecksInCatchClause") + private static Runnable readFromDstRunnable(final AirbyteDestination destination, + final AtomicBoolean cancelled, + final MessageTracker messageTracker, + final Map mdc, + final ThreadedTimeTracker timeHolder) { + return () -> { + MDC.setContextMap(mdc); + LOGGER.info("Destination output thread started."); + try { + while (!cancelled.get() && !destination.isFinished()) { + final Optional messageOptional; + try { + messageOptional = destination.attemptRead(); + } catch (final Exception e) { + throw new DestinationException("Destination process read attempt failed", e); + } + if (messageOptional.isPresent()) { + LOGGER.info("State in DefaultReplicationWorker from destination: {}", messageOptional.get()); + messageTracker.acceptFromDestination(messageOptional.get()); + } + } + timeHolder.trackDestinationWriteEndTime(); + if (!cancelled.get() && destination.getExitValue() != 0) { + throw new DestinationException("Destination process exited with non-zero exit code " + destination.getExitValue()); + } + } catch (final Exception e) { + if (!cancelled.get()) { + // Although this thread is closed first, it races with the destination's closure and can attempt one + // final read after the destination is closed before it's terminated. + // This read will fail and throw an exception. Because of this, throw exceptions only if the worker + // was not cancelled. 
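(Editor's aside, not part of the diff; the catch block continues just below.) The read-from-destination thread above only re-raises when the worker was not cancelled, and it preserves DestinationException so failures can be classified correctly instead of being wrapped. A compact Python sketch of that classify-or-wrap pattern, using stand-in exception types rather than the worker's real ones:

```python
class DestinationError(Exception):
    """Stand-in for the worker's DestinationException."""


def read_once(cancelled: bool, raise_known: bool) -> None:
    try:
        if raise_known:
            raise DestinationError("destination process read attempt failed")
        raise OSError("socket closed")  # some unexpected low-level failure
    except Exception as e:
        if cancelled:
            return  # racing with shutdown: swallow the error
        if isinstance(e, DestinationError):
            raise  # surface known failures unchanged so they can be classified
        raise RuntimeError("unclassified replication failure") from e


read_once(cancelled=True, raise_known=False)  # swallowed
try:
    read_once(cancelled=False, raise_known=True)
except DestinationError as e:
    print(f"classified as destination failure: {e}")
```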
+ + if (e instanceof DestinationException) { + // Surface Destination exceptions directly so that they can be classified properly by the worker + throw e; + } else { + throw new RuntimeException(e); + } + } + } + }; } @SuppressWarnings("PMD.AvoidInstanceofChecksInCatchClause") - private static Runnable getReplicationRunnable(final AirbyteSource source, - final AirbyteDestination destination, - final AtomicBoolean cancelled, - final AirbyteMapper mapper, - final MessageTracker messageTracker, - final Map mdc, - final RecordSchemaValidator recordSchemaValidator, - final WorkerMetricReporter metricReporter, - final ThreadedTimeTracker timeHolder) { + private static Runnable readFromSrcAndWriteToDstRunnable(final AirbyteSource source, + final AirbyteDestination destination, + final AtomicBoolean cancelled, + final AirbyteMapper mapper, + final MessageTracker messageTracker, + final Map mdc, + final RecordSchemaValidator recordSchemaValidator, + final WorkerMetricReporter metricReporter, + final ThreadedTimeTracker timeHolder) { return () -> { MDC.setContextMap(mdc); LOGGER.info("Replication thread started."); Long recordsRead = 0L; - final Map, Integer>> validationErrors = new HashMap<>(); + final Map, Integer>> validationErrors = new HashMap<>(); try { while (!cancelled.get() && !source.isFinished()) { final Optional messageOptional; @@ -427,15 +363,169 @@ private static Runnable getReplicationRunnable(final AirbyteSource source, }; } + private ReplicationOutput getReplicationOutput(StandardSyncInput syncInput, + WorkerDestinationConfig destinationConfig, + AtomicReference replicationRunnableFailureRef, + AtomicReference destinationRunnableFailureRef, + ThreadedTimeTracker timeTracker) + throws JsonProcessingException { + final ReplicationStatus outputStatus; + // First check if the process was cancelled. Cancellation takes precedence over failures. 
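(Editor's aside, not part of the diff; the status logic continues just below.) The new getReplicationOutput helper ranks outcomes so that cancellation wins over failure, which in turn wins over completion. Expressed as a small Python helper, purely as an illustration of that precedence:

```python
from enum import Enum


class ReplicationStatus(Enum):
    CANCELLED = "cancelled"
    FAILED = "failed"
    COMPLETED = "completed"


def output_status(cancelled: bool, has_failed: bool) -> ReplicationStatus:
    # Cancellation takes precedence over failures; anything else counts as completed.
    if cancelled:
        return ReplicationStatus.CANCELLED
    if has_failed:
        return ReplicationStatus.FAILED
    return ReplicationStatus.COMPLETED


assert output_status(True, True) is ReplicationStatus.CANCELLED
assert output_status(False, True) is ReplicationStatus.FAILED
assert output_status(False, False) is ReplicationStatus.COMPLETED
```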
+ if (cancelled.get()) { + outputStatus = ReplicationStatus.CANCELLED; + } + // if the process was not cancelled but still failed, then it's an actual failure + else if (hasFailed.get()) { + outputStatus = ReplicationStatus.FAILED; + } else { + outputStatus = ReplicationStatus.COMPLETED; + } + + final SyncStats totalSyncStats = getTotalStats(timeTracker, outputStatus); + final List streamSyncStats = getPerStreamStats(outputStatus); + + final ReplicationAttemptSummary summary = new ReplicationAttemptSummary() + .withStatus(outputStatus) + .withRecordsSynced(messageTracker.getTotalRecordsEmitted()) // TODO (parker) remove in favor of totalRecordsEmitted + .withBytesSynced(messageTracker.getTotalBytesEmitted()) // TODO (parker) remove in favor of totalBytesEmitted + .withTotalStats(totalSyncStats) + .withStreamStats(streamSyncStats) + .withStartTime(timeTracker.getReplicationStartTime()) + .withEndTime(System.currentTimeMillis()); + + final ReplicationOutput output = new ReplicationOutput() + .withReplicationAttemptSummary(summary) + .withOutputCatalog(destinationConfig.getCatalog()); + + final List failures = getFailureReasons(replicationRunnableFailureRef, destinationRunnableFailureRef, + output); + + prepStateForLaterSaving(syncInput, output); + + final ObjectMapper mapper = new ObjectMapper(); + LOGGER.info("sync summary: {}", mapper.writerWithDefaultPrettyPrinter().writeValueAsString(summary)); + LOGGER.info("failures: {}", mapper.writerWithDefaultPrettyPrinter().writeValueAsString(failures)); + LineGobbler.endSection("REPLICATION"); + + return output; + } + + private SyncStats getTotalStats(ThreadedTimeTracker timeTracker, ReplicationStatus outputStatus) { + final SyncStats totalSyncStats = new SyncStats() + .withRecordsEmitted(messageTracker.getTotalRecordsEmitted()) + .withBytesEmitted(messageTracker.getTotalBytesEmitted()) + .withSourceStateMessagesEmitted(messageTracker.getTotalSourceStateMessagesEmitted()) + .withDestinationStateMessagesEmitted(messageTracker.getTotalDestinationStateMessagesEmitted()) + .withMaxSecondsBeforeSourceStateMessageEmitted(messageTracker.getMaxSecondsToReceiveSourceStateMessage()) + .withMeanSecondsBeforeSourceStateMessageEmitted(messageTracker.getMeanSecondsToReceiveSourceStateMessage()) + .withMaxSecondsBetweenStateMessageEmittedandCommitted(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted().orElse(null)) + .withMeanSecondsBetweenStateMessageEmittedandCommitted(messageTracker.getMeanSecondsBetweenStateMessageEmittedAndCommitted().orElse(null)) + .withReplicationStartTime(timeTracker.getReplicationStartTime()) + .withReplicationEndTime(timeTracker.getReplicationEndTime()) + .withSourceReadStartTime(timeTracker.getSourceReadStartTime()) + .withSourceReadEndTime(timeTracker.getSourceReadEndTime()) + .withDestinationWriteStartTime(timeTracker.getDestinationWriteStartTime()) + .withDestinationWriteEndTime(timeTracker.getDestinationWriteEndTime()); + + if (outputStatus == ReplicationStatus.COMPLETED) { + totalSyncStats.setRecordsCommitted(totalSyncStats.getRecordsEmitted()); + } else if (messageTracker.getTotalRecordsCommitted().isPresent()) { + totalSyncStats.setRecordsCommitted(messageTracker.getTotalRecordsCommitted().get()); + } else { + LOGGER.warn("Could not reliably determine committed record counts, committed record stats will be set to null"); + totalSyncStats.setRecordsCommitted(null); + } + return totalSyncStats; + } + + private List getPerStreamStats(ReplicationStatus outputStatus) { + // assume every stream with stats is in 
streamToEmittedRecords map + return messageTracker.getStreamToEmittedRecords().keySet().stream().map(stream -> { + final SyncStats syncStats = new SyncStats() + .withRecordsEmitted(messageTracker.getStreamToEmittedRecords().get(stream)) + .withBytesEmitted(messageTracker.getStreamToEmittedBytes().get(stream)) + .withSourceStateMessagesEmitted(null) + .withDestinationStateMessagesEmitted(null); + + if (outputStatus == ReplicationStatus.COMPLETED) { + syncStats.setRecordsCommitted(messageTracker.getStreamToEmittedRecords().get(stream)); + } else if (messageTracker.getStreamToCommittedRecords().isPresent()) { + syncStats.setRecordsCommitted(messageTracker.getStreamToCommittedRecords().get().get(stream)); + } else { + syncStats.setRecordsCommitted(null); + } + return new StreamSyncStats() + .withStreamName(stream.getName()) + .withStreamNamespace(stream.getNamespace()) + .withStats(syncStats); + }).collect(Collectors.toList()); + } + + /** + * Extracts state out to the {@link ReplicationOutput} so it can be later saved in the + * PersistStateActivity - State is NOT SAVED here. + * + * @param syncInput + * @param output + */ + private void prepStateForLaterSaving(StandardSyncInput syncInput, ReplicationOutput output) { + if (messageTracker.getSourceOutputState().isPresent()) { + LOGGER.info("Source output at least one state message"); + } else { + LOGGER.info("Source did not output any state messages"); + } + + if (messageTracker.getDestinationOutputState().isPresent()) { + LOGGER.info("State capture: Updated state to: {}", messageTracker.getDestinationOutputState()); + final State state = messageTracker.getDestinationOutputState().get(); + output.withState(state); + } else if (syncInput.getState() != null) { + LOGGER.warn("State capture: No new state, falling back on input state: {}", syncInput.getState()); + output.withState(syncInput.getState()); + } else { + LOGGER.warn("State capture: No state retained."); + } + + if (messageTracker.getUnreliableStateTimingMetrics()) { + metricReporter.trackStateMetricTrackerError(); + } + } + + private List getFailureReasons(AtomicReference replicationRunnableFailureRef, + AtomicReference destinationRunnableFailureRef, + ReplicationOutput output) { + // only .setFailures() if a failure occurred or if there is an AirbyteErrorTraceMessage + final FailureReason sourceFailure = replicationRunnableFailureRef.get(); + final FailureReason destinationFailure = destinationRunnableFailureRef.get(); + final FailureReason traceMessageFailure = messageTracker.errorTraceMessageFailure(Long.valueOf(jobId), attempt); + + final List failures = new ArrayList<>(); + + if (traceMessageFailure != null) { + failures.add(traceMessageFailure); + } + + if (sourceFailure != null) { + failures.add(sourceFailure); + } + if (destinationFailure != null) { + failures.add(destinationFailure); + } + if (!failures.isEmpty()) { + output.setFailures(failures); + } + return failures; + } + private static void validateSchema(final RecordSchemaValidator recordSchemaValidator, - final Map, Integer>> validationErrors, + final Map, Integer>> validationErrors, final AirbyteMessage message) { if (message.getRecord() == null) { return; } final AirbyteRecordMessage record = message.getRecord(); - final String messageStream = WorkerUtils.streamNameWithNamespace(record.getNamespace(), record.getStream()); + final AirbyteStreamNameNamespacePair messageStream = AirbyteStreamNameNamespacePair.fromRecordMessage(record); // avoid noise by validating only if the stream has less than 10 records with 
validation errors final boolean streamHasLessThenTenErrs = validationErrors.get(messageStream) == null || validationErrors.get(messageStream).getRight() < 10; @@ -457,50 +547,6 @@ private static void validateSchema(final RecordSchemaValidator recordSchemaValid } } - @SuppressWarnings("PMD.AvoidInstanceofChecksInCatchClause") - private static Runnable getDestinationOutputRunnable(final AirbyteDestination destination, - final AtomicBoolean cancelled, - final MessageTracker messageTracker, - final Map mdc, - final ThreadedTimeTracker timeHolder) { - return () -> { - MDC.setContextMap(mdc); - LOGGER.info("Destination output thread started."); - try { - while (!cancelled.get() && !destination.isFinished()) { - final Optional messageOptional; - try { - messageOptional = destination.attemptRead(); - } catch (final Exception e) { - throw new DestinationException("Destination process read attempt failed", e); - } - if (messageOptional.isPresent()) { - LOGGER.info("State in DefaultReplicationWorker from destination: {}", messageOptional.get()); - messageTracker.acceptFromDestination(messageOptional.get()); - } - } - timeHolder.trackDestinationWriteEndTime(); - if (!cancelled.get() && destination.getExitValue() != 0) { - throw new DestinationException("Destination process exited with non-zero exit code " + destination.getExitValue()); - } - } catch (final Exception e) { - if (!cancelled.get()) { - // Although this thread is closed first, it races with the destination's closure and can attempt one - // final read after the destination is closed before it's terminated. - // This read will fail and throw an exception. Because of this, throw exceptions only if the worker - // was not cancelled. - - if (e instanceof DestinationException) { - // Surface Destination exceptions directly so that they can be classified properly by the worker - throw e; - } else { - throw new RuntimeException(e); - } - } - } - }; - } - @Trace(operationName = WORKER_OPERATION_NAME) @Override public void cancel() { diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageTracker.java index aa4b348887aeb..f7d8caa4be698 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageTracker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/AirbyteMessageTracker.java @@ -23,6 +23,7 @@ import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.internal.StateMetricsTracker.StateMetricsTrackerNoStateMatchException; @@ -33,6 +34,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; @@ -48,7 +50,7 @@ public class AirbyteMessageTracker implements MessageTracker { private final AtomicReference destinationOutputState; private final Map streamToRunningCount; private final HashFunction hashFunction; - private final BiMap streamNameToIndex; + private final BiMap nameNamespacePairToIndex; private final Map streamToTotalBytesEmitted; private final Map streamToTotalRecordsEmitted; private 
final StateDeltaTracker stateDeltaTracker; @@ -89,7 +91,7 @@ protected AirbyteMessageTracker(final StateDeltaTracker stateDeltaTracker, this.sourceOutputState = new AtomicReference<>(); this.destinationOutputState = new AtomicReference<>(); this.streamToRunningCount = new HashMap<>(); - this.streamNameToIndex = HashBiMap.create(); + this.nameNamespacePairToIndex = HashBiMap.create(); this.hashFunction = Hashing.murmur3_32_fixed(); this.streamToTotalBytesEmitted = new HashMap<>(); this.streamToTotalRecordsEmitted = new HashMap<>(); @@ -139,7 +141,7 @@ private void handleSourceEmittedRecord(final AirbyteRecordMessage recordMessage) stateMetricsTracker.setFirstRecordReceivedAt(LocalDateTime.now()); } - final short streamIndex = getStreamIndex(recordMessage.getStream()); + final short streamIndex = getStreamIndex(AirbyteStreamNameNamespacePair.fromRecordMessage(recordMessage)); final long currentRunningCount = streamToRunningCount.getOrDefault(streamIndex, 0L); streamToRunningCount.put(streamIndex, currentRunningCount + 1); @@ -250,6 +252,7 @@ private void handleEmittedOrchestratorConnectorConfig(final AirbyteControlConnec */ private void handleEmittedTrace(final AirbyteTraceMessage traceMessage, final ConnectorType connectorType) { switch (traceMessage.getType()) { + case ESTIMATE -> handleEmittedEstimateTrace(traceMessage, connectorType); case ERROR -> handleEmittedErrorTrace(traceMessage, connectorType); default -> log.warn("Invalid message type for trace message: {}", traceMessage); } @@ -263,12 +266,17 @@ private void handleEmittedErrorTrace(final AirbyteTraceMessage errorTraceMessage } } - private short getStreamIndex(final String streamName) { - if (!streamNameToIndex.containsKey(streamName)) { - streamNameToIndex.put(streamName, nextStreamIndex); + @SuppressWarnings("PMD") // until method is implemented + private void handleEmittedEstimateTrace(final AirbyteTraceMessage estimateTraceMessage, final ConnectorType connectorType) { + + } + + private short getStreamIndex(final AirbyteStreamNameNamespacePair pair) { + if (!nameNamespacePairToIndex.containsKey(pair)) { + nameNamespacePairToIndex.put(pair, nextStreamIndex); nextStreamIndex++; } - return streamNameToIndex.get(streamName); + return nameNamespacePairToIndex.get(pair); } private int getStateHashCode(final AirbyteStateMessage stateMessage) { @@ -341,36 +349,32 @@ public Optional getDestinationOutputState() { * because committed record counts cannot be reliably computed. */ @Override - public Optional> getStreamToCommittedRecords() { + public Optional> getStreamToCommittedRecords() { if (unreliableCommittedCounts) { return Optional.empty(); } final Map streamIndexToCommittedRecordCount = stateDeltaTracker.getStreamToCommittedRecords(); return Optional.of( streamIndexToCommittedRecordCount.entrySet().stream().collect( - Collectors.toMap( - entry -> streamNameToIndex.inverse().get(entry.getKey()), - Map.Entry::getValue))); + Collectors.toMap(entry -> nameNamespacePairToIndex.inverse().get(entry.getKey()), Entry::getValue))); } /** * Swap out stream indices for stream names and return total records emitted by stream. 
*/ @Override - public Map getStreamToEmittedRecords() { + public Map getStreamToEmittedRecords() { return streamToTotalRecordsEmitted.entrySet().stream().collect(Collectors.toMap( - entry -> streamNameToIndex.inverse().get(entry.getKey()), - Map.Entry::getValue)); + entry -> nameNamespacePairToIndex.inverse().get(entry.getKey()), Entry::getValue)); } /** * Swap out stream indices for stream names and return total bytes emitted by stream. */ @Override - public Map getStreamToEmittedBytes() { + public Map getStreamToEmittedBytes() { return streamToTotalBytesEmitted.entrySet().stream().collect(Collectors.toMap( - entry -> streamNameToIndex.inverse().get(entry.getKey()), - Map.Entry::getValue)); + entry -> nameNamespacePairToIndex.inverse().get(entry.getKey()), Entry::getValue)); } /** diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java index fe01eeb2f0d40..d776dfba1a954 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteDestination.java @@ -36,7 +36,7 @@ public class DefaultAirbyteDestination implements AirbyteDestination { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultAirbyteDestination.class); - private static final MdcScope.Builder CONTAINER_LOG_MDC_BUILDER = new Builder() + public static final MdcScope.Builder CONTAINER_LOG_MDC_BUILDER = new Builder() .setLogPrefix("destination") .setPrefixColor(Color.YELLOW_BACKGROUND); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java index 9da89aa0b75f6..452fb439ab14e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/DefaultAirbyteSource.java @@ -39,7 +39,7 @@ public class DefaultAirbyteSource implements AirbyteSource { private static final Duration HEARTBEAT_FRESH_DURATION = Duration.of(5, ChronoUnit.MINUTES); private static final Duration GRACEFUL_SHUTDOWN_DURATION = Duration.of(1, ChronoUnit.MINUTES); - private static final MdcScope.Builder CONTAINER_LOG_MDC_BUILDER = new Builder() + public static final MdcScope.Builder CONTAINER_LOG_MDC_BUILDER = new Builder() .setLogPrefix("source") .setPrefixColor(Color.BLUE_BACKGROUND); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageTracker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageTracker.java index 86994fd785c85..09507ec7a374e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageTracker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/MessageTracker.java @@ -7,6 +7,7 @@ import io.airbyte.config.FailureReason; import io.airbyte.config.State; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteTraceMessage; import java.util.Map; import java.util.Optional; @@ -55,7 +56,7 @@ public interface MessageTracker { * @return returns a map of committed record count by stream name. If committed record counts cannot * be computed, empty. 
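The tracker and MessageTracker changes above key per-stream counts by an AirbyteStreamNameNamespacePair instead of a bare stream name, so streams that share a name across namespaces no longer collide. A small illustration of that keying, using a local record as a stand-in for the real pair class:

import java.util.HashMap;
import java.util.Map;

final class StreamKeySketch {

  // Local stand-in for io.airbyte.protocol.models.AirbyteStreamNameNamespacePair.
  record StreamKey(String name, String namespace) {}

  public static void main(final String[] args) {
    final Map<StreamKey, Long> emittedRecords = new HashMap<>();
    // Same stream name in two namespaces: with name-only keys these counts would merge into one entry.
    emittedRecords.merge(new StreamKey("user_preferences", "namespace"), 1L, Long::sum);
    emittedRecords.merge(new StreamKey("user_preferences", "namespace2"), 1L, Long::sum);
    emittedRecords.merge(new StreamKey("user_preferences", "namespace"), 1L, Long::sum);
    System.out.println(emittedRecords); // two entries: namespace -> 2, namespace2 -> 1
  }
}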
*/ - Optional> getStreamToCommittedRecords(); + Optional> getStreamToCommittedRecords(); /** * Get the per-stream emitted record count. This includes messages that were emitted by the source, @@ -63,7 +64,7 @@ public interface MessageTracker { * * @return returns a map of emitted record count by stream name. */ - Map getStreamToEmittedRecords(); + Map getStreamToEmittedRecords(); /** * Get the per-stream emitted byte count. This includes messages that were emitted by the source, @@ -71,7 +72,7 @@ public interface MessageTracker { * * @return returns a map of emitted record count by stream name. */ - Map getStreamToEmittedBytes(); + Map getStreamToEmittedBytes(); /** * Get the overall emitted record count. This includes messages that were emitted by the source, but diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java index ce593c0b2d54a..ca3902cec66d1 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/AsyncOrchestratorPodProcess.java @@ -39,10 +39,10 @@ * application. Unlike {@link KubePodProcess} there is no heartbeat mechanism that requires the * launching pod and the launched pod to co-exist for the duration of execution for the launched * pod. - * + *

* Instead, this process creates the pod and interacts with a document store on cloud storage to * understand the state of the created pod. - * + *

* The document store is considered to be the truth when retrieving the status for an async pod * process. If the store isn't updated by the underlying pod, it will appear as failed. */ @@ -190,10 +190,12 @@ public boolean hasExited() { public boolean waitFor(final long timeout, final TimeUnit unit) throws InterruptedException { // implementation copied from Process.java since this isn't a real Process long remainingNanos = unit.toNanos(timeout); - if (hasExited()) + if (hasExited()) { return true; - if (timeout <= 0) + } + if (timeout <= 0) { return false; + } final long deadline = System.nanoTime() + remainingNanos; do { @@ -202,8 +204,9 @@ public boolean waitFor(final long timeout, final TimeUnit unit) throws Interrupt // We are waiting polling every 500ms for status. The trade-off here is between how often // we poll our status storage (GCS) and how reactive we are to detect that a process is done. Thread.sleep(Math.min(TimeUnit.NANOSECONDS.toMillis(remainingNanos) + 1, 500)); - if (hasExited()) + if (hasExited()) { return true; + } remainingNanos = deadline - System.nanoTime(); } while (remainingNanos > 0); @@ -236,7 +239,7 @@ private boolean checkStatus(final AsyncKubePodStatus status) { /** * Checks terminal states first, then running, then initialized. Defaults to not started. - * + *

* The order matters here! */ public AsyncKubePodStatus getDocStoreStatus() { @@ -257,7 +260,8 @@ public AsyncKubePodStatus getDocStoreStatus() { public void create(final Map allLabels, final ResourceRequirements resourceRequirements, final Map fileMap, - final Map portMap) { + final Map portMap, + final Map nodeSelectors) { final List volumes = new ArrayList<>(); final List volumeMounts = new ArrayList<>(); final List envVars = new ArrayList<>(); @@ -298,6 +302,33 @@ public void create(final Map allLabels, final List containerPorts = KubePodProcess.createContainerPortList(portMap); containerPorts.add(new ContainerPort(serverPort, null, null, null, null)); + final var initContainer = new ContainerBuilder() + .withName(KubePodProcess.INIT_CONTAINER_NAME) + .withImage("busybox:1.35") + .withVolumeMounts(volumeMounts) + .withCommand(List.of( + "sh", + "-c", + String.format(""" + i=0 + until [ $i -gt 60 ] + do + echo "$i - waiting for config file transfer to complete..." + # check if the upload-complete file exists, if so exit without error + if [ -f "%s/%s" ]; then + exit 0 + fi + i=$((i+1)) + sleep 1 + done + echo "config files did not transfer in time" + # no upload-complete file was created in time, exit with error + exit 1 + """, + KubePodProcess.CONFIG_DIR, + KubePodProcess.SUCCESS_FILE_NAME))) + .build(); + final var mainContainer = new ContainerBuilder() .withName(KubePodProcess.MAIN_CONTAINER_NAME) .withImage(kubePodInfo.mainContainerInfo().image()) @@ -316,10 +347,13 @@ public void create(final Map allLabels, .withLabels(allLabels) .endMetadata() .withNewSpec() - .withServiceAccount("airbyte-admin").withAutomountServiceAccountToken(true) + .withServiceAccount("airbyte-admin") + .withAutomountServiceAccountToken(true) .withRestartPolicy("Never") .withContainers(mainContainer) + .withInitContainers(initContainer) .withVolumes(volumes) + .withNodeSelector(nodeSelectors) .endSpec() .build(); @@ -332,9 +366,9 @@ public void create(final Map allLabels, kubernetesClient.pods() .inNamespace(kubePodInfo.namespace()) .withName(kubePodInfo.name()) - .waitUntilCondition(p -> { - return !p.getStatus().getContainerStatuses().isEmpty() && p.getStatus().getContainerStatuses().get(0).getState().getWaiting() == null; - }, 5, TimeUnit.MINUTES); + .waitUntilCondition(p -> !p.getStatus().getInitContainerStatuses().isEmpty() + && p.getStatus().getInitContainerStatuses().get(0).getState().getWaiting() == null, + 5, TimeUnit.MINUTES); final var podStatus = kubernetesClient.pods() .inNamespace(kubePodInfo.namespace()) @@ -343,7 +377,7 @@ public void create(final Map allLabels, .getStatus(); final var containerState = podStatus - .getContainerStatuses() + .getInitContainerStatuses() .get(0) .getState(); @@ -378,7 +412,7 @@ public static void copyFilesToKubeConfigVolumeMain(final Pod podDefinition, fina // several issues with copying files. See https://github.com/airbytehq/airbyte/issues/8643 for // details. 
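The create() hunk above adds a busybox init container whose only job is to wait for a success marker file, giving the kubectl cp transfer of config files time to complete before the main container starts (the hunk also threads the worker's node selectors into the pod spec). The same handshake, expressed as a plain Java poll loop with illustrative paths rather than the orchestrator's actual constants:

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.TimeUnit;

final class ConfigTransferWaitSketch {

  // Poll for a marker file, like the init container's shell loop: succeed as soon as the marker
  // exists, give up once the timeout elapses.
  static boolean waitForSuccessFile(final Path configDir, final String successFileName, final int timeoutSeconds)
      throws InterruptedException {
    final Path marker = configDir.resolve(successFileName);
    for (int i = 0; i <= timeoutSeconds; i++) {
      if (Files.exists(marker)) {
        return true;
      }
      TimeUnit.SECONDS.sleep(1);
    }
    return false;
  }

  public static void main(final String[] args) throws InterruptedException {
    // Illustrative values; the real pod uses KubePodProcess.CONFIG_DIR and SUCCESS_FILE_NAME.
    System.out.println(waitForSuccessFile(Path.of("/tmp"), "upload-complete", 5));
  }
}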
final String command = String.format("kubectl cp %s %s/%s:%s -c %s", tmpFile, podDefinition.getMetadata().getNamespace(), - podDefinition.getMetadata().getName(), containerPath, "main"); + podDefinition.getMetadata().getName(), containerPath, KubePodProcess.INIT_CONTAINER_NAME); log.info(command); proc = Runtime.getRuntime().exec(command); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java index 793b0f0dc47bb..ae57868a69090 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/process/KubePodProcess.java @@ -67,7 +67,7 @@ * parent process starting a Kube Pod Process needs to exist within the Kube networking space. This * is so the parent process can forward data into the child's stdin and read the child's stdout and * stderr streams and copy configuration files over. - * + *

* This is made possible by: *
* 1) An init container that creates 3 named pipes corresponding to stdin, stdout and std err on @@ -91,7 +91,7 @@ *
* The docker image used for this pod process must expose a AIRBYTE_ENTRYPOINT which contains the * entrypoint we will wrap when creating the main container in the pod. - * + *

* See the constructor for more information. */ @@ -104,7 +104,7 @@ public class KubePodProcess extends Process implements KubePod { private static final Logger LOGGER = LoggerFactory.getLogger(KubePodProcess.class); public static final String MAIN_CONTAINER_NAME = "main"; - private static final String INIT_CONTAINER_NAME = "init"; + public static final String INIT_CONTAINER_NAME = "init"; private static final String DEFAULT_MEMORY_REQUEST = "25Mi"; private static final String DEFAULT_MEMORY_LIMIT = "50Mi"; private static final String DEFAULT_CPU_REQUEST = "0.1"; @@ -701,7 +701,7 @@ public KubePodInfo getInfo() { /** * Close all open resource in the opposite order of resource creation. - * + *

* Null checks exist because certain local Kube clusters (e.g. Docker for Desktop) back this * implementation with OS processes and resources, which are automatically reaped by the OS. */ diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java index 184b1f7ea23a8..efad287d22b5e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/DbtLauncherWorker.java @@ -42,7 +42,8 @@ public DbtLauncherWorker(final UUID connectionId, Void.class, activityContext, serverPort, - temporalUtils); + temporalUtils, + workerConfigs); } } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java index c433f922ce50d..839ca31d4df5e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/LauncherWorker.java @@ -21,6 +21,7 @@ import io.airbyte.persistence.job.models.JobRunConfig; import io.airbyte.workers.ContainerOrchestratorConfig; import io.airbyte.workers.Worker; +import io.airbyte.workers.WorkerConfigs; import io.airbyte.workers.WorkerConstants; import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.process.AsyncKubePodStatus; @@ -70,6 +71,7 @@ public class LauncherWorker implements Worker { private final Supplier activityContext; private final Integer serverPort; private final TemporalUtils temporalUtils; + private final WorkerConfigs workerConfigs; private final AtomicBoolean cancelled = new AtomicBoolean(false); private AsyncOrchestratorPodProcess process; @@ -84,7 +86,8 @@ public LauncherWorker(final UUID connectionId, final Class outputClass, final Supplier activityContext, final Integer serverPort, - final TemporalUtils temporalUtils) { + final TemporalUtils temporalUtils, + final WorkerConfigs workerConfigs) { this.connectionId = connectionId; this.application = application; @@ -97,6 +100,7 @@ public LauncherWorker(final UUID connectionId, this.activityContext = activityContext; this.serverPort = serverPort; this.temporalUtils = temporalUtils; + this.workerConfigs = workerConfigs; } @Trace(operationName = WORKER_OPERATION_NAME) @@ -174,7 +178,8 @@ public OUTPUT run(final INPUT input, final Path jobRoot) throws WorkerException allLabels, resourceRequirements, fileMap, - portMap); + portMap, + workerConfigs.getworkerKubeNodeSelectors()); } catch (final KubernetesClientException e) { ApmTraceUtils.addExceptionToTrace(e); throw new WorkerException( diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java index 92149919037f9..1c47a5435baf2 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/NormalizationLauncherWorker.java @@ -43,7 +43,8 @@ public NormalizationLauncherWorker(final UUID connectionId, NormalizationSummary.class, activityContext, serverPort, - temporalUtils); + temporalUtils, + workerConfigs); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java 
b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java index 0851322dae44d..da9767b68164e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/ReplicationLauncherWorker.java @@ -12,6 +12,7 @@ import io.airbyte.persistence.job.models.IntegrationLauncherConfig; import io.airbyte.persistence.job.models.JobRunConfig; import io.airbyte.workers.ContainerOrchestratorConfig; +import io.airbyte.workers.WorkerConfigs; import io.temporal.activity.ActivityExecutionContext; import java.util.Map; import java.util.UUID; @@ -37,7 +38,8 @@ public ReplicationLauncherWorker(final UUID connectionId, final ResourceRequirements resourceRequirements, final Supplier activityContext, final Integer serverPort, - final TemporalUtils temporalUtils) { + final TemporalUtils temporalUtils, + final WorkerConfigs workerConfigs) { super( connectionId, REPLICATION, @@ -51,7 +53,8 @@ public ReplicationLauncherWorker(final UUID connectionId, ReplicationOutput.class, activityContext, serverPort, - temporalUtils); + temporalUtils, + workerConfigs); } } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java index 3412b5573e0f4..7bc4920d407ab 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/RecordSchemaValidatorTest.java @@ -9,6 +9,7 @@ import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncInput; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.workers.exception.RecordSchemaValidationException; import io.airbyte.workers.test_utils.AirbyteMessageUtils; import io.airbyte.workers.test_utils.TestConfigHelpers; @@ -34,13 +35,14 @@ void setup() throws Exception { @Test void testValidateValidSchema() throws Exception { final RecordSchemaValidator recordSchemaValidator = new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput)); - recordSchemaValidator.validateSchema(VALID_RECORD.getRecord(), STREAM_NAME); + recordSchemaValidator.validateSchema(VALID_RECORD.getRecord(), AirbyteStreamNameNamespacePair.fromRecordMessage(VALID_RECORD.getRecord())); } @Test void testValidateInvalidSchema() throws Exception { final RecordSchemaValidator recordSchemaValidator = new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput)); - assertThrows(RecordSchemaValidationException.class, () -> recordSchemaValidator.validateSchema(INVALID_RECORD.getRecord(), STREAM_NAME)); + assertThrows(RecordSchemaValidationException.class, () -> recordSchemaValidator.validateSchema(INVALID_RECORD.getRecord(), + AirbyteStreamNameNamespacePair.fromRecordMessage(INVALID_RECORD.getRecord()))); } } diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java index b0f51c46f9620..6f820d921eb71 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/WorkerUtilsTest.java @@ -16,6 +16,7 @@ import io.airbyte.config.EnvConfigs; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSyncInput; +import 
io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.workers.internal.HeartbeatMonitor; import io.airbyte.workers.test_utils.TestConfigHelpers; import java.time.Duration; @@ -128,17 +129,17 @@ void testProcessDies() { void testMapStreamNamesToSchemasWithNullNamespace() { final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(); final StandardSyncInput syncInput = syncPair.getValue(); - final Map mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput); - assertNotNull(mapOutput.get("user_preferences")); + final Map mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput); + assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", null))); } @Test void testMapStreamNamesToSchemasWithMultipleNamespaces() { final ImmutablePair syncPair = TestConfigHelpers.createSyncConfig(true); final StandardSyncInput syncInput = syncPair.getValue(); - final Map mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput); - assertNotNull(mapOutput.get("namespaceuser_preferences")); - assertNotNull(mapOutput.get("namespace2user_preferences")); + final Map mapOutput = WorkerUtils.mapStreamNamesToSchemas(syncInput); + assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", "namespace"))); + assertNotNull(mapOutput.get(new AirbyteStreamNameNamespacePair("user_preferences", "namespace2"))); } /** diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java index f6a570b46fbc7..7b915ed4943ca 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/DefaultReplicationWorkerTest.java @@ -43,6 +43,7 @@ import io.airbyte.metrics.lib.MetricClientFactory; import io.airbyte.protocol.models.AirbyteLogMessage.Level; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteTraceMessage; import io.airbyte.validation.json.JsonSchemaValidator; import io.airbyte.workers.*; @@ -90,6 +91,8 @@ class DefaultReplicationWorkerTest { private static final AirbyteTraceMessage ERROR_TRACE_MESSAGE = AirbyteMessageUtils.createErrorTraceMessage("a connector error occurred", Double.valueOf(123)); private static final String STREAM1 = "stream1"; + + private static final String NAMESPACE = "namespace"; private static final String INDUCED_EXCEPTION = "induced exception"; private Path jobRoot; @@ -160,8 +163,10 @@ void test() throws Exception { verify(destination).accept(RECORD_MESSAGE2); verify(source, atLeastOnce()).close(); verify(destination).close(); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord(), STREAM_NAME); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord(), STREAM_NAME); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord(), + AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE1.getRecord())); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord(), + AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE2.getRecord())); } @Test @@ -185,9 +190,12 @@ void testInvalidSchema() throws Exception { verify(destination).accept(RECORD_MESSAGE1); verify(destination).accept(RECORD_MESSAGE2); verify(destination).accept(RECORD_MESSAGE3); - 
verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord(), STREAM_NAME); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord(), STREAM_NAME); - verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE3.getRecord(), STREAM_NAME); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE1.getRecord(), + AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE1.getRecord())); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE2.getRecord(), + AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE2.getRecord())); + verify(recordSchemaValidator).validateSchema(RECORD_MESSAGE3.getRecord(), + AirbyteStreamNameNamespacePair.fromRecordMessage(RECORD_MESSAGE3.getRecord())); verify(source).close(); verify(destination).close(); } @@ -477,8 +485,9 @@ void testPopulatesOutputOnSuccess() throws WorkerException { when(messageTracker.getTotalBytesEmitted()).thenReturn(100L); when(messageTracker.getTotalSourceStateMessagesEmitted()).thenReturn(3L); when(messageTracker.getTotalDestinationStateMessagesEmitted()).thenReturn(1L); - when(messageTracker.getStreamToEmittedBytes()).thenReturn(Collections.singletonMap(STREAM1, 100L)); - when(messageTracker.getStreamToEmittedRecords()).thenReturn(Collections.singletonMap(STREAM1, 12L)); + when(messageTracker.getStreamToEmittedBytes()).thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 100L)); + when(messageTracker.getStreamToEmittedRecords()) + .thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 12L)); when(messageTracker.getMaxSecondsToReceiveSourceStateMessage()).thenReturn(5L); when(messageTracker.getMeanSecondsToReceiveSourceStateMessage()).thenReturn(4L); when(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()).thenReturn(Optional.of(6L)); @@ -513,6 +522,7 @@ void testPopulatesOutputOnSuccess() throws WorkerException { .withStreamStats(Collections.singletonList( new StreamSyncStats() .withStreamName(STREAM1) + .withStreamNamespace(NAMESPACE) .withStats(new SyncStats() .withBytesEmitted(100L) .withRecordsEmitted(12L) @@ -593,9 +603,11 @@ void testPopulatesStatsOnFailureIfAvailable() throws Exception { when(messageTracker.getTotalRecordsCommitted()).thenReturn(Optional.of(6L)); when(messageTracker.getTotalSourceStateMessagesEmitted()).thenReturn(3L); when(messageTracker.getTotalDestinationStateMessagesEmitted()).thenReturn(2L); - when(messageTracker.getStreamToEmittedBytes()).thenReturn(Collections.singletonMap(STREAM1, 100L)); - when(messageTracker.getStreamToEmittedRecords()).thenReturn(Collections.singletonMap(STREAM1, 12L)); - when(messageTracker.getStreamToCommittedRecords()).thenReturn(Optional.of(Collections.singletonMap(STREAM1, 6L))); + when(messageTracker.getStreamToEmittedBytes()).thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 100L)); + when(messageTracker.getStreamToEmittedRecords()) + .thenReturn(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 12L)); + when(messageTracker.getStreamToCommittedRecords()) + .thenReturn(Optional.of(Collections.singletonMap(new AirbyteStreamNameNamespacePair(STREAM1, NAMESPACE), 6L))); when(messageTracker.getMaxSecondsToReceiveSourceStateMessage()).thenReturn(10L); when(messageTracker.getMeanSecondsToReceiveSourceStateMessage()).thenReturn(8L); when(messageTracker.getMaxSecondsBetweenStateMessageEmittedAndCommitted()).thenReturn(Optional.of(12L)); @@ -625,6 +637,7 @@ void 
testPopulatesStatsOnFailureIfAvailable() throws Exception { final List expectedStreamStats = Collections.singletonList( new StreamSyncStats() .withStreamName(STREAM1) + .withStreamNamespace(NAMESPACE) .withStats(new SyncStats() .withBytesEmitted(100L) .withRecordsEmitted(12L) diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteMessageTrackerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteMessageTrackerTest.java index 313debe985ebc..5123b299453ce 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteMessageTrackerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/AirbyteMessageTrackerTest.java @@ -11,6 +11,7 @@ import io.airbyte.config.FailureReason; import io.airbyte.config.State; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.internal.StateDeltaTracker.StateDeltaTrackerException; import io.airbyte.workers.internal.state_aggregator.StateAggregator; @@ -107,10 +108,10 @@ void testEmittedRecordsByStream() { messageTracker.acceptFromSource(r3); messageTracker.acceptFromSource(r3); - final Map expected = new HashMap<>(); - expected.put(STREAM_1, 1L); - expected.put(STREAM_2, 2L); - expected.put(STREAM_3, 3L); + final HashMap expected = new HashMap<>(); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), 1L); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), 2L); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r3.getRecord()), 3L); assertEquals(expected, messageTracker.getStreamToEmittedRecords()); } @@ -132,10 +133,10 @@ void testEmittedBytesByStream() { messageTracker.acceptFromSource(r3); messageTracker.acceptFromSource(r3); - final Map expected = new HashMap<>(); - expected.put(STREAM_1, r1Bytes); - expected.put(STREAM_2, r2Bytes * 2); - expected.put(STREAM_3, r3Bytes * 3); + final Map expected = new HashMap<>(); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), r1Bytes); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), r2Bytes * 2); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r3.getRecord()), r3Bytes * 3); assertEquals(expected, messageTracker.getStreamToEmittedBytes()); } @@ -160,14 +161,14 @@ void testGetCommittedRecordsByStream() { messageTracker.acceptFromSource(s2); // emit state 2 final Map countsByIndex = new HashMap<>(); - final Map expected = new HashMap<>(); + final Map expected = new HashMap<>(); Mockito.when(mStateDeltaTracker.getStreamToCommittedRecords()).thenReturn(countsByIndex); countsByIndex.put((short) 0, 1L); countsByIndex.put((short) 1, 2L); // result only contains counts up to state 1 - expected.put(STREAM_1, 1L); - expected.put(STREAM_2, 2L); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), 1L); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), 2L); assertEquals(expected, messageTracker.getStreamToCommittedRecords().get()); countsByIndex.clear(); @@ -177,9 +178,9 @@ void testGetCommittedRecordsByStream() { countsByIndex.put((short) 1, 3L); countsByIndex.put((short) 2, 1L); // result updated with counts between state 1 and state 2 - expected.put(STREAM_1, 3L); - expected.put(STREAM_2, 3L); - expected.put(STREAM_3, 1L); + 
expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r1.getRecord()), 3L); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r2.getRecord()), 3L); + expected.put(AirbyteStreamNameNamespacePair.fromRecordMessage(r3.getRecord()), 1L); assertEquals(expected, messageTracker.getStreamToCommittedRecords().get()); } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java b/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java index a92e2c49985cd..d461175ba1b78 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/Jsons.java @@ -281,6 +281,10 @@ public static void mergeMaps(final Map originalMap, final String Entry::getValue))); } + public static Map deserializeToStringMap(JsonNode json) { + return OBJECT_MAPPER.convertValue(json, new TypeReference<>() {}); + } + /** * By the Jackson DefaultPrettyPrinter prints objects with an extra space as follows: {"name" : * "airbyte"}. We prefer {"name": "airbyte"}. diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java index d7d718dba28f8..35337c1b738f5 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/Configs.java @@ -579,6 +579,11 @@ public interface Configs { */ boolean shouldRunConnectionManagerWorkflows(); + /** + * Define if the worker should run notification workflows. Defaults to true. Internal-use only. + */ + public boolean shouldRunNotifyWorkflows(); + // Worker - Data Plane configs /** diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java index 7dd72dc36f10a..676c06c9fe58b 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/EnvConfigs.java @@ -83,6 +83,7 @@ public class EnvConfigs implements Configs { public static final String MAX_CHECK_WORKERS = "MAX_CHECK_WORKERS"; public static final String MAX_DISCOVER_WORKERS = "MAX_DISCOVER_WORKERS"; public static final String MAX_SYNC_WORKERS = "MAX_SYNC_WORKERS"; + public static final String MAX_NOTIFY_WORKERS = "MAX_NOTIFY_WORKERS"; private static final String TEMPORAL_HOST = "TEMPORAL_HOST"; private static final String TEMPORAL_WORKER_PORTS = "TEMPORAL_WORKER_PORTS"; private static final String TEMPORAL_HISTORY_RETENTION_IN_DAYS = "TEMPORAL_HISTORY_RETENTION_IN_DAYS"; @@ -135,6 +136,7 @@ public class EnvConfigs implements Configs { private static final String SHOULD_RUN_DISCOVER_WORKFLOWS = "SHOULD_RUN_DISCOVER_WORKFLOWS"; private static final String SHOULD_RUN_SYNC_WORKFLOWS = "SHOULD_RUN_SYNC_WORKFLOWS"; private static final String SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS = "SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS"; + private static final String SHOULD_RUN_NOTIFY_WORKFLOWS = "SHOULD_RUN_NOTIFY_WORKFLOWS"; // Worker - Control plane configs private static final String DEFAULT_DATA_SYNC_TASK_QUEUES = "SYNC"; // should match TemporalJobType.SYNC.name() @@ -198,6 +200,7 @@ public class EnvConfigs implements Configs { private static final long DEFAULT_MAX_CHECK_WORKERS = 5; private static final long DEFAULT_MAX_DISCOVER_WORKERS = 5; private static final long DEFAULT_MAX_SYNC_WORKERS = 5; + private static final long DEFAULT_MAX_NOTIFY_WORKERS = 5; 
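The new MAX_NOTIFY_WORKERS and SHOULD_RUN_NOTIFY_WORKFLOWS settings above follow the existing read-an-env-var-or-fall-back-to-a-default pattern. A standalone sketch of that pattern (not the EnvConfigs implementation itself):

import java.util.Optional;
import java.util.function.Function;

final class EnvDefaultsSketch {

  // Read an environment variable, falling back to a default when it is unset or blank.
  static <T> T getEnvOrDefault(final String key, final T defaultValue, final Function<String, T> parser) {
    return Optional.ofNullable(System.getenv(key))
        .filter(value -> !value.isBlank())
        .map(parser)
        .orElse(defaultValue);
  }

  public static void main(final String[] args) {
    // Defaults mirror the hunk above: 5 notify workers, notify workflows disabled.
    final int maxNotifyWorkers = getEnvOrDefault("MAX_NOTIFY_WORKERS", 5, Integer::parseInt);
    final boolean runNotifyWorkflows = getEnvOrDefault("SHOULD_RUN_NOTIFY_WORKFLOWS", false, Boolean::parseBoolean);
    System.out.println(maxNotifyWorkers + " " + runNotifyWorkflows);
  }
}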
private static final String DEFAULT_NETWORK = "host"; public static final Map> JOB_SHARED_ENVS = Map.of( @@ -918,7 +921,8 @@ public MaxWorkersConfig getMaxWorkers() { Math.toIntExact(getEnvOrDefault(MAX_SPEC_WORKERS, DEFAULT_MAX_SPEC_WORKERS)), Math.toIntExact(getEnvOrDefault(MAX_CHECK_WORKERS, DEFAULT_MAX_CHECK_WORKERS)), Math.toIntExact(getEnvOrDefault(MAX_DISCOVER_WORKERS, DEFAULT_MAX_DISCOVER_WORKERS)), - Math.toIntExact(getEnvOrDefault(MAX_SYNC_WORKERS, DEFAULT_MAX_SYNC_WORKERS))); + Math.toIntExact(getEnvOrDefault(MAX_SYNC_WORKERS, DEFAULT_MAX_SYNC_WORKERS)), + Math.toIntExact(getEnvOrDefault(MAX_NOTIFY_WORKERS, DEFAULT_MAX_NOTIFY_WORKERS))); } @Override @@ -946,6 +950,11 @@ public boolean shouldRunConnectionManagerWorkflows() { return getEnvOrDefault(SHOULD_RUN_CONNECTION_MANAGER_WORKFLOWS, true); } + @Override + public boolean shouldRunNotifyWorkflows() { + return getEnvOrDefault(SHOULD_RUN_NOTIFY_WORKFLOWS, false); + } + // Worker - Data plane @Override diff --git a/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java b/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java index 1cd3aff8145d5..7eb08a41b17d2 100644 --- a/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java +++ b/airbyte-config/config-models/src/main/java/io/airbyte/config/MaxWorkersConfig.java @@ -10,12 +10,18 @@ public class MaxWorkersConfig { private final int maxCheckWorkers; private final int maxDiscoverWorkers; private final int maxSyncWorkers; + private final int maxNotifyWorkers; - public MaxWorkersConfig(final int maxSpecWorkers, final int maxCheckWorkers, final int maxDiscoverWorkers, final int maxSyncWorkers) { + public MaxWorkersConfig(final int maxSpecWorkers, + final int maxCheckWorkers, + final int maxDiscoverWorkers, + final int maxSyncWorkers, + final int maxNotifyWorkers) { this.maxSpecWorkers = maxSpecWorkers; this.maxCheckWorkers = maxCheckWorkers; this.maxDiscoverWorkers = maxDiscoverWorkers; this.maxSyncWorkers = maxSyncWorkers; + this.maxNotifyWorkers = maxNotifyWorkers; } public int getMaxSpecWorkers() { @@ -34,6 +40,10 @@ public int getMaxSyncWorkers() { return maxSyncWorkers; } + public int getMaxNotifyWorkers() { + return maxNotifyWorkers; + } + @Override public String toString() { return "MaxWorkersConfig{" + @@ -41,6 +51,7 @@ public String toString() { ", maxCheckWorkers=" + maxCheckWorkers + ", maxDiscoverWorkers=" + maxDiscoverWorkers + ", maxSyncWorkers=" + maxSyncWorkers + + ", maxNotifyWorkers=" + maxNotifyWorkers + '}'; } diff --git a/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml b/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml index c20003f72c5dc..5ce73ce21d1d4 100644 --- a/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml +++ b/airbyte-config/config-models/src/main/resources/types/StreamSyncStats.yaml @@ -11,5 +11,8 @@ additionalProperties: false properties: streamName: type: string + # Not required as not all sources emits a namespace for each Stream. 
+ streamNamespace: + type: string stats: "$ref": SyncStats.yaml diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java index 7e53abb011629..51b37e9d77950 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ActorDefinitionMigrator.java @@ -4,7 +4,6 @@ package io.airbyte.config.persistence; -import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; import static org.jooq.impl.DSL.asterisk; @@ -32,11 +31,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.UUID; import java.util.stream.Collectors; -import java.util.stream.Stream; import org.jooq.DSLContext; -import org.jooq.Record1; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,7 +71,7 @@ void updateConfigsFromSeed(final DSLContext ctx, throws IOException { LOGGER.info("Updating connector definitions from the seed if necessary..."); - final Set connectorRepositoriesInUse = getConnectorRepositoriesInUse(ctx); + final Set connectorRepositoriesInUse = ConfigWriter.getConnectorRepositoriesInUse(ctx); LOGGER.info("Connectors in use: {}", connectorRepositoriesInUse); final Map connectorRepositoryToInfoMap = getConnectorRepositoryToInfoMap(ctx); @@ -97,29 +93,6 @@ void updateConfigsFromSeed(final DSLContext ctx, LOGGER.info("Connector definitions have been updated ({} new connectors, and {} updates)", newConnectorCount, updatedConnectorCount); } - /** - * @return A set of connectors (both source and destination) that are already used in standard - * syncs. We identify connectors by its repository name instead of definition id because - * connectors can be added manually by users, and their config ids are not always the same - * as those in the seed. - */ - private Set getConnectorRepositoriesInUse(final DSLContext ctx) { - final Set usedConnectorDefinitionIds = ctx - .select(ACTOR.ACTOR_DEFINITION_ID) - .from(ACTOR) - .fetch() - .stream() - .flatMap(row -> Stream.of(row.value1())) - .collect(Collectors.toSet()); - - return ctx.select(ACTOR_DEFINITION.DOCKER_REPOSITORY) - .from(ACTOR_DEFINITION) - .where(ACTOR_DEFINITION.ID.in(usedConnectorDefinitionIds)) - .fetch().stream() - .map(Record1::value1) - .collect(Collectors.toSet()); - } - /** * @return A map about current connectors (both source and destination). It maps from connector * repository to its definition id and docker image tag. 
We identify a connector by its diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index e800f07793043..e8f20a29a4329 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -27,6 +27,8 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.MoreBooleans; import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; +import io.airbyte.commons.version.Version; import io.airbyte.config.ActorCatalog; import io.airbyte.config.ActorCatalogFetchEvent; import io.airbyte.config.ConfigSchema; @@ -249,6 +251,10 @@ public List listStandardSourceDefinitions(final boolea return sourceDefinitions; } + public Map> getActorDefinitionToProtocolVersionMap() throws IOException { + return database.query(ConfigWriter::getActorDefinitionsInUseToProtocolVersion); + } + public List listPublicSourceDefinitions(final boolean includeTombstone) throws IOException { return listStandardActorDefinitions( ActorType.source, @@ -728,6 +734,23 @@ public void writeStandardSync(final StandardSync standardSync) throws JsonValida standardSyncPersistence.writeStandardSync(standardSync); } + /** + * For the StandardSyncs related to actorDefinitionId, clear the unsupported protocol version flag + * if both connectors are now within support range. + * + * @param actorDefinitionId the actorDefinitionId to query + * @param actorType the ActorType of actorDefinitionId + * @param supportedRange the supported range of protocol versions + */ + // We have conflicting imports here, ActorType is imported from jooq for most internal uses. Since + // this is a public method, we should be using the ActorType from airbyte-config. 
+ public void clearUnsupportedProtocolVersionFlag(final UUID actorDefinitionId, + final io.airbyte.config.ActorType actorType, + final AirbyteProtocolVersionRange supportedRange) + throws IOException { + standardSyncPersistence.clearUnsupportedProtocolVersionFlag(actorDefinitionId, actorType, supportedRange); + } + public List listStandardSyncs() throws IOException, JsonValidationException { return standardSyncPersistence.listStandardSync(); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java index efcca3a997a22..fc20758a6cadb 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigWriter.java @@ -4,8 +4,13 @@ package io.airbyte.config.persistence; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; + import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.Version; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.db.instance.configs.jooq.generated.Tables; @@ -15,8 +20,15 @@ import java.time.LocalDate; import java.time.OffsetDateTime; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.jooq.DSLContext; import org.jooq.JSONB; +import org.jooq.Record4; import org.jooq.impl.DSL; /** @@ -29,6 +41,37 @@ @SuppressWarnings("PMD.CognitiveComplexity") public class ConfigWriter { + /** + * @return A set of connectors (both source and destination) that are already used in standard + * syncs. We identify connectors by its repository name instead of definition id because + * connectors can be added manually by users, and their config ids are not always the same + * as those in the seed. + */ + static Set getConnectorRepositoriesInUse(final DSLContext ctx) { + return getActorDefinitionsInUse(ctx) + .map(r -> r.get(ACTOR_DEFINITION.DOCKER_REPOSITORY)) + .collect(Collectors.toSet()); + } + + /** + * Get a map of connector to protocol version for all the connectors that are used in a standard + * syncs. + */ + static Map> getActorDefinitionsInUseToProtocolVersion(final DSLContext ctx) { + return getActorDefinitionsInUse(ctx) + .collect(Collectors.toMap(r -> r.get(ACTOR_DEFINITION.ID), + r -> Map.entry( + r.get(ACTOR_DEFINITION.ACTOR_TYPE) == ActorType.source ? 
io.airbyte.config.ActorType.SOURCE : io.airbyte.config.ActorType.DESTINATION, + AirbyteProtocolVersion.getWithDefault(r.get(ACTOR_DEFINITION.PROTOCOL_VERSION))))); + } + + private static Stream> getActorDefinitionsInUse(final DSLContext ctx) { + return ctx.select(ACTOR_DEFINITION.ID, ACTOR_DEFINITION.DOCKER_REPOSITORY, ACTOR_DEFINITION.ACTOR_TYPE, ACTOR_DEFINITION.PROTOCOL_VERSION) + .from(ACTOR_DEFINITION) + .join(ACTOR).on(ACTOR.ACTOR_DEFINITION_ID.equal(ACTOR_DEFINITION.ID)) + .fetchStream(); + } + static void writeStandardSourceDefinition(final List configs, final DSLContext ctx) { final OffsetDateTime timestamp = OffsetDateTime.now(); configs.forEach((standardSourceDefinition) -> { diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java index 56a74d6ec30dc..5a3572a1c3cbf 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StandardSyncPersistence.java @@ -4,6 +4,8 @@ package io.airbyte.config.persistence; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.ACTOR_DEFINITION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION_OPERATION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.STATE; @@ -12,18 +14,25 @@ import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; +import io.airbyte.commons.version.Version; +import io.airbyte.config.ActorType; import io.airbyte.config.ConfigSchema; import io.airbyte.config.ConfigWithMetadata; import io.airbyte.config.StandardSync; import io.airbyte.config.helpers.ScheduleHelpers; import io.airbyte.db.Database; import io.airbyte.db.ExceptionWrappingDatabase; +import io.airbyte.db.instance.configs.jooq.generated.tables.Actor; +import io.airbyte.db.instance.configs.jooq.generated.tables.ActorDefinition; import java.io.IOException; import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.UUID; +import java.util.stream.Stream; import org.jooq.DSLContext; import org.jooq.JSONB; import org.jooq.Record; @@ -32,6 +41,13 @@ public class StandardSyncPersistence { + private record StandardSyncIdsWithProtocolVersions( + UUID standardSyncId, + UUID sourceDefId, + Version sourceProtocolVersion, + UUID destinationDefId, + Version destinationProtocolVersion) {} + private final ExceptionWrappingDatabase database; public StandardSyncPersistence(final Database database) { @@ -78,6 +94,29 @@ public void deleteStandardSync(final UUID standardSyncId) throws IOException { }); } + /** + * For the StandardSyncs related to actorDefinitionId, clear the unsupported protocol version flag + * if both connectors are now within support range. 
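+ * Only syncs currently flagged as having an unsupported protocol version and referencing the given actor definition are considered;
+ * a sync's flag is cleared only when both its source and destination protocol versions fall within supportedRange.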
+ * + * @param actorDefinitionId the actorDefinitionId to query + * @param actorType the ActorType of actorDefinitionId + * @param supportedRange the supported range of protocol versions + */ + public void clearUnsupportedProtocolVersionFlag(final UUID actorDefinitionId, + final ActorType actorType, + final AirbyteProtocolVersionRange supportedRange) + throws IOException { + final Stream candidateSyncs = database.query(ctx -> findDisabledSyncs(ctx, actorDefinitionId, actorType)); + final List standardSyncsToReEnable = candidateSyncs + .filter(sync -> supportedRange.isSupported(sync.sourceProtocolVersion()) && supportedRange.isSupported(sync.destinationProtocolVersion())) + .map(StandardSyncIdsWithProtocolVersions::standardSyncId) + .toList(); + database.query(ctx -> { + clearProtocolVersionFlag(ctx, standardSyncsToReEnable); + return null; + }); + } + private List> listStandardSyncWithMetadata(final Optional configId) throws IOException { final Result result = database.query(ctx -> { final SelectJoinStep query = ctx.select(asterisk()).from(CONNECTION); @@ -214,4 +253,41 @@ private void writeStandardSync(final StandardSync standardSync, final DSLContext } } + private Stream findDisabledSyncs(final DSLContext ctx, final UUID actorDefId, final ActorType actorType) { + // Table aliasing to help have a readable join + final Actor source = ACTOR.as("source"); + final Actor destination = ACTOR.as("destination"); + final ActorDefinition sourceDef = ACTOR_DEFINITION.as("sourceDef"); + final ActorDefinition destDef = ACTOR_DEFINITION.as("destDef"); + + // Retrieve all the connections currently disabled due to a bad protocol version + // where the actor definition is matching the one provided to this function + final Stream results = ctx + .select(CONNECTION.ID, sourceDef.ID, sourceDef.PROTOCOL_VERSION, destDef.ID, destDef.PROTOCOL_VERSION) + .from(CONNECTION) + .join(source).on(CONNECTION.SOURCE_ID.eq(source.ID)) + .join(sourceDef).on(source.ACTOR_DEFINITION_ID.eq(sourceDef.ID)) + .join(destination).on(CONNECTION.DESTINATION_ID.eq(destination.ID)) + .join(destDef).on(destination.ACTOR_DEFINITION_ID.eq(destDef.ID)) + .where( + CONNECTION.UNSUPPORTED_PROTOCOL_VERSION.eq(true).and( + (actorType == ActorType.DESTINATION ? destDef : sourceDef).ID.eq(actorDefId))) + .fetchStream() + .map(r -> new StandardSyncIdsWithProtocolVersions( + r.get(CONNECTION.ID), + r.get(sourceDef.ID), + AirbyteProtocolVersion.getWithDefault(r.get(sourceDef.PROTOCOL_VERSION)), + r.get(destDef.ID), + AirbyteProtocolVersion.getWithDefault(r.get(destDef.PROTOCOL_VERSION)))); + return results; + } + + private void clearProtocolVersionFlag(final DSLContext ctx, final List standardSyncIds) { + ctx.update(CONNECTION) + .set(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION, false) + .set(CONNECTION.UPDATED_AT, OffsetDateTime.now()) + .where(CONNECTION.ID.in(standardSyncIds)) + .execute(); + } + } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceE2ETest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceE2ETest.java new file mode 100644 index 0000000000000..1aecba632e0f2 --- /dev/null +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceE2ETest.java @@ -0,0 +1,313 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.config.persistence; + +import static io.airbyte.db.instance.configs.jooq.generated.Tables.CONNECTION; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; +import io.airbyte.commons.version.Version; +import io.airbyte.config.ActorDefinitionResourceRequirements; +import io.airbyte.config.ActorType; +import io.airbyte.config.DestinationConnection; +import io.airbyte.config.Geography; +import io.airbyte.config.JobSyncConfig.NamespaceDefinitionType; +import io.airbyte.config.ResourceRequirements; +import io.airbyte.config.SourceConnection; +import io.airbyte.config.StandardDestinationDefinition; +import io.airbyte.config.StandardSourceDefinition; +import io.airbyte.config.StandardSourceDefinition.SourceType; +import io.airbyte.config.StandardSync; +import io.airbyte.config.StandardSync.Status; +import io.airbyte.config.StandardWorkspace; +import io.airbyte.db.factory.DSLContextFactory; +import io.airbyte.db.factory.DataSourceFactory; +import io.airbyte.db.factory.FlywayFactory; +import io.airbyte.db.instance.configs.ConfigsDatabaseMigrator; +import io.airbyte.db.instance.configs.ConfigsDatabaseTestProvider; +import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.test.utils.DatabaseConnectionHelper; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; +import java.sql.SQLException; +import java.time.OffsetDateTime; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; +import org.apache.commons.lang3.tuple.ImmutablePair; +import org.apache.commons.lang3.tuple.Pair; +import org.jooq.SQLDialect; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class StandardSyncPersistenceE2ETest extends BaseDatabaseConfigPersistenceTest { + + record StandardSyncProtocolVersionFlag(UUID standardSyncId, boolean unsupportedProtocolVersion) {} + + private ConfigRepository configRepository; + + UUID workspaceId; + StandardWorkspace workspace; + StandardSourceDefinition sourceDef1; + StandardSourceDefinition sourceDef2; + SourceConnection source1; + SourceConnection source2; + StandardDestinationDefinition destDef1; + StandardDestinationDefinition destDef2; + DestinationConnection destination1; + DestinationConnection destination2; + + final AirbyteProtocolVersionRange protocolRange_0_0 = new AirbyteProtocolVersionRange(new Version("0.0.0"), new Version("0.1.0")); + final AirbyteProtocolVersionRange protocolRange_0_1 = new AirbyteProtocolVersionRange(new Version("0.0.1"), new Version("1.0.0")); + final AirbyteProtocolVersionRange protocolRange_1_1 = new AirbyteProtocolVersionRange(new Version("1.0.0"), new Version("1.10.0")); + + @BeforeEach + void beforeEach() throws Exception { + dataSource = DatabaseConnectionHelper.createDataSource(container); + dslContext = DSLContextFactory.create(dataSource, SQLDialect.POSTGRES); + flyway = FlywayFactory.create(dataSource, StandardSyncPersistenceE2ETest.class.getName(), ConfigsDatabaseMigrator.DB_IDENTIFIER, + ConfigsDatabaseMigrator.MIGRATION_FILE_LOCATION); + database = new ConfigsDatabaseTestProvider(dslContext, flyway).create(true); + truncateAllTables(); + + standardSyncPersistence = new 
StandardSyncPersistence(database); + configRepository = new ConfigRepository(database); + } + + @AfterEach + void afterEach() throws Exception { + dslContext.close(); + DataSourceFactory.close(dataSource); + } + + @Test + void testClearUnsupportedProtocolVersionFlagFromSource() throws IOException, JsonValidationException, SQLException { + createBaseObjects(); + + final StandardSync sync1 = createStandardSync(source1, destination1); + final StandardSync sync2 = createStandardSync(source1, destination2); + final List syncs = List.of(sync1, sync2); + + setProtocolVersionFlagForSyncs(List.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true))); + + // Only sync1 should be flipped since sync2 has dest2 with protocol v1 + standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDef1.getSourceDefinitionId(), ActorType.SOURCE, protocolRange_0_0); + assertEquals(Set.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)), getProtocolVersionFlagForSyncs(syncs)); + + standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDef1.getSourceDefinitionId(), ActorType.SOURCE, protocolRange_0_1); + assertEquals(Set.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs)); + + // Making sure we updated the updated_at timestamp + final Optional> datetimes = database.query(ctx -> ctx + .select(CONNECTION.CREATED_AT, CONNECTION.UPDATED_AT).from(CONNECTION).where(CONNECTION.ID.eq(sync2.getConnectionId())) + .stream().findFirst() + .map(r -> new ImmutablePair<>(r.get(CONNECTION.CREATED_AT), r.get(CONNECTION.UPDATED_AT)))); + assertTrue(datetimes.isPresent()); + assertNotEquals(datetimes.get().getLeft(), datetimes.get().getRight()); + } + + @Test + void testClearUnsupportedProtocolVersionFlagFromSourceMultiFlipAtOnce() throws IOException, JsonValidationException, SQLException { + createBaseObjects(); + + final StandardSync sync1 = createStandardSync(source1, destination1); + final StandardSync sync2 = createStandardSync(source1, destination2); + final List syncs = List.of(sync1, sync2); + + setProtocolVersionFlagForSyncs(List.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true))); + + // Making sure we flip all the connections if more than one is impacted + standardSyncPersistence.clearUnsupportedProtocolVersionFlag(sourceDef1.getSourceDefinitionId(), ActorType.SOURCE, protocolRange_0_1); + assertEquals(Set.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs)); + } + + @Test + void testClearUnsupportedProtocolVersionFlagFromDest() throws IOException, JsonValidationException, SQLException { + createBaseObjects(); + + final StandardSync sync1 = createStandardSync(source1, destination2); + final StandardSync sync2 = createStandardSync(source2, destination2); + final List syncs = List.of(sync1, sync2); + + setProtocolVersionFlagForSyncs(List.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true))); + + // destDef1 is not tied to anything, there should be no change + 
standardSyncPersistence.clearUnsupportedProtocolVersionFlag(destDef1.getDestinationDefinitionId(), ActorType.DESTINATION, protocolRange_0_1); + assertEquals(Set.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), true)), getProtocolVersionFlagForSyncs(syncs)); + + // Only sync1 should be flipped since sync2 has source1 with protocol v0 + standardSyncPersistence.clearUnsupportedProtocolVersionFlag(destDef2.getDestinationDefinitionId(), ActorType.DESTINATION, protocolRange_1_1); + assertEquals(Set.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), true), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs)); + + standardSyncPersistence.clearUnsupportedProtocolVersionFlag(destDef2.getDestinationDefinitionId(), ActorType.DESTINATION, protocolRange_0_1); + assertEquals(Set.of( + new StandardSyncProtocolVersionFlag(sync1.getConnectionId(), false), + new StandardSyncProtocolVersionFlag(sync2.getConnectionId(), false)), getProtocolVersionFlagForSyncs(syncs)); + } + + Set getProtocolVersionFlagForSyncs(final List standardSync) throws SQLException { + return database.query(ctx -> ctx + .select(CONNECTION.ID, CONNECTION.UNSUPPORTED_PROTOCOL_VERSION) + .from(CONNECTION) + .where(CONNECTION.ID.in(standardSync.stream().map(StandardSync::getConnectionId).toList())) + .fetchStream()) + .map(r -> new StandardSyncProtocolVersionFlag(r.get(CONNECTION.ID), r.get(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION))) + .collect(Collectors.toSet()); + } + + void setProtocolVersionFlagForSyncs(final List updates) throws SQLException { + final List setToTrue = + updates.stream().filter(s -> s.unsupportedProtocolVersion).map(StandardSyncProtocolVersionFlag::standardSyncId).toList(); + final List setToFalse = + updates.stream().filter(s -> !s.unsupportedProtocolVersion).map(StandardSyncProtocolVersionFlag::standardSyncId).toList(); + database.query(ctx -> { + if (!setToTrue.isEmpty()) { + ctx.update(CONNECTION) + .set(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION, true) + .where(CONNECTION.ID.in(setToTrue)) + .execute(); + } + if (!setToFalse.isEmpty()) { + ctx.update(CONNECTION) + .set(CONNECTION.UNSUPPORTED_PROTOCOL_VERSION, false) + .where(CONNECTION.ID.in(setToFalse)) + .execute(); + } + return null; + }); + } + + private void createBaseObjects() throws IOException, JsonValidationException { + workspaceId = UUID.randomUUID(); + workspace = new StandardWorkspace() + .withWorkspaceId(workspaceId) + .withName("Another Workspace") + .withSlug("another-workspace") + .withInitialSetupComplete(true) + .withTombstone(false) + .withDefaultGeography(Geography.AUTO); + configRepository.writeStandardWorkspaceNoSecrets(workspace); + + sourceDef1 = createStandardSourceDefinition("0.2.2"); + source1 = createSourceConnection(workspaceId, sourceDef1); + + sourceDef2 = createStandardSourceDefinition("1.1.0"); + source2 = createSourceConnection(workspaceId, sourceDef2); + + destDef1 = createStandardDestDefinition("0.2.3"); + destination1 = createDestinationConnection(workspaceId, destDef1); + + destDef2 = createStandardDestDefinition("1.0.0"); + destination2 = createDestinationConnection(workspaceId, destDef2); + } + + private StandardSourceDefinition createStandardSourceDefinition(final String protocolVersion) throws JsonValidationException, IOException { + final UUID sourceDefId = UUID.randomUUID(); + final StandardSourceDefinition sourceDef = new StandardSourceDefinition() + 
.withSourceDefinitionId(sourceDefId) + .withSourceType(SourceType.API) + .withName("random-source-" + sourceDefId) + .withDockerImageTag("tag-1") + .withDockerRepository("repository-1") + .withDocumentationUrl("documentation-url-1") + .withIcon("icon-1") + .withSpec(new ConnectorSpecification()) + .withProtocolVersion(protocolVersion) + .withTombstone(false) + .withPublic(true) + .withCustom(false) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); + configRepository.writeStandardSourceDefinition(sourceDef); + return sourceDef; + } + + private StandardDestinationDefinition createStandardDestDefinition(final String protocolVersion) throws JsonValidationException, IOException { + final UUID destDefId = UUID.randomUUID(); + final StandardDestinationDefinition destDef = new StandardDestinationDefinition() + .withDestinationDefinitionId(destDefId) + .withName("random-destination-" + destDefId) + .withDockerImageTag("tag-3") + .withDockerRepository("repository-3") + .withDocumentationUrl("documentation-url-3") + .withIcon("icon-3") + .withSpec(new ConnectorSpecification()) + .withProtocolVersion(protocolVersion) + .withTombstone(false) + .withPublic(true) + .withCustom(false) + .withResourceRequirements(new ActorDefinitionResourceRequirements().withDefault(new ResourceRequirements().withCpuRequest("2"))); + configRepository.writeStandardDestinationDefinition(destDef); + return destDef; + } + + private SourceConnection createSourceConnection(final UUID workspaceId, final StandardSourceDefinition sourceDef) + throws JsonValidationException, IOException { + final UUID sourceId = UUID.randomUUID(); + final SourceConnection source = new SourceConnection() + .withName("source-" + sourceId) + .withTombstone(false) + .withConfiguration(Jsons.deserialize("{}")) + .withSourceDefinitionId(sourceDef.getSourceDefinitionId()) + .withWorkspaceId(workspaceId) + .withSourceId(sourceId); + configRepository.writeSourceConnectionNoSecrets(source); + return source; + } + + private DestinationConnection createDestinationConnection(final UUID workspaceId, final StandardDestinationDefinition destDef) + throws JsonValidationException, IOException { + final UUID destinationId = UUID.randomUUID(); + final DestinationConnection dest = new DestinationConnection() + .withName("source-" + destinationId) + .withTombstone(false) + .withConfiguration(Jsons.deserialize("{}")) + .withDestinationDefinitionId(destDef.getDestinationDefinitionId()) + .withWorkspaceId(workspaceId) + .withDestinationId(destinationId); + configRepository.writeDestinationConnectionNoSecrets(dest); + return dest; + } + + private StandardSync createStandardSync(final SourceConnection source, final DestinationConnection dest) throws IOException { + final UUID connectionId = UUID.randomUUID(); + final StandardSync sync = new StandardSync() + .withConnectionId(connectionId) + .withSourceId(source.getSourceId()) + .withDestinationId(dest.getDestinationId()) + .withName("standard-sync-" + connectionId) + .withManual(true) + .withNamespaceDefinition(NamespaceDefinitionType.CUSTOMFORMAT) + .withNamespaceFormat("") + .withPrefix("") + .withStatus(Status.ACTIVE) + .withGeography(Geography.AUTO) + .withBreakingChange(false); + standardSyncPersistence.writeStandardSync(sync); + return sync; + } + +} diff --git a/airbyte-config/init/bin/main/icons/rss.svg b/airbyte-config/init/bin/main/icons/rss.svg new file mode 100644 index 0000000000000..554d682248507 --- /dev/null +++ 
b/airbyte-config/init/bin/main/icons/rss.svg @@ -0,0 +1,47 @@ diff --git a/airbyte-config/init/build.gradle b/airbyte-config/init/build.gradle index 1a6ceaee75b55..116970c2ad5eb 100644 --- a/airbyte-config/init/build.gradle +++ b/airbyte-config/init/build.gradle @@ -7,6 +7,7 @@ dependencies { implementation project(':airbyte-config:config-models') implementation project(':airbyte-config:config-persistence') + implementation project(':airbyte-persistence:job-persistence') implementation project(':airbyte-protocol:protocol-models') implementation project(':airbyte-commons-docker') implementation project(':airbyte-json-validation') diff --git a/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java b/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java index 8bd251ebd0dd8..896b8d3ced0fc 100644 --- a/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java +++ b/airbyte-config/init/src/main/java/io/airbyte/config/init/ApplyDefinitionsHelper.java @@ -4,25 +4,43 @@ package io.airbyte.config.init; +import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.persistence.job.JobPersistence; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.List; +import java.util.Optional; +import lombok.extern.slf4j.Slf4j; /** * Helper class used to apply actor definitions from a DefinitionsProvider to the database. This is * here to enable easy reuse of definition application logic in bootloader and cron.
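 * When a JobPersistence is available, the currently supported protocol version range is looked up and definitions whose spec
 * declares an unsupported protocol version are skipped (with a warning) instead of being written.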
*/ +@Slf4j public class ApplyDefinitionsHelper { private final ConfigRepository configRepository; private final DefinitionsProvider definitionsProvider; + private final JobPersistence jobPersistence; + // Remove once cloud has been migrated + @Deprecated(forRemoval = true) public ApplyDefinitionsHelper(final ConfigRepository configRepository, final DefinitionsProvider definitionsProvider) { this.configRepository = configRepository; this.definitionsProvider = definitionsProvider; + this.jobPersistence = null; + } + + public ApplyDefinitionsHelper(final ConfigRepository configRepository, + final DefinitionsProvider definitionsProvider, + final JobPersistence jobPersistence) { + this.configRepository = configRepository; + this.definitionsProvider = definitionsProvider; + this.jobPersistence = jobPersistence; } public void apply() throws JsonValidationException, IOException { @@ -35,23 +53,70 @@ public void apply() throws JsonValidationException, IOException { * @param updateAll - Whether we should overwrite all stored definitions */ public void apply(final boolean updateAll) throws JsonValidationException, IOException { + final Optional currentProtocolRange = getCurrentProtocolRange(); + if (updateAll) { final List latestSourceDefinitions = definitionsProvider.getSourceDefinitions(); - for (final StandardSourceDefinition def : latestSourceDefinitions) { + for (final StandardSourceDefinition def : filterStandardSourceDefinitions(currentProtocolRange, latestSourceDefinitions)) { configRepository.writeStandardSourceDefinition(def); } final List latestDestinationDefinitions = definitionsProvider.getDestinationDefinitions(); - for (final StandardDestinationDefinition def : latestDestinationDefinitions) { + for (final StandardDestinationDefinition def : filterStandardDestinationDefinitions(currentProtocolRange, latestDestinationDefinitions)) { configRepository.writeStandardDestinationDefinition(def); } } else { // todo (pedroslopez): Logic to apply definitions should be moved outside of the // DatabaseConfigPersistence class and behavior standardized configRepository.seedActorDefinitions( - definitionsProvider.getSourceDefinitions(), - definitionsProvider.getDestinationDefinitions()); + filterStandardSourceDefinitions(currentProtocolRange, definitionsProvider.getSourceDefinitions()), + filterStandardDestinationDefinitions(currentProtocolRange, definitionsProvider.getDestinationDefinitions())); } } + private List filterStandardDestinationDefinitions(final Optional protocolVersionRange, + final List destDefs) { + if (protocolVersionRange.isEmpty()) { + return destDefs; + } + + return destDefs.stream().filter(def -> { + final boolean isSupported = isProtocolVersionSupported(protocolVersionRange.get(), def.getSpec().getProtocolVersion()); + if (!isSupported) { + log.warn("Destination {} {} has an incompatible protocol version ({})... ignoring.", + def.getDestinationDefinitionId(), def.getName(), def.getSpec().getProtocolVersion()); + } + return isSupported; + }).toList(); + } + + private List filterStandardSourceDefinitions(final Optional protocolVersionRange, + final List sourceDefs) { + if (protocolVersionRange.isEmpty()) { + return sourceDefs; + } + + return sourceDefs.stream().filter(def -> { + final boolean isSupported = isProtocolVersionSupported(protocolVersionRange.get(), def.getSpec().getProtocolVersion()); + if (!isSupported) { + log.warn("Source {} {} has an incompatible protocol version ({})... 
ignoring.", + def.getSourceDefinitionId(), def.getName(), def.getSpec().getProtocolVersion()); + } + return isSupported; + }).toList(); + } + + private boolean isProtocolVersionSupported(final AirbyteProtocolVersionRange protocolVersionRange, final String protocolVersion) { + return protocolVersionRange.isSupported(AirbyteProtocolVersion.getWithDefault(protocolVersion)); + } + + private Optional getCurrentProtocolRange() throws IOException { + if (jobPersistence == null) { + // TODO Remove this once cloud has been migrated and job persistence is always defined + return Optional.empty(); + } + + return jobPersistence.getCurrentProtocolVersionRange(); + } + } diff --git a/airbyte-config/init/src/main/resources/icons/airtable.svg b/airbyte-config/init/src/main/resources/icons/airtable.svg index 93ce819b70ca9..cb591a5b4668d 100644 --- a/airbyte-config/init/src/main/resources/icons/airtable.svg +++ b/airbyte-config/init/src/main/resources/icons/airtable.svg @@ -1 +1,52 @@ - \ No newline at end of file + + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/icons/alloydb.svg b/airbyte-config/init/src/main/resources/icons/alloydb.svg index 12034d4b89fad..d1d79c25a2476 100644 --- a/airbyte-config/init/src/main/resources/icons/alloydb.svg +++ b/airbyte-config/init/src/main/resources/icons/alloydb.svg @@ -1 +1,93 @@ - \ No newline at end of file + + + + + + + + + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/icons/amplitude.svg b/airbyte-config/init/src/main/resources/icons/amplitude.svg index f08f394e2ca31..084f9dda9dcda 100644 --- a/airbyte-config/init/src/main/resources/icons/amplitude.svg +++ b/airbyte-config/init/src/main/resources/icons/amplitude.svg @@ -1,8 +1,50 @@ - - diff --git a/airbyte-config/init/src/main/resources/icons/apify.svg b/airbyte-config/init/src/main/resources/icons/apify.svg index 2c416e02d92cc..7f9225c5db7a2 100644 --- a/airbyte-config/init/src/main/resources/icons/apify.svg +++ b/airbyte-config/init/src/main/resources/icons/apify.svg @@ -1 +1,54 @@ - \ No newline at end of file + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/icons/appfollow.svg b/airbyte-config/init/src/main/resources/icons/appfollow.svg index a182781eec30e..5a6b8500411e0 100644 --- a/airbyte-config/init/src/main/resources/icons/appfollow.svg +++ b/airbyte-config/init/src/main/resources/icons/appfollow.svg @@ -1,18 +1,49 @@ - - - - - - - - + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/icons/asana.svg b/airbyte-config/init/src/main/resources/icons/asana.svg index 8553344f05cb2..d4922d79c8501 100644 --- a/airbyte-config/init/src/main/resources/icons/asana.svg +++ b/airbyte-config/init/src/main/resources/icons/asana.svg @@ -1,30 +1,67 @@ - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + diff --git a/airbyte-config/init/src/main/resources/icons/callrail.svg b/airbyte-config/init/src/main/resources/icons/callrail.svg new file mode 100644 index 0000000000000..09bf8ce0bb1d0 --- /dev/null +++ b/airbyte-config/init/src/main/resources/icons/callrail.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/airbyte-config/init/src/main/resources/icons/cockroachdb.svg b/airbyte-config/init/src/main/resources/icons/cockroachdb.svg index a25db6cf0deb3..070fec2bdec2b 100644 --- a/airbyte-config/init/src/main/resources/icons/cockroachdb.svg +++ b/airbyte-config/init/src/main/resources/icons/cockroachdb.svg @@ -1 +1,62 @@ - \ No newline at end of file + + + + + + + + + + + + + diff --git 
a/airbyte-config/init/src/main/resources/icons/commercetools.svg b/airbyte-config/init/src/main/resources/icons/commercetools.svg index 1332d3e3c8c06..e6702155e8824 100644 --- a/airbyte-config/init/src/main/resources/icons/commercetools.svg +++ b/airbyte-config/init/src/main/resources/icons/commercetools.svg @@ -1,8 +1,43 @@ - + - -host1:port1,host2:port2,....\ + \ Since these servers are just used for the initial connection to discover\ + \ the full cluster membership (which may change dynamically), this list\ + \ need not contain the full set of servers (you may want more than one,\ + \ though, in case a server is down)." + type: "string" + examples: + - "redpanda-broker1:9092,redpanda-broker2:9092" + buffer_memory: + title: "Buffer Memory" + description: "The total bytes of memory the producer can use to buffer records\ + \ waiting to be sent to the server." + type: "string" + examples: 33554432 + compression_type: + title: "Compression Type" + description: "The compression type for all data generated by the producer." + type: "string" + default: "none" + enum: + - "none" + - "gzip" + - "snappy" + - "lz4" + - "zstd" + batch_size: + title: "Batch Size" + description: "The producer will attempt to batch records together into fewer\ + \ requests whenever multiple records are being sent to the same partition." + type: "integer" + examples: + - 16384 + retries: + title: "Retries" + description: "Setting a value greater than zero will cause the client to\ + \ resend any record whose send fails with a potentially transient error." + type: "integer" + examples: + - 2147483647 + topic_num_partitions: + title: "Number of topic partitions" + description: "The number of topic partitions which will be created on topic\ + \ creation" + type: "integer" + examples: + - 10 + topic_replication_factor: + title: "Topic replication factor" + description: "The number of topics to which messages will be replicated" + type: "integer" + examples: + - 10 + socket_connection_setup_timeout_ms: + title: "Socket Connection Setup Timeout" + description: "The amount of time the client will wait for the socket connection\ + \ to be established." + type: "integer" + examples: + - 10000 + socket_connection_setup_timeout_max_ms: + title: "Socket Connection Setup Max Timeout" + description: "The maximum amount of time the client will wait for the socket\ + \ connection to be established. The connection setup timeout will increase\ + \ exponentially for each consecutive connection failure up to this maximum." + type: "integer" + examples: + - 30000 + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "append" - dockerImage: "airbyte/destination-rockset:0.1.4" spec: documentationUrl: "https://docs.airbyte.com/integrations/destinations/rockset" @@ -5355,6 +5366,193 @@ supported_destination_sync_modes: - "overwrite" - "append" +- dockerImage: "airbyte/destination-s3-glue:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/destinations/s3" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + properties: + access_key_id: + type: "string" + description: "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." 
+ title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" + order: 4 + format: + title: "Output Format" + type: "object" + description: "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flatten_data: + title: "Flatten Data" + description: "If true data will be flattened and won't be nested in\ + \ the _airbyte_data field" + type: "boolean" + default: true + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: "The library that your query engine will use for reading and\ + \ writing data in your lake." 
+ type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "overwrite" + - "append" - dockerImage: "airbyte/destination-sftp-json:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/destinations/sftp-json" @@ -5408,7 +5606,7 @@ supported_destination_sync_modes: - "overwrite" - "append" -- dockerImage: "airbyte/destination-snowflake:0.4.38" +- dockerImage: "airbyte/destination-snowflake:0.4.40" spec: documentationUrl: "https://docs.airbyte.com/integrations/destinations/snowflake" connectionSpecification: diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index 6b6fca59f6c04..b61bb3a5a52bd 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -21,6 +21,13 @@ icon: airtable.svg sourceType: api releaseStage: alpha +- name: Aha + sourceDefinitionId: 81ca39dc-4534-4dd2-b848-b0cfd2c11fce + dockerRepository: airbyte/source-aha + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/aha + sourceType: api + releaseStage: alpha - name: Alpha Vantage sourceDefinitionId: db385323-9333-4fec-bec3-9e0ca9326c90 dockerRepository: airbyte/source-alpha-vantage @@ -47,7 +54,7 @@ - name: Amazon Ads sourceDefinitionId: c6b0a29e-1da9-4512-9002-7bfd0cba2246 dockerRepository: airbyte/source-amazon-ads - dockerImageTag: 0.1.24 + dockerImageTag: 0.1.25 documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-ads icon: amazonads.svg sourceType: api @@ -169,6 +176,21 @@ icon: braintree.svg sourceType: api releaseStage: alpha +- name: Breezometer + sourceDefinitionId: 7c37685e-8512-4901-addf-9afbef6c0de9 + dockerRepository: airbyte/source-breezometer + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/breezometer + sourceType: api + releaseStage: alpha +- name: CallRail + sourceDefinitionId: dc98a6ad-2dd1-47b6-9529-2ec35820f9c6 + dockerRepository: airbyte/source-callrail + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/callrail + icon: callrail.svg + sourceType: api + releaseStage: alpha - name: Cart.com sourceDefinitionId: bb1a6d31-6879-4819-a2bd-3eed299ea8e2 dockerRepository: airbyte/source-cart @@ -209,6 +231,13 @@ icon: cliskhouse.svg sourceType: database releaseStage: alpha +- name: ClickUp + sourceDefinitionId: 311a7a27-3fb5-4f7e-8265-5e4afe258b66 + dockerRepository: airbyte/source-clickup-api + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/click-up + sourceType: api + releaseStage: alpha - name: Close.com sourceDefinitionId: dfffecb7-9a13-43e9-acdc-b92af7997ca9 dockerRepository: airbyte/source-close-com @@ -225,6 +254,13 @@ icon: cockroachdb.svg sourceType: database releaseStage: alpha +- name: Coda + sourceDefinitionId: 27f910fd-f832-4b2e-bcfd-6ab342e434d8 + dockerRepository: airbyte/source-coda + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/coda + sourceType: api + releaseStage: alpha - name: Coin API sourceDefinitionId: 919984ef-53a2-479b-8ffe-9c1ddb9fc3f3 dockerRepository: airbyte/source-coin-api @@ -247,6 +283,13 @@ icon: commercetools.svg sourceType: 
api releaseStage: alpha +- name: ConfigCat + sourceDefinitionId: 4fd7565c-8b99-439b-80d0-2d965e1d958c + dockerRepository: airbyte/source-configcat + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/configcat + sourceType: api + releaseStage: alpha - name: Confluence sourceDefinitionId: cf40a7f8-71f8-45ce-a7fa-fca053e4028c dockerRepository: airbyte/source-confluence @@ -262,6 +305,21 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/convertkit sourceType: api releaseStage: alpha +- name: Copper + sourceDefinitionId: 44f3002f-2df9-4f6d-b21c-02cd3b47d0dc + dockerRepository: airbyte/source-copper + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/copper + sourceType: api + releaseStage: alpha +- name: Convex + sourceDefinitionId: c332628c-f55c-4017-8222-378cfafda9b2 + dockerRepository: airbyte/source-convex + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/convex + icon: convex.svg + sourceType: api + releaseStage: alpha - name: Courier sourceDefinitionId: 0541b2cd-2367-4986-b5f1-b79ff55439e4 dockerRepository: airbyte/source-courier @@ -292,6 +350,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/datadog sourceType: api releaseStage: alpha +- name: Datascope + sourceDefinitionId: 8e1ae2d2-4790-44d3-9d83-75b3fc3940ff + dockerRepository: airbyte/source-datascope + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/datascope + sourceType: api + releaseStage: alpha - name: Delighted sourceDefinitionId: cc88c43f-6f53-4e8a-8c4d-b284baaf9635 dockerRepository: airbyte/source-delighted @@ -331,6 +396,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/dv-360 sourceType: api releaseStage: alpha +- name: DynamoDB + sourceDefinitionId: 50401137-8871-4c5a-abb7-1f5fda35545a + dockerRepository: airbyte/source-dynamodb + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/dynamodb + sourceType: api + releaseStage: alpha - name: E2E Testing sourceDefinitionId: d53f9084-fa6b-4a5a-976c-5b8392f4ad8a dockerRepository: airbyte/source-e2e-test @@ -339,6 +411,13 @@ icon: airbyte.svg sourceType: api releaseStage: alpha +- name: EmailOctopus + sourceDefinitionId: 46b25e70-c980-4590-a811-8deaf50ee09f + dockerRepository: airbyte/source-emailoctopus + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/emailoctopus + sourceType: api + releaseStage: alpha - name: Exchange Rates Api sourceDefinitionId: e2b40e36-aa0e-4bed-b41b-bcea6fa348b1 dockerRepository: airbyte/source-exchange-rates @@ -366,14 +445,21 @@ - name: Faker sourceDefinitionId: dfd88b22-b603-4c3d-aad7-3701784586b1 dockerRepository: airbyte/source-faker - dockerImageTag: 0.2.0 + dockerImageTag: 0.2.1 documentationUrl: https://docs.airbyte.com/integrations/sources/faker sourceType: api releaseStage: alpha +- name: Fastbill + sourceDefinitionId: eb3e9c1c-0467-4eb7-a172-5265e04ccd0a + dockerRepository: airbyte/source-fastbill + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/fastbill + sourceType: api + releaseStage: alpha - name: Fauna sourceDefinitionId: 3825db3e-c94b-42ac-bd53-b5a9507ace2b dockerRepository: airbyte/source-fauna - dockerImageTag: dev + dockerImageTag: 0.1.0 documentationUrl: https://docs.airbyte.com/integrations/sources/fauna icon: fauna.svg sourceType: database @@ -381,7 +467,7 @@ - name: File sourceDefinitionId: 
778daa7c-feaf-4db6-96f3-70fd645acc77 dockerRepository: airbyte/source-file - dockerImageTag: 0.2.28 + dockerImageTag: 0.2.31 documentationUrl: https://docs.airbyte.com/integrations/sources/file icon: file.svg sourceType: file @@ -401,7 +487,7 @@ - name: Freshdesk sourceDefinitionId: ec4b9503-13cb-48ab-a4ab-6ade4be46567 dockerRepository: airbyte/source-freshdesk - dockerImageTag: 0.3.6 + dockerImageTag: 0.3.8 documentationUrl: https://docs.airbyte.com/integrations/sources/freshdesk icon: freshdesk.svg sourceType: api @@ -422,6 +508,13 @@ icon: freshservice.svg sourceType: api releaseStage: alpha +- name: GetLago + sourceDefinitionId: e1a3866b-d3b2-43b6-b6d7-8c1ee4d7f53f + dockerRepository: airbyte/source-getlago + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/getlago + sourceType: api + releaseStage: alpha - name: Gridly sourceDefinitionId: 6cbea164-3237-433b-9abb-36d384ee4cbf dockerRepository: airbyte/source-gridly @@ -432,7 +525,7 @@ - name: GitHub sourceDefinitionId: ef69ef6e-aa7f-4af1-a01d-ef775033524e dockerRepository: airbyte/source-github - dockerImageTag: 0.3.7 + dockerImageTag: 0.3.8 documentationUrl: https://docs.airbyte.com/integrations/sources/github icon: github.svg sourceType: api @@ -453,6 +546,13 @@ icon: glassfrog.svg sourceType: api releaseStage: alpha +- name: GNews + sourceDefinitionId: ce38aec4-5a77-439a-be29-9ca44fd4e811 + dockerRepository: airbyte/source-gnews + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/gnews + sourceType: api + releaseStage: alpha - name: GoCardless sourceDefinitionId: ba15ac82-5c6a-4fb2-bf24-925c23a1180c dockerRepository: airbyte/source-gocardless @@ -460,10 +560,17 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/gocardless sourceType: api releaseStage: alpha +- name: Gong + sourceDefinitionId: 32382e40-3b49-4b99-9c5c-4076501914e7 + dockerRepository: airbyte/source-gong + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/gong + sourceType: api + releaseStage: alpha - name: Google Ads sourceDefinitionId: 253487c0-2246-43ba-a21f-5116b20a2c50 dockerRepository: airbyte/source-google-ads - dockerImageTag: 0.2.3 + dockerImageTag: 0.2.4 documentationUrl: https://docs.airbyte.com/integrations/sources/google-ads icon: google-adwords.svg sourceType: api @@ -503,7 +610,7 @@ - name: Google Sheets sourceDefinitionId: 71607ba1-c0ac-4799-8049-7f4b90dd50f7 dockerRepository: airbyte/source-google-sheets - dockerImageTag: 0.2.21 + dockerImageTag: 0.2.31 documentationUrl: https://docs.airbyte.com/integrations/sources/google-sheets icon: google-sheets.svg sourceType: file @@ -572,11 +679,18 @@ - name: HubSpot sourceDefinitionId: 36c891d9-4bd9-43ac-bad2-10e12756272c dockerRepository: airbyte/source-hubspot - dockerImageTag: 0.2.2 + dockerImageTag: 0.2.3 documentationUrl: https://docs.airbyte.com/integrations/sources/hubspot icon: hubspot.svg sourceType: api releaseStage: generally_available +- name: IP2Whois + sourceDefinitionId: f23b7b7c-d705-49a3-9042-09add3b104a5 + dockerRepository: airbyte/source-ip2whois + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/ip2whois + sourceType: api + releaseStage: alpha - name: IBM Db2 sourceDefinitionId: 447e0381-3780-4b46-bb62-00a4e3c8b8e2 dockerRepository: airbyte/source-db2 @@ -608,6 +722,13 @@ icon: intercom.svg sourceType: api releaseStage: generally_available +- name: Intruder + sourceDefinitionId: 3d15163b-11d8-412f-b808-795c9b2c3a3a + 
dockerRepository: airbyte/source-intruder + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/intruder + sourceType: api + releaseStage: alpha - name: Iterable sourceDefinitionId: 2e875208-0c0b-4ee4-9e92-1cb3156ea799 dockerRepository: airbyte/source-iterable @@ -627,19 +748,33 @@ - name: Jira sourceDefinitionId: 68e63de2-bb83-4c7e-93fa-a8a9051e3993 dockerRepository: airbyte/source-jira - dockerImageTag: 0.2.22 + dockerImageTag: 0.3.0 documentationUrl: https://docs.airbyte.com/integrations/sources/jira icon: jira.svg sourceType: api releaseStage: alpha +- name: K6 Cloud + sourceDefinitionId: e300ece7-b073-43a3-852e-8aff36a57f13 + dockerRepository: airbyte/source-k6-cloud + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/k6-cloud + sourceType: api + releaseStage: alpha - name: Kafka sourceDefinitionId: d917a47b-8537-4d0d-8c10-36a9928d4265 dockerRepository: airbyte/source-kafka - dockerImageTag: 0.2.0 + dockerImageTag: 0.2.2 documentationUrl: https://docs.airbyte.com/integrations/sources/kafka icon: kafka.svg sourceType: database releaseStage: alpha +- name: Klarna + sourceDefinitionId: 60c24725-00ae-490c-991d-55b78c3197e0 + dockerRepository: airbyte/source-klarna + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/klarna + sourceType: api + releaseStage: alpha - name: Klaviyo sourceDefinitionId: 95e8cffd-b8c4-4039-968e-d32fb4a69bde dockerRepository: airbyte/source-klaviyo @@ -655,6 +790,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/kyriba sourceType: api releaseStage: alpha +- name: LaunchDarkly + sourceDefinitionId: f96bb511-5e3c-48fc-b408-547953cd81a4 + dockerRepository: airbyte/source-launchdarkly + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/launchdarkly + sourceType: api + releaseStage: alpha - name: Lemlist sourceDefinitionId: 789f8e7a-2d28-11ec-8d3d-0242ac130003 dockerRepository: airbyte/source-lemlist @@ -738,6 +880,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/mailerlite sourceType: api releaseStage: alpha +- name: MailerSend + sourceDefinitionId: 2707d529-3c04-46eb-9c7e-40d4038df6f7 + dockerRepository: airbyte/source-mailersend + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/mailersend + sourceType: api + releaseStage: alpha - name: Mailgun sourceDefinitionId: 5b9cb09e-1003-4f9c-983d-5779d1b2cd51 dockerRepository: airbyte/source-mailgun @@ -778,6 +927,13 @@ icon: microsoft-teams.svg sourceType: api releaseStage: alpha +- name: Microsoft Dataverse + sourceDefinitionId: 9220e3de-3b60-4bb2-a46f-046d59ea235a + dockerRepository: airbyte/source-microsoft-dataverse + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/microsoft-dataverse + sourceType: api + releaseStage: alpha - name: Mixpanel sourceDefinitionId: 12928b32-bf0a-4f1e-964f-07e12e37153a dockerRepository: airbyte/source-mixpanel @@ -813,11 +969,18 @@ - name: MySQL sourceDefinitionId: 435bb9a5-7887-4809-aa58-28c27df0d7ad dockerRepository: airbyte/source-mysql - dockerImageTag: 1.0.11 + dockerImageTag: 1.0.13 documentationUrl: https://docs.airbyte.com/integrations/sources/mysql icon: mysql.svg sourceType: database releaseStage: beta +- name: n8n + sourceDefinitionId: 4a961f66-5e99-4430-8320-a73afe52f7a2 + dockerRepository: airbyte/source-n8n + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/n8n + sourceType: 
api + releaseStage: alpha - name: NASA sourceDefinitionId: 1a8667d7-7978-43cd-ba4d-d32cbd478971 dockerRepository: airbyte/source-nasa @@ -840,6 +1003,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/news-api sourceType: api releaseStage: alpha +- name: Newsdata + sourceDefinitionId: 60bd11d8-2632-4daa-a688-b47336d32093 + dockerRepository: airbyte/source-newsdata + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/newsdata + sourceType: api + releaseStage: alpha - name: Notion sourceDefinitionId: 6e00b415-b02e-4160-bf02-58176a0ae687 dockerRepository: airbyte/source-notion @@ -848,6 +1018,14 @@ icon: notion.svg sourceType: api releaseStage: generally_available +- name: New York Times + sourceDefinitionId: 0fae6a9a-04eb-44d4-96e1-e02d3dbc1d83 + dockerRepository: airbyte/source-nytimes + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/nytimes + icon: nytimes.svg + sourceType: api + releaseStage: alpha - name: Okta sourceDefinitionId: 1d4fdb25-64fc-4569-92da-fcdca79a8372 dockerRepository: airbyte/source-okta @@ -925,6 +1103,13 @@ icon: pagerduty.svg sourceType: api releaseStage: alpha +- name: PartnerStack + sourceDefinitionId: d30fb809-6456-484d-8e2c-ee12e0f6888d + dockerRepository: airbyte/source-partnerstack + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/partnerstack + sourceType: api + releaseStage: alpha - name: Paypal Transaction sourceDefinitionId: d913b0f2-cc51-4e55-a44c-8ba1697b9239 dockerRepository: airbyte/source-paypal-transaction @@ -952,7 +1137,7 @@ - name: Pinterest sourceDefinitionId: 5cb7e5fe-38c2-11ec-8d3d-0242ac130003 dockerRepository: airbyte/source-pinterest - dockerImageTag: 0.1.8 + dockerImageTag: 0.1.9 documentationUrl: https://docs.airbyte.com/integrations/sources/pinterest icon: pinterest.svg sourceType: api @@ -980,6 +1165,22 @@ icon: plaid.svg sourceType: api releaseStage: alpha +- name: Plausible + sourceDefinitionId: 603ba446-3d75-41d7-92f3-aba901f8b897 + dockerRepository: airbyte/source-plausible + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/plausible + icon: plausible.svg + sourceType: api + releaseStage: alpha +- name: Pocket + sourceDefinitionId: b0dd65f1-081f-4731-9c51-38e9e6aa0ebf + dockerRepository: airbyte/source-pocket + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/pocket + icon: pocket.svg + sourceType: api + releaseStage: alpha - name: PokeAPI sourceDefinitionId: 6371b14b-bc68-4236-bfbd-468e8df8e968 dockerRepository: airbyte/source-pokeapi @@ -988,6 +1189,13 @@ icon: pokeapi.svg sourceType: api releaseStage: alpha +- name: Polygon Stock API + sourceDefinitionId: 5807d72f-0abc-49f9-8fa5-ae820007032b + dockerRepository: airbyte/source-polygon-stock-api + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/polygon-stock-api + sourceType: api + releaseStage: alpha - name: PostHog sourceDefinitionId: af6d50ee-dddf-4126-a8ee-7faee990774f dockerRepository: airbyte/source-posthog @@ -999,19 +1207,26 @@ - name: Postgres sourceDefinitionId: decd338e-5647-4c0b-adf4-da0e75f5a750 dockerRepository: airbyte/source-postgres - dockerImageTag: 1.0.22 + dockerImageTag: 1.0.25 documentationUrl: https://docs.airbyte.com/integrations/sources/postgres icon: postgresql.svg sourceType: database releaseStage: generally_available -- name: Prestashop +- name: Postmark App + sourceDefinitionId: 
cde75ca1-1e28-4a0f-85bb-90c546de9f1f + dockerRepository: airbyte/source-postmarkapp + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/postmarkapp + sourceType: api + releaseStage: alpha +- name: PrestaShop sourceDefinitionId: d60a46d4-709f-4092-a6b7-2457f7d455f5 dockerRepository: airbyte/source-prestashop - dockerImageTag: 0.2.0 + dockerImageTag: 0.3.0 documentationUrl: https://docs.airbyte.com/integrations/sources/presta-shop icon: prestashop.svg sourceType: api - releaseStage: alpha + releaseStage: beta - name: Primetric sourceDefinitionId: f636c3c6-4077-45ac-b109-19fc62a283c1 dockerRepository: airbyte/source-primetric @@ -1027,6 +1242,27 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/public-apis sourceType: api releaseStage: alpha +- name: Punk API + sourceDefinitionId: dbe9b7ae-7b46-4e44-a507-02a343cf7230 + dockerRepository: airbyte/source-punk-api + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/punk-api + sourceType: api + releaseStage: alpha +- name: PyPI + sourceDefinitionId: 88ecd3a8-5f5b-11ed-9b6a-0242ac120002 + dockerRepository: airbyte/source-pypi + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/pypi + sourceType: api + releaseStage: alpha +- name: Qonto + sourceDefinitionId: f7c0b910-5f66-11ed-9b6a-0242ac120002 + dockerRepository: airbyte/source-qonto + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/public-qonto + sourceType: api + releaseStage: alpha - name: Qualaroo sourceDefinitionId: b08e4776-d1de-4e80-ab5c-1e51dad934a2 dockerRepository: airbyte/source-qualaroo @@ -1051,6 +1287,21 @@ icon: recharge.svg sourceType: api releaseStage: generally_available +- name: Recreation + sourceDefinitionId: 25d7535d-91e0-466a-aa7f-af81578be277 + dockerRepository: airbyte/source-recreation + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/recreation + icon: recreation.svg + sourceType: api + releaseStage: alpha +- name: Recruitee + sourceDefinitionId: 3b046ac7-d8d3-4eb3-b122-f96b2a16d8a8 + dockerRepository: airbyte/source-recruitee + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/recruitee + sourceType: api + releaseStage: alpha - name: Recurly sourceDefinitionId: cd42861b-01fc-4658-a8ab-5d11d0510f01 dockerRepository: airbyte/source-recurly @@ -1067,6 +1318,13 @@ icon: redshift.svg sourceType: database releaseStage: alpha +- name: Reply.io + sourceDefinitionId: 8cc6537e-f8a6-423c-b960-e927af76116e + dockerRepository: airbyte/source-reply-io + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/reply-io + sourceType: api + releaseStage: alpha - name: Retently sourceDefinitionId: db04ecd1-42e7-4115-9cec-95812905c626 dockerRepository: airbyte/source-retently @@ -1089,10 +1347,24 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/rki-covid sourceType: api releaseStage: alpha +- name: RSS + sourceDefinitionId: 0efee448-6948-49e2-b786-17db50647908 + dockerRepository: airbyte/source-rss + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/rss + icon: rss.svg +- name: Rocket.chat + sourceDefinitionId: 921d9608-3915-450b-8078-0af18801ea1b + dockerRepository: airbyte/source-rocket-chat + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/rocket-chat + icon: rocket-chat.svg + sourceType: api + releaseStage: alpha - 
name: S3 sourceDefinitionId: 69589781-7828-43c5-9f63-8925b1c1ccc2 dockerRepository: airbyte/source-s3 - dockerImageTag: 0.1.25 + dockerImageTag: 0.1.26 documentationUrl: https://docs.airbyte.com/integrations/sources/s3 icon: s3.svg sourceType: file @@ -1108,11 +1380,18 @@ - name: Salesforce sourceDefinitionId: b117307c-14b6-41aa-9422-947e34922962 dockerRepository: airbyte/source-salesforce - dockerImageTag: 1.0.24 + dockerImageTag: 1.0.26 documentationUrl: https://docs.airbyte.com/integrations/sources/salesforce icon: salesforce.svg sourceType: api releaseStage: generally_available +- name: SAP Fieldglass + sourceDefinitionId: ec5f3102-fb31-4916-99ae-864faf8e7e25 + dockerRepository: airbyte/source-sap-fieldglass + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/sap-fieldglass + sourceType: api + releaseStage: alpha - name: SearchMetrics sourceDefinitionId: 8d7ef552-2c0f-11ec-8d3d-0242ac130003 dockerRepository: airbyte/source-search-metrics @@ -1121,6 +1400,13 @@ icon: searchmetrics.svg sourceType: api releaseStage: alpha +- name: Secoda + sourceDefinitionId: da9fc6b9-8059-4be0-b204-f56e22e4d52d + dockerRepository: airbyte/source-secoda + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/secoda + sourceType: api + releaseStage: alpha - name: Sendgrid sourceDefinitionId: fbb5fbe2-16ad-4cf4-af7d-ff9d9c316c87 dockerRepository: airbyte/source-sendgrid @@ -1129,6 +1415,20 @@ icon: sendgrid.svg sourceType: api releaseStage: beta +- name: Senseforce + sourceDefinitionId: 39de93cb-1511-473e-a673-5cbedb9436af + dockerRepository: airbyte/source-senseforce + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/senseforce + sourceType: api + releaseStage: alpha +- name: Sendinblue + sourceDefinitionId: 2e88fa20-a2f6-43cc-bba6-98a0a3f244fb + dockerRepository: airbyte/source-sendinblue + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/sendinblue + sourceType: api + releaseStage: alpha - name: Shopify sourceDefinitionId: 9da77001-af33-4bcd-be46-6252bf9342b9 dockerRepository: airbyte/source-shopify @@ -1153,6 +1453,20 @@ icon: slack.svg sourceType: api releaseStage: generally_available +- name: Smaily + sourceDefinitionId: 781f8b1d-4e20-4842-a2c3-cd9b119d65fa + dockerRepository: airbyte/source-smaily + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/smaily + sourceType: api + releaseStage: alpha +- name: SmartEngage + sourceDefinitionId: 21cc4a17-a011-4485-8a3e-e2341a91ab9f + dockerRepository: airbyte/source-smartengage + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/smartengage + sourceType: api + releaseStage: alpha - name: Smartsheets sourceDefinitionId: 374ebc65-6636-4ea0-925c-7d35999a8ffc dockerRepository: airbyte/source-smartsheets @@ -1172,7 +1486,7 @@ - name: Snowflake sourceDefinitionId: e2d65910-8c8b-40a1-ae7d-ee2416b2bfa2 dockerRepository: airbyte/source-snowflake - dockerImageTag: 0.1.24 + dockerImageTag: 0.1.26 documentationUrl: https://docs.airbyte.com/integrations/sources/snowflake icon: snowflake.svg sourceType: database @@ -1207,6 +1521,13 @@ icon: strava.svg sourceType: api releaseStage: alpha +- name: Statuspage + sourceDefinitionId: 74cbd708-46c3-4512-9c93-abd5c3e9a94d + dockerRepository: airbyte/source-statuspage + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/statuspage + sourceType: api + releaseStage: alpha - 
name: Stripe sourceDefinitionId: e094cb9a-26de-4645-8761-65c0c425d1de dockerRepository: airbyte/source-stripe @@ -1241,11 +1562,11 @@ - name: Tempo sourceDefinitionId: d1aa448b-7c54-498e-ad95-263cbebcd2db dockerRepository: airbyte/source-tempo - dockerImageTag: 0.2.6 + dockerImageTag: 0.3.0 documentationUrl: https://docs.airbyte.com/integrations/sources/tempo icon: tempo.svg sourceType: api - releaseStage: alpha + releaseStage: beta - name: TiDB sourceDefinitionId: 0dad1a35-ccf8-4d03-b73e-6788c00b13ae dockerRepository: airbyte/source-tidb @@ -1270,6 +1591,27 @@ icon: timely.svg sourceType: api releaseStage: alpha +- name: TMDb + sourceDefinitionId: 6240848f-f795-45eb-8f5e-c7542822fc03 + dockerRepository: airbyte/source-tmdb + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/tmdb + sourceType: api + releaseStage: alpha +- name: Toggl + sourceDefinitionId: 7e7c844f-2300-4342-b7d3-6dd7992593cd + dockerRepository: airbyte/source-toggl + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/toggl + sourceType: api + releaseStage: alpha +- name: The Guardian API + sourceDefinitionId: d42bd69f-6bf0-4d0b-9209-16231af07a92 + dockerRepository: airbyte/source-the-guardian-api + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/the-guardian-api + sourceType: api + releaseStage: alpha - name: Trello sourceDefinitionId: 8da67652-004c-11ec-9a03-0242ac130003 dockerRepository: airbyte/source-trello @@ -1294,6 +1636,21 @@ icon: twilio.svg sourceType: api releaseStage: generally_available +- name: Twitter + sourceDefinitionId: d7fd4f40-5e5a-4b8b-918f-a73077f8c131 + dockerRepository: airbyte/source-twitter + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/twitter + sourceType: api + releaseStage: alpha +- name: Tyntec SMS + sourceDefinitionId: 3c0c3cd1-b3e0-464a-9090-d3ceb5f92346 + dockerRepository: airbyte/source-tyntec-sms + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/tyntec-sms + icon: tyntec.svg + sourceType: api + releaseStage: alpha - name: Typeform sourceDefinitionId: e7eff203-90bf-43e5-a240-19ea3056c474 dockerRepository: airbyte/source-typeform @@ -1318,6 +1675,21 @@ icon: youtube.svg sourceType: api releaseStage: beta +- sourceDefinitionId: 78752073-6d96-447d-8a93-2b6953f3c787 + name: YouTube Analytics Business + dockerRepository: airbyte/source-youtube-analytics-business + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/youtube-analytics-business + icon: youtube.svg + sourceType: api + releaseStage: alpha +- name: Vantage + sourceDefinitionId: 28ce1fbd-1e15-453f-aa9f-da6c4d928e92 + dockerRepository: airbyte/source-vantage + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/vantage + sourceType: api + releaseStage: alpha - name: VictorOps sourceDefinitionId: 7e20ce3e-d820-4327-ad7a-88f3927fd97a dockerRepository: farosai/airbyte-victorops-source @@ -1326,6 +1698,27 @@ icon: victorops.svg sourceType: api releaseStage: alpha +- name: Visma E-conomic + sourceDefinitionId: 42495935-95de-4f5c-ae08-8fac00f6b308 + dockerRepository: airbyte/source-visma-economic + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/visma-economic + sourceType: api + releaseStage: alpha +- name: Vitally + sourceDefinitionId: 6c6d8b0c-db35-4cd1-a7de-0ca8b080f5ac + dockerRepository: airbyte/source-vitally + dockerImageTag: 
0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/vitally + sourceType: api + releaseStage: alpha +- name: Xero + sourceDefinitionId: 6fd1e833-dd6e-45ec-a727-ab917c5be892 + dockerRepository: airbyte/source-xero + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/xero + sourceType: api + releaseStage: alpha - name: xkcd sourceDefinitionId: 80fddd16-17bd-4c0c-bf4a-80df7863fc9d dockerRepository: airbyte/source-xkcd @@ -1333,6 +1726,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/xkcd sourceType: api releaseStage: alpha +- name: Weatherstack + sourceDefinitionId: 5db8292c-5f5a-11ed-9b6a-0242ac120002 + dockerRepository: airbyte/source-weatherstack + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/weatherstack + sourceType: api + releaseStage: alpha - name: Webflow sourceDefinitionId: ef580275-d9a9-48bb-af5e-db0f5855be04 dockerRepository: airbyte/source-webflow @@ -1371,6 +1771,13 @@ icon: wrike.svg sourceType: api releaseStage: alpha +- name: Zapier Supported Storage + sourceDefinitionId: b8c917bc-7d1b-4828-995f-6726820266d0 + dockerRepository: airbyte/source-zapier-supported-storage + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-supported-storage + sourceType: api + releaseStage: alpha - name: Zendesk Chat sourceDefinitionId: 40d24d0f-b8f9-4fe0-9e6c-b06c0f3f45e4 dockerRepository: airbyte/source-zendesk-chat @@ -1498,6 +1905,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/wikipedia-pageviews sourceType: api releaseStage: alpha +- name: WorkRamp + sourceDefinitionId: 05b0bce2-4ec4-4534-bb1a-5d0127bd91b7 + dockerRepository: airbyte/source-workramp + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/workramp + sourceType: api + releaseStage: alpha - name: Yandex Metrica sourceDefinitionId: 7865dce4-2211-4f6a-88e5-9d0fe161afe7 dockerRepository: airbyte/source-yandex-metrica @@ -1505,6 +1919,13 @@ documentationUrl: https://docs.airbyte.com/integrations/sources/yandex-metrica sourceType: api releaseStage: alpha +- name: Younium + sourceDefinitionId: 9c74c2d7-531a-4ebf-b6d8-6181f805ecdc + dockerRepository: airbyte/source-younium + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.com/integrations/sources/younium + sourceType: api + releaseStage: alpha - name: Zoom sourceDefinitionId: cbfd9856-1322-44fb-bcf1-0b39b7a8e92e dockerRepository: airbyte/source-zoom diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml index 15a3c8030b740..9ac655fdf4ead 100644 --- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml @@ -219,6 +219,29 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-aha:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/aha" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Aha Spec" + type: "object" + required: + - "api_key" + - "url" + additionalProperties: true + properties: + api_key: + type: "string" + description: "API Key" + title: "API Bearer Token" + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: 
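The blocks added to source_specs.yaml from here on (starting with source-aha above) each embed a connectionSpecification that is a JSON Schema draft-07 document, so a candidate config can be checked locally before it ever reaches a connector. A minimal sketch using the jsonschema package; the schema is a trimmed copy of the Aha spec above and the config values are invented.

# Validate a hypothetical config against (a trimmed copy of) the Aha connectionSpecification above.
from jsonschema import Draft7Validator

aha_spec = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "title": "Aha Spec",
    "type": "object",
    "required": ["api_key", "url"],
    "additionalProperties": True,
    "properties": {
        "api_key": {"type": "string"},
        "url": {"type": "string"},
    },
}

config = {"api_key": "dummy-token", "url": "https://example.aha.io"}  # invented values

errors = sorted(Draft7Validator(aha_spec).iter_errors(config), key=str)
print("valid" if not errors else [e.message for e in errors])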
"airbyte/source-alpha-vantage:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/alpha-vantage" @@ -725,7 +748,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-amazon-ads:0.1.24" +- dockerImage: "airbyte/source-amazon-ads:0.1.25" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/amazon-ads" connectionSpecification: @@ -770,26 +793,6 @@ type: "string" default: "NA" order: 4 - report_wait_timeout: - title: "Report Wait Timeout" - description: "Timeout duration in minutes for Reports. Default is 60 minutes." - default: 60 - examples: - - 60 - - 120 - order: 5 - type: "integer" - report_generation_max_retries: - title: "Report Generation Maximum Retries" - description: "Maximum retries Airbyte will attempt for fetching report data.\ - \ Default is 5." - default: 5 - examples: - - 5 - - 10 - - 15 - order: 6 - type: "integer" start_date: title: "Start Date" description: "The Start date for collecting reports, should not be more\ @@ -797,13 +800,13 @@ examples: - "2022-10-10" - "2022-10-22" - order: 7 + order: 5 type: "string" profiles: title: "Profile IDs" description: "Profile IDs you want to fetch data for. See docs for more details." - order: 8 + order: 6 type: "array" items: type: "integer" @@ -820,7 +823,7 @@ - "archived" type: "array" uniqueItems: true - order: 9 + order: 7 required: - "client_id" - "client_secret" @@ -1831,6 +1834,98 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-breezometer:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/breezometer" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Breezometer Spec" + type: "object" + required: + - "api_key" + - "latitude" + - "longitude" + additionalProperties: true + properties: + api_key: + type: "string" + tittle: "API Key" + description: "Your API Access Key. See here." + airbyte_secret: true + latitude: + type: "string" + tittle: "Latitude" + description: "Latitude of the monitored location." + examples: + - "54.675003" + longitude: + type: "string" + tittle: "Longitude" + description: "Longitude of the monitored location." + examples: + - "-113.550282" + days_to_forecast: + type: "integer" + tittle: "Days to Forecast" + description: "Number of days to forecast. Minimum 1, maximum 3. Valid for\ + \ Polen and Weather Forecast streams." + examples: + - 3 + hours_to_forecast: + type: "integer" + tittle: "Hours to Forecast" + description: "Number of hours to forecast. Minimum 1, maximum 96. Valid\ + \ for Air Quality Forecast stream." + examples: + - 30 + historic_hours: + type: "integer" + tittle: "Historic Hours" + description: "Number of hours retireve from Air Quality History stream.\ + \ Minimum 1, maximum 720." + examples: + - 30 + radius: + type: "integer" + tittle: "Radius" + description: "Desired radius from the location provided. Minimum 5, maximum\ + \ 100. Valid for Wildfires streams." 
+ examples: + - 50 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-callrail:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/callrail" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Call Rail Spec" + type: "object" + required: + - "api_key" + - "account_id" + - "start_date" + additionalProperties: true + properties: + api_key: + type: "string" + description: "API access key" + airbyte_secret: true + account_id: + type: "string" + description: "Account ID" + airbyte_secret: true + start_date: + type: "string" + description: "Start getting data from that date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "%Y-%m-%d" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-cart:0.2.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/cart" @@ -2191,6 +2286,46 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-clickup-api:0.1.0" + spec: + documentationUrl: "https://docsurl.com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "ClickUp Api Spec" + type: "object" + required: + - "api_token" + additionalProperties: true + properties: + api_token: + type: "string" + description: "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." + airbyte_secret: true + team_id: + type: "string" + description: "The ID of your team in ClickUp. Retrieve it from the `/team`\ + \ of the ClickUp API. See here." + space_id: + type: "string" + description: "The ID of your space in your workspace. Retrieve it from the\ + \ `/team/{team_id}/space` of the ClickUp API. See here." + folder_id: + type: "string" + description: "The ID of your folder in your space. Retrieve it from the\ + \ `/space/{space_id}/folder` of the ClickUp API. See here." + list_id: + type: "string" + description: "The ID of your list in your folder. Retrieve it from the `/folder/{folder_id}/list`\ + \ of the ClickUp API. See here." 
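Several of the new specs constrain dates with a raw regex rather than a format keyword; the CallRail spec above requires start_date to match ^[0-9]{4}-[0-9]{2}-[0-9]{2}$, and its examples entry is literally the strftime format string "%Y-%m-%d" rather than a sample date. A self-contained sketch of producing a compliant value:

# Produce and check a start_date that satisfies the pattern used by the CallRail spec above.
import re
from datetime import date

START_DATE_PATTERN = re.compile(r"^[0-9]{4}-[0-9]{2}-[0-9]{2}$")

start_date = date(2022, 11, 1).strftime("%Y-%m-%d")  # example value
assert START_DATE_PATTERN.match(start_date), f"{start_date!r} does not match the spec pattern"
print(start_date)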
+ supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-close-com:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/close-com" @@ -2280,6 +2415,26 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-coda:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/coda" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Required attributes for hitting apis" + type: "object" + required: + - "auth_token" + additionalProperties: true + properties: + auth_token: + title: "Authentication token" + type: "string" + description: "Bearer token" + airbyte_secret: true + order: 0 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-coin-api:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/coin-api" @@ -2432,6 +2587,32 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-configcat:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/configcat" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Configcat Spec" + type: "object" + required: + - "username" + - "password" + additionalProperties: true + properties: + username: + title: "Username" + type: "string" + description: "Basic auth user name. See here." + password: + title: "Password" + type: "string" + description: "Basic auth password. See here." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-confluence:0.1.1" spec: documentationUrl: "https://docsurl.com" @@ -2481,6 +2662,54 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-copper:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/copper" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Copper Spec" + type: "object" + required: + - "api_key" + - "user_email" + properties: + api_key: + type: "string" + title: "API Key" + description: "Copper API key" + airbyte_secret: true + user_email: + type: "string" + title: "User email" + description: "user email used to login in to Copper" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-convex:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/convex" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." 
+ airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-courier:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/courier" @@ -2604,6 +2833,33 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-datascope:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/datascope" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Datascope Spec" + type: "object" + required: + - "api_key" + - "start_date" + additionalProperties: true + properties: + start_date: + title: "Start Date" + type: "string" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}$" + api_key: + title: "Authorization" + type: "string" + description: "API Key" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-delighted:0.1.4" spec: documentationUrl: "https://docsurl.com" @@ -2840,6 +3096,75 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-dynamodb:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/dynamodb" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Dynamodb Source Spec" + type: "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: false + properties: + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" + access_key_id: + title: "Dynamodb Key Id" + type: "string" + description: "The access key id to access Dynamodb. Airbyte requires read\ + \ permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." 
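The Dynamodb spec above maps one-to-one onto an AWS SDK client: an optional endpoint, a region, and an access-key pair that only needs read permissions. The sketch below shows that mapping with boto3; boto3 appears nowhere in this diff and the credential values are the spec's own placeholder examples, so treat it purely as an illustration.

# Illustrative only: how the Dynamodb spec fields above map onto a boto3 client.
import boto3  # assumed to be installed; not part of this diff

config = {
    "endpoint": "",                      # optional; empty means the default AWS endpoint
    "region": "us-east-1",
    "access_key_id": "A012345678910EXAMPLE",
    "secret_access_key": "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY",
}

client = boto3.client(
    "dynamodb",
    region_name=config["region"] or None,
    endpoint_url=config["endpoint"] or None,
    aws_access_key_id=config["access_key_id"],
    aws_secret_access_key=config["secret_access_key"],
)

# Read-only call, matching the spec's note that Airbyte only needs read permissions.
print(client.list_tables()["TableNames"])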
+ airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-e2e-test:2.1.3" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/e2e-test" @@ -2954,19 +3279,39 @@ supportsDBT: false supported_destination_sync_modes: [] protocol_version: "0.2.1" -- dockerImage: "airbyte/source-exchange-rates:1.2.7" +- dockerImage: "airbyte/source-emailoctopus:0.1.0" spec: - documentationUrl: "https://docs.airbyte.com/integrations/sources/exchangeratesapi" + documentationUrl: "https://docs.airbyte.com/integrations/sources/emailoctopus" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" - title: "exchangeratesapi.io Source Spec" + title: "EmailOctopus Spec" type: "object" required: - - "start_date" - - "access_key" + - "api_key" additionalProperties: true properties: - start_date: + api_key: + type: "string" + title: "EmailOctopus API key" + description: "EmailOctopus API Key. See the docs for information on how to generate this key." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-exchange-rates:1.2.7" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/exchangeratesapi" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + additionalProperties: true + properties: + start_date: type: "string" description: "Start getting data from that date." pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" @@ -3383,7 +3728,7 @@ oauthFlowInitParameters: [] oauthFlowOutputParameters: - - "access_token" -- dockerImage: "airbyte/source-faker:0.2.0" +- dockerImage: "airbyte/source-faker:0.2.1" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/faker" connectionSpecification: @@ -3429,7 +3774,30 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-fauna:dev" +- dockerImage: "airbyte/source-fastbill:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/fastbill" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Fastbill Spec" + type: "object" + required: + - "username" + - "api_key" + properties: + username: + title: "Username" + type: "string" + description: "Username for Fastbill account" + api_key: + title: "API Key" + type: "string" + description: "Fastbill API key" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-fauna:0.1.0" spec: documentationUrl: "https://github.com/fauna/airbyte/blob/source-fauna/docs/integrations/sources/fauna.md" connectionSpecification: @@ -3526,7 +3894,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-file:0.2.28" +- dockerImage: "airbyte/source-file:0.2.31" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/file" connectionSpecification: @@ -3831,7 +4199,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-freshdesk:0.3.6" +- dockerImage: "airbyte/source-freshdesk:0.3.8" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/freshdesk" 
connectionSpecification: @@ -3937,6 +4305,26 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-getlago:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/getlago" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Getlago Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + title: "API Key" + type: "string" + description: "Your API Key. See here." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-gridly:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/gridly" @@ -3958,7 +4346,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-github:0.3.7" +- dockerImage: "airbyte/source-github:0.3.8" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/github" connectionSpecification: @@ -4161,6 +4549,235 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-gnews:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/gnews" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + additionalProperties: true + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + query: + type: "string" + order: 1 + title: "Query" + description: "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + language: + type: "string" + title: "Language" + decription: "This parameter allows you to specify the language of the news\ + \ articles returned by the API. You have to set as value the 2 letters\ + \ code of the language you want to filter." + order: 2 + enum: + - "ar" + - "zh" + - "nl" + - "en" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "it" + - "ja" + - "ml" + - "mr" + - "no" + - "pt" + - "ro" + - "ru" + - "es" + - "sv" + - "ta" + - "te" + - "uk" + country: + type: "string" + title: "Country" + description: "This parameter allows you to specify the country where the\ + \ news articles returned by the API were published, the contents of the\ + \ articles are not necessarily related to the specified country. You have\ + \ to set as value the 2 letters code of the country you want to filter." + order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 7 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 8 + title: "Top Headlines Query" + description: "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." 
+ order: 9 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-gocardless:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/gocardless" @@ -4209,7 +4826,42 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-google-ads:0.2.3" +- dockerImage: "airbyte/source-gong:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/gong" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Gong Spec" + type: "object" + required: + - "access_key" + - "access_key_secret" + additionalProperties: true + properties: + access_key: + type: "string" + title: "Gong Access Key" + description: "Gong Access Key" + airbyte_secret: true + access_key_secret: + type: "string" + title: "Gong Access Key Secret" + description: "Gong Access Key Secret" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: "The date from which to list calls, in the ISO-8601 format;\ + \ if not specified, the calls start with the earliest recorded call. For\ + \ web-conference calls recorded by Gong, the date denotes its scheduled\ + \ time, otherwise, it denotes its actual start time." + examples: + - "2018-02-18T08:00:00Z" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-google-ads:0.2.4" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-ads" connectionSpecification: @@ -4857,7 +5509,7 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" -- dockerImage: "airbyte/source-google-sheets:0.2.21" +- dockerImage: "airbyte/source-google-sheets:0.2.31" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/google-sheets" connectionSpecification: @@ -5324,7 +5976,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-hubspot:0.2.2" +- dockerImage: "airbyte/source-hubspot:0.2.3" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/hubspot" connectionSpecification: @@ -5439,6 +6091,32 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" +- dockerImage: "airbyte/source-ip2whois:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/ip2whois" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Ip2whois Spec" + type: "object" + additionalProperties: true + properties: + api_key: + title: "API key" + type: "string" + description: "Your API Key. See here." + airbyte_secret: true + domain: + title: "Domain" + type: "string" + description: "Domain name. See here." 
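The long query description in the source-gnews spec above encodes several search operators in prose: quoted phrases for exact matches, a space (or AND) for conjunction, OR for alternatives (which binds tighter than AND), and NOT for exclusion. A small sketch that assembles queries following those rules; the helper functions are invented for illustration and simply reproduce the spec's own example strings.

# Build GNews-style query strings per the operator rules described in the gnews spec above.
def phrase(text: str) -> str:
    """Exact-match search: wrap the keyword sequence in quotes."""
    return f'"{text}"'

def all_of(*terms: str) -> str:
    """Logical AND: a space acts as AND, or AND can be written explicitly."""
    return " AND ".join(terms)

def any_of(*terms: str) -> str:
    """Logical OR; per the description it has higher precedence than AND."""
    return " OR ".join(terms)

def exclude(term: str) -> str:
    """Logical NOT: prefix the excluded word or quoted phrase with NOT."""
    return f"NOT {term}"

# Reproduce the spec's own examples.
print(phrase("Apple iPhone"))               # "Apple iPhone"
print(all_of("Apple", "Microsoft"))         # Apple AND Microsoft
print(any_of("Apple", "Microsoft"))         # Apple OR Microsoft
print(all_of("Apple", exclude("iPhone")))   # Apple AND NOT iPhone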
+ examples: + - "www.google.com" + - "www.facebook.com" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-db2:0.1.16" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/db2" @@ -5644,6 +6322,26 @@ oauthFlowInitParameters: [] oauthFlowOutputParameters: - - "access_token" +- dockerImage: "airbyte/source-intruder:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/intruder" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Intruder Spec" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + access_token: + title: "API Access token" + type: "string" + description: "Your API Access token. See here." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-iterable:0.1.21" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/iterable" @@ -5722,7 +6420,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-jira:0.2.22" +- dockerImage: "airbyte/source-jira:0.3.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/jira" connectionSpecification: @@ -5752,11 +6450,6 @@ type: "string" title: "Email" description: "The user email for your Jira account." - max_results: - type: "number" - title: "Max Results" - description: "Pagination max results (only for users stream)" - default: 50 projects: type: "array" title: "Projects" @@ -5806,7 +6499,27 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-kafka:0.2.0" +- dockerImage: "airbyte/source-k6-cloud:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/k6-cloud" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "K6 Cloud Spec" + type: "object" + required: + - "api_token" + additionalProperties: true + properties: + api_token: + title: "Api Token" + type: "string" + description: "Your API Token. See here. The key is case sensitive." 
+ airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-kafka:0.2.2" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/kafka" connectionSpecification: @@ -6098,16 +6811,59 @@ supported_destination_sync_modes: [] supported_source_sync_modes: - "append" -- dockerImage: "airbyte/source-klaviyo:0.1.10" +- dockerImage: "airbyte/source-klarna:0.1.0" spec: - documentationUrl: "https://docs.airbyte.com/integrations/sources/klaviyo" - changelogUrl: "https://docs.airbyte.com/integrations/sources/klaviyo" + documentationUrl: "https://docs.airbyte.com/integrations/sources/klarna" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" - title: "Klaviyo Spec" + title: "Klarna Spec" type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + additionalProperties: true properties: - api_key: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "us" + - "oc" + description: "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'us', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: "Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-klaviyo:0.1.10" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/klaviyo" + changelogUrl: "https://docs.airbyte.com/integrations/sources/klaviyo" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Klaviyo Spec" + type: "object" + properties: + api_key: title: "Api Key" description: "Klaviyo API Key. See our docs if you need help finding this key." @@ -6175,6 +6931,26 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-launchdarkly:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/launchdarkly" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Launchdarkly Spec" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + access_token: + title: "Access token" + type: "string" + description: "Your Access token. See here." 
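The Klarna spec above derives the API environment from two fields, region ('eu', 'us', 'oc') and the playground flag, and authenticates with HTTP basic auth (the merchant-ID-based username plus its associated password). A sketch of how a client might resolve those fields; the URL table is deliberately a placeholder keyed by (region, playground), since the spec only points at Klarna's API-URLs documentation rather than listing hostnames.

# Resolve the Klarna spec fields above into a base URL and a basic-auth session.
import requests  # assumed available; not part of this diff

# Placeholder mapping: substitute the hostnames from Klarna's API-URLs documentation.
BASE_URLS = {
    ("eu", False): "https://<klarna-eu-production-host>",
    ("eu", True):  "https://<klarna-eu-playground-host>",
    ("us", False): "https://<klarna-us-production-host>",
    ("us", True):  "https://<klarna-us-playground-host>",
    ("oc", False): "https://<klarna-oc-production-host>",
    ("oc", True):  "https://<klarna-oc-playground-host>",
}

config = {"region": "eu", "playground": True, "username": "K123456_abcdef", "password": "secret"}

base_url = BASE_URLS[(config["region"], config["playground"])]
session = requests.Session()
session.auth = (config["username"], config["password"])  # HTTP basic auth, as the spec implies
print(base_url)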
+ airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-lemlist:0.1.0" spec: documentationUrl: "https://docsurl.com" @@ -6774,6 +7550,37 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-mailersend:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/mailersend" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Mailersend Spec" + type: "object" + required: + - "api_token" + - "domain_id" + additionalProperties: true + properties: + api_token: + type: "string" + description: "Your API Token. See here." + airbyte_secret: true + domain_id: + type: "string" + description: "The domain entity in mailersend" + examples: + - "airbyte.com" + - "linkana.com" + start_date: + type: "number" + description: "Timestamp is assumed to be UTC." + examples: + - 123131321 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-mailgun:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/mailgun" @@ -7334,6 +8141,54 @@ path_in_connector_config: - "credentials" - "client_secret" +- dockerImage: "airbyte/source-microsoft-dataverse:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/microsoft-dataverse" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Microsoft Dataverse Spec" + type: "object" + required: + - "url" + - "tenant_id" + - "client_id" + - "client_secret_value" + additionalProperties: true + properties: + url: + type: "string" + description: "URL to Microsoft Dataverse API" + title: "URL" + examples: + - "https://.crm.dynamics.com" + order: 0 + tenant_id: + type: "string" + description: "Tenant Id of your Microsoft Dataverse Instance" + title: "Tenant Id" + airbyte_secret: true + order: 1 + client_id: + type: "string" + description: "App Registration Client Id" + title: "Client Id" + airbyte_secret: true + order: 2 + client_secret_value: + type: "string" + description: "App Registration Client Secret" + title: "Client Secret" + airbyte_secret: true + order: 3 + odata_maxpagesize: + type: "integer" + description: "Max number of results per page. Default=5000" + title: "Max page size" + default: 5000 + order: 4 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-mixpanel:0.1.29" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/mixpanel" @@ -7739,7 +8594,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-mysql:1.0.11" +- dockerImage: "airbyte/source-mysql:1.0.13" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/mysql" connectionSpecification: @@ -7822,9 +8677,6 @@ mode: type: "string" const: "preferred" - enum: - - "preferred" - default: "preferred" order: 0 - title: "required" description: "Require SSL mode." @@ -7834,9 +8686,6 @@ mode: type: "string" const: "required" - enum: - - "required" - default: "required" order: 0 - title: "Verify CA" description: "Verify CA SSL mode." 
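The Microsoft Dataverse spec above collects exactly the inputs needed for an Azure AD client-credentials token (tenant_id, client_id, client_secret_value) plus the instance url and an OData page-size preference. The spec itself does not describe the flow, so the token endpoint and Prefer header below are the standard Azure AD / OData conventions, shown only as a hedged illustration with invented values.

# Illustrative Azure AD client-credentials exchange for the Microsoft Dataverse spec above.
import requests

config = {
    "url": "https://myorg.crm.dynamics.com",   # hypothetical instance
    "tenant_id": "00000000-0000-0000-0000-000000000000",
    "client_id": "11111111-1111-1111-1111-111111111111",
    "client_secret_value": "app-registration-secret",
    "odata_maxpagesize": 5000,
}

token_url = f"https://login.microsoftonline.com/{config['tenant_id']}/oauth2/v2.0/token"
resp = requests.post(token_url, data={
    "grant_type": "client_credentials",
    "client_id": config["client_id"],
    "client_secret": config["client_secret_value"],
    "scope": f"{config['url']}/.default",   # standard convention; not stated in the spec
})
resp.raise_for_status()
token = resp.json()["access_token"]

headers = {
    "Authorization": f"Bearer {token}",
    "Prefer": f"odata.maxpagesize={config['odata_maxpagesize']}",  # what "Max page size" controls
}
print(headers["Prefer"])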
@@ -7847,9 +8696,6 @@ mode: type: "string" const: "verify_ca" - enum: - - "verify_ca" - default: "verify_ca" order: 0 ca_certificate: type: "string" @@ -7892,9 +8738,6 @@ mode: type: "string" const: "verify_identity" - enum: - - "verify_identity" - default: "verify_identity" order: 0 ca_certificate: type: "string" @@ -7943,9 +8786,6 @@ method: type: "string" const: "STANDARD" - enum: - - "STANDARD" - default: "STANDARD" order: 0 - title: "Logical Replication (CDC)" description: "CDC uses the Binlog to detect inserts, updates, and deletes.\ @@ -7956,9 +8796,6 @@ method: type: "string" const: "CDC" - enum: - - "CDC" - default: "CDC" order: 0 initial_waiting_seconds: type: "integer" @@ -8083,6 +8920,28 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-n8n:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/n8n" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "N8n Spec" + type: "object" + required: + - "host" + - "api_key" + additionalProperties: true + properties: + host: + type: "string" + description: "Hostname of the n8n instance" + api_key: + type: "string" + description: "Your API KEY. See here" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-nasa:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/nasa-apod" @@ -8398,6 +9257,211 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-newsdata:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/newsdata" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Newsdata Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + OneOf: + query: + type: "string" + description: "Keywords or phrases to search for in the news title and\ + \ content. Advanced Search options:\n - Search `Social`: query = \"\ + social\"\n - Search `Social Pizza`: query = \"social pizza\"\n - Search\ + \ `Social` but not with `pizza`: query = \"social -pizza\"\n - Search\ + \ `Social` but not with `pizza` and `wildfire`: query = \"social -pizza\ + \ -wildfire\"\n - Search `Social` and `pizza`: query = \"social AND\ + \ pizza\"\n - Search `Social` and `pizza` and `pasta`: query = \"social\ + \ AND pizza AND pasta\"\n - Search `Social` or `pizza`: query = \"\ + social OR pizza\"\n - Search `Social` or `pizza` but not `pasta`: query\ + \ = \"social OR pizza -pasta\"\n - Search `Social` or `pizza` or `pasta`:\ + \ query = \"social OR pizza OR pasta\"\nNote: You can't use AND and\ + \ OR in the same query." + order: 1 + query_in_title: + type: "string" + description: "Same as `query`, but restricting the search to only the\ + \ news title. It cannot be used along with `query`." + order: 1 + domain: + type: "array" + description: "Domains (maximum 5) to restrict the search to. Use the sources\ + \ stream to find top sources id." + maxitems: 5 + items: + type: "string" + order: 2 + country: + type: "array" + description: "2-letter ISO 3166-1 countries (maximum 5) to restrict the\ + \ search to." 
+ maxitems: 5 + order: 3 + items: + type: "string" + enum: + - "ar" + - "au" + - "at" + - "bd" + - "by" + - "be" + - "br" + - "bg" + - "ca" + - "cl" + - "cn" + - "co" + - "cr" + - "cu" + - "cz" + - "dk" + - "do" + - "ec" + - "eg" + - "ee" + - "et" + - "fi" + - "fr" + - "de" + - "gr" + - "hk" + - "hu" + - "in" + - "id" + - "iq" + - "ie" + - "il" + - "it" + - "jp" + - "kz" + - "kw" + - "lv" + - "lb" + - "lt" + - "my" + - "mx" + - "ma" + - "mm" + - "nl" + - "nz" + - "ng" + - "kp" + - "no" + - "pk" + - "pe" + - "ph" + - "pl" + - "pt" + - "pr" + - "ro" + - "ru" + - "sa" + - "rs" + - "sg" + - "sk" + - "si" + - "za" + - "kr" + - "es" + - "se" + - "ch" + - "tw" + - "tz" + - "th" + - "tr" + - "ua" + - "ae" + - "gb" + - "us" + - "ve" + - "vi" + category: + type: "array" + description: "Categories (maximum 5) to restrict the search to." + maxitems: 5 + order: 4 + items: + type: "string" + enum: + - "business" + - "entertainment" + - "environment" + - "food" + - "health" + - "politics" + - "science" + - "sports" + - "technology" + - "top" + - "world" + language: + type: "array" + description: "Languages (maximum 5) to restrict the search to." + maxitems: 5 + order: 5 + items: + type: "string" + enum: + - "be" + - "am" + - "ar" + - "bn" + - "bs" + - "bg" + - "my" + - "ckb" + - "zh" + - "hr" + - "cs" + - "da" + - "nl" + - "en" + - "et" + - "fi" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "hu" + - "in" + - "it" + - "jp" + - "ko" + - "lv" + - "lt" + - "ms" + - "no" + - "pl" + - "pt" + - "ro" + - "ru" + - "sr" + - "sk" + - "sl" + - "es" + - "sw" + - "sv" + - "th" + - "tr" + - "uk" + - "ur" + - "vi" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-notion:0.1.10" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/notion" @@ -8478,6 +9542,62 @@ - - "client_secret" oauthFlowOutputParameters: - - "access_token" +- dockerImage: "airbyte/source-nytimes:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/nytimes" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + additionalProperties: true + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-okta:0.1.13" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/okta" @@ -9244,6 +10364,39 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-partnerstack:0.1.0" + spec: + documentationUrl: 
"https://docs.airbyte.com/integrations/sources/partnerstack" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Partnerstack Spec" + type: "object" + required: + - "public_key" + - "private_key" + additionalProperties: true + properties: + public_key: + type: "string" + title: "Partnerstack Public key" + description: "The Live Public Key for a Partnerstack account." + airbyte_secret: true + private_key: + type: "string" + title: "Partnerstack Private key" + description: "The Live Private Key for a Partnerstack account." + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-paypal-transaction:0.1.10" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/paypal-transactions" @@ -9346,7 +10499,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-pinterest:0.1.8" +- dockerImage: "airbyte/source-pinterest:0.1.9" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/pinterest" connectionSpecification: @@ -9364,6 +10517,20 @@ \ it would be defaulted to latest allowed date by api (914 days from today)." examples: - "2022-07-28" + status: + title: "Status" + description: "Entity statuses based off of campaigns, ad_groups, and ads.\ + \ If you do not have a status set, it will be ignored completely." + type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true credentials: title: "Authorization Method" type: "object" @@ -9597,29 +10764,245 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-pokeapi:0.1.5" +- dockerImage: "airbyte/source-plausible:0.1.0" spec: - documentationUrl: "https://docs.airbyte.com/integrations/sources/pokeapi" + documentationUrl: "https://docs.airbyte.com/integrations/sources/plausible" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" - title: "Pokeapi Spec" + title: "Plausible Spec" type: "object" required: - - "pokemon_name" - additionalProperties: false + - "api_key" + - "site_id" + additionalProperties: true properties: - pokemon_name: + api_key: type: "string" - title: "Pokemon Name" - description: "Pokemon requested from the API." - pattern: "^[a-z0-9_\\-]+$" - examples: - - "ditto" - - "luxray" + title: "Plausible API key" + description: "Plausible API Key. See the docs for information on how to generate this key." + airbyte_secret: true + site_id: + type: "string" + title: "Target website domain" + description: "The domain of the site you want to retrieve data for. Enter\ + \ the name of your site as configured on Plausible, i.e., excluding \"\ + https://\" and \"www\". Can be retrieved from the 'domain' field in your\ + \ Plausible site settings." + pattern: "^[A-Za-z0-9-.]+\\.[A-Z-a-z0-9-.]+" + examples: + - "airbyte.com" + - "docs.airbyte.com" + start_date: + type: "string" + title: "Data start date" + description: "Start date for data to retrieve, in ISO-8601 format." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-pocket:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/pocket" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + additionalProperties: true + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." + order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." + order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-pokeapi:0.1.5" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/pokeapi" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Pokeapi Spec" + type: "object" + required: + - "pokemon_name" + additionalProperties: false + properties: + pokemon_name: + type: "string" + title: "Pokemon Name" + description: "Pokemon requested from the API." 
+ pattern: "^[a-z0-9_\\-]+$" + examples: + - "ditto" + - "luxray" - "snorlax" supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-polygon-stock-api:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/airtable" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Weather API Spec" + type: "object" + required: + - "apiKey" + - "stocksTicker" + - "multiplier" + - "timespan" + - "start_date" + - "end_date" + additionalProperties: true + properties: + apiKey: + title: "API Key" + type: "string" + description: "Your API ACCESS Key" + airbyte_secret: true + stocksTicker: + title: "Stock Ticker" + type: "string" + description: "The exchange symbol that this item is traded under." + examples: + - "IBM" + - "MSFT" + multiplier: + title: "Multiplier" + type: "integer" + description: "The size of the timespan multiplier." + examples: + - 1 + - 2 + timespan: + title: "Timespan" + type: "string" + description: "The size of the time window." + examples: + - "day" + start_date: + title: "Start Date" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: "The beginning date for the aggregate window." + examples: + - "2020-10-14" + end_date: + title: "End Date" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: "The target date for the aggregate window." + examples: + - "2020-10-14" + adjusted: + title: "Adjusted" + type: "string" + description: "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + examples: + - "true" + - "false" + sort: + title: "Sort" + type: "string" + description: "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + examples: + - "asc" + - "desc" + limit: + title: "Limit" + type: "integer" + description: "The target date for the aggregate window." + examples: + - 100 + - 120 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-posthog:0.1.7" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/posthog" @@ -9655,7 +11038,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-postgres:1.0.22" +- dockerImage: "airbyte/source-postgres:1.0.25" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/postgres" connectionSpecification: @@ -9745,7 +11128,7 @@ order: 7 oneOf: - title: "disable" - additionalProperties: false + additionalProperties: true description: "Disable SSL." required: - "mode" @@ -9753,12 +11136,9 @@ mode: type: "string" const: "disable" - enum: - - "disable" - default: "disable" order: 0 - title: "allow" - additionalProperties: false + additionalProperties: true description: "Allow SSL mode." required: - "mode" @@ -9766,12 +11146,9 @@ mode: type: "string" const: "allow" - enum: - - "allow" - default: "allow" order: 0 - title: "prefer" - additionalProperties: false + additionalProperties: true description: "Prefer SSL mode." required: - "mode" @@ -9779,12 +11156,9 @@ mode: type: "string" const: "prefer" - enum: - - "prefer" - default: "prefer" order: 0 - title: "require" - additionalProperties: false + additionalProperties: true description: "Require SSL mode." 
required: - "mode" @@ -9792,12 +11166,9 @@ mode: type: "string" const: "require" - enum: - - "require" - default: "require" order: 0 - title: "verify-ca" - additionalProperties: false + additionalProperties: true description: "Verify-ca SSL mode." required: - "mode" @@ -9806,9 +11177,6 @@ mode: type: "string" const: "verify-ca" - enum: - - "verify-ca" - default: "verify-ca" order: 0 ca_certificate: type: "string" @@ -9839,7 +11207,7 @@ airbyte_secret: true order: 4 - title: "verify-full" - additionalProperties: false + additionalProperties: true description: "Verify-full SSL mode." required: - "mode" @@ -9848,9 +11216,6 @@ mode: type: "string" const: "verify-full" - enum: - - "verify-full" - default: "verify-full" order: 0 ca_certificate: type: "string" @@ -9895,9 +11260,6 @@ method: type: "string" const: "Standard" - enum: - - "Standard" - default: "Standard" order: 0 - title: "Logical Replication (CDC)" description: "Logical replication uses the Postgres write-ahead log (WAL)\ @@ -9913,9 +11275,6 @@ method: type: "string" const: "CDC" - enum: - - "CDC" - default: "CDC" order: 0 plugin: type: "string" @@ -9928,7 +11287,7 @@ enum: - "pgoutput" - "wal2json" - default: "pgoutput" + const: "pgoutput" order: 1 replication_slot: type: "string" @@ -10060,25 +11419,63 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-prestashop:0.2.0" +- dockerImage: "airbyte/source-postmarkapp:0.1.0" spec: documentationUrl: "https://docsurl.com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + additionalProperties: true + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-prestashop:0.3.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/presta-shop" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" title: "PrestaShop Spec" type: "object" required: - - "url" - "access_key" + - "url" + - "start_date" properties: - url: - type: "string" - description: "Shop URL without trailing slash (domain name or IP address)" access_key: type: "string" + title: "Access Key" description: "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 airbyte_secret: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." 
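Editor's note: a recurring change in the Postgres hunks above is dropping the redundant `enum`/`default` keywords on the oneOf discriminators (the `ssl_mode` and replication `method` options), leaving only `const`. A rough sketch of how a client could pick the matching oneOf variant from a config via that `const` field; the helper and the trimmed variant schemas are illustrative, not Airbyte code.

```python
# Sketch: selecting the oneOf variant whose discriminator `const` matches the
# configured value, mirroring the ssl_mode options above. Variant schemas are
# cut down to the discriminator only.
from typing import Optional

ssl_mode_variants = [
    {"title": "disable",     "properties": {"mode": {"type": "string", "const": "disable"}}},
    {"title": "require",     "properties": {"mode": {"type": "string", "const": "require"}}},
    {"title": "verify-full", "properties": {"mode": {"type": "string", "const": "verify-full"}}},
]

def pick_variant(config: dict, variants: list, discriminator: str = "mode") -> Optional[dict]:
    """Return the variant whose `const` equals the configured discriminator value."""
    value = config.get(discriminator)
    for variant in variants:
        if variant["properties"][discriminator].get("const") == value:
            return variant
    return None

selected = pick_variant({"mode": "verify-full", "ca_certificate": "..."}, ssl_mode_variants)
print(selected["title"] if selected else "no matching ssl_mode variant")
```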
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + order: 2 supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] @@ -10128,6 +11525,115 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-punk-api:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/punk-api" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Punk Api Spec" + type: "object" + required: + - "brewed_before" + - "brewed_after" + additionalProperties: true + properties: + id: + title: "Beers with specific ID" + type: "string" + description: "To extract specific data with Unique ID" + examples: + - 1 + - 22 + brewed_before: + title: "Brewed before data to get incremental reads" + type: "string" + description: "To extract specific data with Unique ID" + pattern: "^[0-9]{2}-[0-9]{4}$" + examples: + - "MM-YYYY" + brewed_after: + title: "Brewed after data to get incremental reads" + type: "string" + description: "To extract specific data with Unique ID" + pattern: "^[0-9]{2}-[0-9]{4}$" + examples: + - "MM-YYYY" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-pypi:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/pypi" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Pypi Spec" + type: "object" + required: + - "project_name" + additionalProperties: true + properties: + project_name: + type: "string" + title: "PyPI Package" + description: "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + version: + title: "Package Version" + type: "string" + description: "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-qonto:0.1.0" + spec: + documentationUrl: "https://docsurl.com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Qonto Spec" + type: "object" + required: + - "endpoint" + - "organization_slug" + - "secret_key" + - "iban" + properties: + endpoint: + title: "Endpoint" + type: "string" + description: "Please choose the right endpoint to use in this connection" + enum: + - "Production" + - "Test Mocked API Server" + organization_slug: + title: "Organization slug" + type: "string" + description: "Organization slug used in Qonto" + secret_key: + title: "Secret Key" + type: "string" + description: "Secret key of the Qonto account" + airbyte_secret: true + iban: + title: "IBAN" + type: "string" + description: "International Bank Account Number linked used with your Qonto\ + \ Account" + pattern: "^[A-Z0-9]*$" + start_date: + title: "Start date" + type: "string" + description: "Start getting data from that date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-qualaroo:0.1.2" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/qualaroo" @@ -10275,6 +11781,55 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-recreation:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/recreation" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Recreation Spec" + type: "object" + required: + - "apikey" + additionalProperties: true + properties: + apikey: + title: "API Key" + type: "string" + description: "API Key" + airbyte_secret: true + query_campsites: + title: "Query Campsite" + type: "string" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-recruitee:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/recruitee" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Recruitee Spec" + type: "object" + required: + - "api_key" + - "company_id" + additionalProperties: true + properties: + api_key: + title: "API Key" + type: "string" + description: "Recruitee API Key. See here." + airbyte_secret: true + company_id: + title: "Company ID" + type: "integer" + description: "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-recurly:0.4.1" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/recurly" @@ -10382,6 +11937,25 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-reply-io:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/reply-io" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Reply Io Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + type: "string" + title: "API Token" + description: "The API Token for Reply" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-retently:0.1.2" spec: documentationUrl: "https://docsurl.com" @@ -10570,7 +12144,56 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-s3:0.1.25" +- dockerImage: "airbyte/source-rss:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/rss" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "RSS Spec" + type: "object" + required: + - "url" + properties: + url: + type: "string" + description: "RSS Feed URL" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-rocket-chat:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/rocket-chat" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Rocket Chat Spec" + type: "object" + required: + - "token" + - "user_id" + - "endpoint" + additionalProperties: true + properties: + endpoint: + title: "Endpoint" + type: "string" + description: "Your 
rocket.chat instance URL." + examples: + - "https://airbyte-connector-poc.rocket.chat" + - "https://hey.yoursite.com" + token: + title: "Token" + type: "string" + description: "Your API Token. See here. The token is case sensitive." + airbyte_secret: true + user_id: + title: "User ID." + type: "string" + description: "Your User Id." + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-s3:0.1.26" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/s3" changelogUrl: "https://docs.airbyte.com/integrations/sources/s3" @@ -10903,7 +12526,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-salesforce:1.0.24" +- dockerImage: "airbyte/source-salesforce:1.0.26" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/salesforce" connectionSpecification: @@ -11032,6 +12655,24 @@ type: "string" path_in_connector_config: - "client_secret" +- dockerImage: "airbyte/source-sap-fieldglass:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/sap-fieldglass" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-search-metrics:0.1.1" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/seacrh-metrics" @@ -11101,42 +12742,158 @@ description: "Data generated in SearchMetrics after this date will be replicated.\ \ This date must be specified in the format YYYY-MM-DDT00:00:00Z." examples: - - "20200925" - pattern: "^[0-9]{4}[0-9]{2}[0-9]{2}$" + - "20200925" + pattern: "^[0-9]{4}[0-9]{2}[0-9]{2}$" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-secoda:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/secoda" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Secoda Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + title: "Api Key" + type: "string" + description: "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-sendgrid:0.2.16" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/sendgrid" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Sendgrid Spec" + type: "object" + required: + - "apikey" + additionalProperties: true + properties: + apikey: + title: "Sendgrid API key" + airbyte_secret: true + type: "string" + description: "API Key, use admin to generate this key." + order: 0 + start_time: + title: "Start time" + type: + - "integer" + - "string" + description: "Start time in ISO8601 format. Any data before this time point\ + \ will not be replicated." 
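Editor's note: the Sendgrid `start_time` above is typed as either an integer or a string (the string examples follow just below). A hedged sketch of normalizing both shapes to a timezone-aware datetime; it assumes the integer form is a Unix timestamp, which the spec does not state, and that `python-dateutil` is installed.

```python
# Sketch: normalizing a Sendgrid-style start_time that may be an integer or an
# ISO8601-ish string. The epoch-seconds interpretation of the integer form is
# an assumption for illustration.
from datetime import datetime, timezone
from typing import Union

from dateutil import parser

def normalize_start_time(start_time: Union[int, str]) -> datetime:
    if isinstance(start_time, int):
        # Assumption: integers are epoch seconds, interpreted as UTC.
        return datetime.fromtimestamp(start_time, tz=timezone.utc)
    # Handles the string examples from the spec, e.g. "2021-12-12",
    # "2020-07-18T13:30:00.000Z", "2020-07-18 13:30:00+02:00".
    return parser.parse(start_time)

print(normalize_start_time("2020-07-18T13:30:00.000Z"))
print(normalize_start_time(1626615000))
```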
+ examples: + - "2021-12-12" + - "2021-02-01 13:30:00" + - "2020-07-18T13:30:00.000Z" + - "2020-07-18 13:30:00+02:00" + order: 1 + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-senseforce:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/senseforce" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Senseforce Source Spec" + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + additionalProperties: true + properties: + access_token: + type: "string" + title: "API Access Token" + description: "Your API access token. See here. The toke is case sensitive." + airbyte_secret: true + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. (Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their ULR)" + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 10 + minimum: 1 + maximum: 365 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + airbyte_hidden: true + description: "The time increment used by the connector when requesting data\ + \ from the Senseforce API. The bigger the value is, the less requests\ + \ will be made and faster the sync will be. On the other hand, the more\ + \ seldom the state is persisted and the more likely one could run into\ + \ rate limites. Furthermore, consider that large chunks of time might\ + \ take a long time for the Senseforce query to return data - meaning it\ + \ could take in effect longer than with more smaller time slices. If there\ + \ are a lot of data per day, set this setting to 1. If there is only very\ + \ little data per day, you might change the setting to 10 or more." 
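Editor's note: the Senseforce `slice_range` description above explains the trade-off between fewer, larger requests and more frequent state checkpoints. A short sketch of that day-based windowing, purely as an illustration of the idea rather than the connector's actual implementation.

```python
# Sketch: splitting [start, end] into slice_range-day windows, the mechanism the
# slice_range description above refers to. Illustrative only.
from datetime import date, timedelta
from typing import Iterator, Tuple

def date_slices(start: date, end: date, slice_range: int = 10) -> Iterator[Tuple[date, date]]:
    """Yield (window_start, window_end) pairs covering [start, end] inclusively."""
    current = start
    step = timedelta(days=slice_range)
    while current <= end:
        window_end = min(current + step - timedelta(days=1), end)
        yield current, window_end
        current = window_end + timedelta(days=1)

for lo, hi in date_slices(date(2022, 1, 1), date(2022, 2, 10), slice_range=10):
    print(lo, "->", hi)
```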
supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-sendgrid:0.2.16" +- dockerImage: "airbyte/source-sendinblue:0.1.0" spec: - documentationUrl: "https://docs.airbyte.com/integrations/sources/sendgrid" + documentationUrl: "https://docs.airbyte.com/integrations/sources/sendinblue" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" - title: "Sendgrid Spec" + title: "Sendinblue Spec" type: "object" required: - - "apikey" + - "api_key" additionalProperties: true properties: - apikey: - title: "Sendgrid API key" - airbyte_secret: true + api_key: + title: "API Key" type: "string" - description: "API Key, use admin to generate this key." - order: 0 - start_time: - title: "Start time" - type: - - "integer" - - "string" - description: "Start time in ISO8601 format. Any data before this time point\ - \ will not be replicated." - examples: - - "2021-12-12" - - "2021-02-01 13:30:00" - - "2020-07-18T13:30:00.000Z" - - "2020-07-18 13:30:00+02:00" - order: 1 + description: "Your API Key. See here." + airbyte_secret: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] @@ -11426,6 +13183,54 @@ oauthFlowOutputParameters: - - "access_token" - - "refresh_token" +- dockerImage: "airbyte/source-smaily:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/smaily" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Smaily Spec" + type: "object" + required: + - "api_subdomain" + - "api_username" + - "api_password" + additionalProperties: true + properties: + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + api_username: + type: "string" + title: "API User Username" + description: "API user username. See https://smaily.com/help/api/general/create-api-user/" + api_password: + type: "string" + title: "API User Password" + description: "API user password. 
See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-smartengage:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/smartengage" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "SmartEngage Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + title: "API Key" + type: "string" + description: "API Key" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-smartsheets:0.1.12" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/smartsheets" @@ -11556,7 +13361,7 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" -- dockerImage: "airbyte/source-snowflake:0.1.24" +- dockerImage: "airbyte/source-snowflake:0.1.26" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/snowflake" connectionSpecification: @@ -11585,7 +13390,6 @@ auth_type: type: "string" const: "OAuth" - default: "OAuth" order: 0 client_id: type: "string" @@ -11622,7 +13426,6 @@ auth_type: type: "string" const: "username/password" - default: "username/password" order: 0 username: description: "The username you created to allow Airbyte to access\ @@ -12043,6 +13846,26 @@ type: "string" path_in_connector_config: - "client_secret" +- dockerImage: "airbyte/source-statuspage:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/statuspage" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Statuspage Spec" + type: "object" + required: + - "api_key" + additionalProperties: true + properties: + api_key: + title: "API Key" + type: "string" + description: "Your API Key. See here." 
+ airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-stripe:0.1.40" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/stripe" @@ -12227,7 +14050,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-tempo:0.2.6" +- dockerImage: "airbyte/source-tempo:0.3.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/" connectionSpecification: @@ -12589,6 +14412,170 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-tmdb:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/tmdb" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Tmdb Spec" + type: "object" + required: + - "api_key" + - "movie_id" + - "query" + - "language" + additionalProperties: true + properties: + api_key: + title: "Unique key for establishing connection" + type: "string" + description: "API Key from tmdb account" + airbyte_secret: true + movie_id: + title: "Movie ID for targeting movies" + type: "string" + description: "Target movie ID, Mandate for movie streams (Example is 550)" + examples: + - 550 + - 560 + query: + title: "Query for search streams" + type: "string" + description: "Target movie ID, Mandate for search streams" + examples: + - "Marvel" + - "DC" + language: + title: "Language for filtering" + type: "string" + description: "Language expressed in ISO 639-1 scheme, Mandate for required\ + \ streams (Example en-US)" + examples: + - "en-US" + - "en-UK" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-toggl:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/toggl" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Toggl Spec" + type: "object" + required: + - "api_token" + - "organization_id" + - "workspace_id" + - "start_date" + - "end_date" + additionalProperties: true + properties: + api_token: + title: "API token" + type: "string" + description: "Your API Token. See here. The token is case sensitive." + airbyte_secret: true + organization_id: + title: "Organization ID" + type: "integer" + description: "Your organization id. See here." + workspace_id: + title: "Workspace ID" + type: "integer" + description: "Your workspace id. See here." + start_date: + title: "Start date" + type: "string" + description: "To retrieve time entries created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + end_date: + title: "End date" + type: "string" + description: "To retrieve time entries created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-the-guardian-api:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/the-guardian-api" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + additionalProperties: true + properties: + api_key: + title: "API Key" + type: "string" + description: "Your API Key. See here. The key is case sensitive." 
+ airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + description: "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." + examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-trello:0.1.6" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/trello" @@ -12682,55 +14669,125 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] -- dockerImage: "airbyte/source-twilio:0.1.13" +- dockerImage: "airbyte/source-twilio:0.1.13" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/twilio" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + additionalProperties: true + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + start_date: + title: "Replication Start Date" + description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. 
(in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + supportsIncremental: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: + - "append" +- dockerImage: "airbyte/source-twitter:0.1.0" + spec: + documentationUrl: "https://docsurl.com" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Twitter Spec" + type: "object" + required: + - "api_key" + - "query" + additionalProperties: true + properties: + api_key: + title: "API Key Token" + description: "API Key" + type: "string" + airbyte_secret: true + query: + title: "Twitter Search Query" + description: "Twitter Search query" + type: "string" + start_date: + title: "Start Date" + description: "From what date you want to start retrieving dta" + type: "string" + end_date: + title: "End Date" + description: "End data to retrieve data" + type: "string" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-tyntec-sms:0.1.0" spec: - documentationUrl: "https://docs.airbyte.com/integrations/sources/twilio" + documentationUrl: "https://docs.airbyte.com/integrations/sources/tyntec-sms" connectionSpecification: $schema: "http://json-schema.org/draft-07/schema#" - title: "Twilio Spec" + title: "Tyntec Sms Spec" type: "object" required: - - "account_sid" - - "auth_token" - - "start_date" + - "api_key" + - "to" + - "from" additionalProperties: true properties: - account_sid: - title: "Account ID" - description: "Twilio account SID" + api_key: + type: "string" + title: "Tyntec API Key" + description: "Your Tyntec API Key. See here" + order: 0 airbyte_secret: true + to: type: "string" + title: "SMS Message Recipient Phone" + description: "The phone number of the SMS message recipient (international)." order: 1 - auth_token: - title: "Auth Token" - description: "Twilio Auth Token." - airbyte_secret: true + from: type: "string" + title: "SMS Message Sender Phone" + description: "The phone number of the SMS message sender (international)." order: 2 - start_date: - title: "Replication Start Date" - description: "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ - \ data before this date will not be replicated." - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - examples: - - "2020-10-01T00:00:00Z" + message: type: "string" + title: "SMS Message Body" + description: "The content of the SMS message to be sent." order: 3 - lookback_window: - title: "Lookback window" - description: "How far into the past to look for records. 
(in minutes)" - examples: - - 60 - default: 0 - minimum: 0 - maximum: 576000 - type: "integer" - order: 4 - supportsIncremental: true supportsNormalization: false supportsDBT: false - supported_destination_sync_modes: - - "append" + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-typeform:0.1.9" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/typeform" @@ -12855,6 +14912,75 @@ - - "client_secret" oauthFlowOutputParameters: - - "refresh_token" +- dockerImage: "airbyte/source-youtube-analytics-business:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/youtube-analytics-business" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "YouTube Analytics Business Spec" + type: "object" + required: + - "credentials" + additionalProperties: true + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] + authSpecification: + auth_type: "oauth2.0" + oauth2Specification: + rootObject: + - "credentials" + oauthFlowInitParameters: + - - "client_id" + - - "client_secret" + oauthFlowOutputParameters: + - - "refresh_token" +- dockerImage: "airbyte/source-vantage:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/vantage" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Vantage Spec" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + access_token: + title: "API Access Token" + type: "string" + description: "Your API Access token. See here." 
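Editor's note: the `oauth2Specification` block for the YouTube Analytics Business source above declares a `rootObject` plus init/output parameter paths. A hedged sketch of one way to read that structure, nesting the flow parameters under the root object to produce the `credentials` shape the spec expects; the merge helper is my own illustration, not Airbyte's server-side implementation.

```python
# Sketch: placing OAuth flow parameters under the declared rootObject, matching
# the credentials object shape in the spec above. Values are placeholders.
from typing import Any, Dict, List

def place_oauth_params(root_object: List[str],
                       param_paths: List[List[str]],
                       values: Dict[str, str]) -> Dict[str, Any]:
    config: Dict[str, Any] = {}
    for path in param_paths:
        cursor = config
        for key in root_object + path[:-1]:
            cursor = cursor.setdefault(key, {})
        cursor[path[-1]] = values[path[-1]]
    return config

partial_config = place_oauth_params(
    root_object=["credentials"],
    param_paths=[["client_id"], ["client_secret"], ["refresh_token"]],
    values={"client_id": "...", "client_secret": "...", "refresh_token": "..."},
)
print(partial_config)
# {'credentials': {'client_id': '...', 'client_secret': '...', 'refresh_token': '...'}}
```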
+ airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "farosai/airbyte-victorops-source:0.1.23" spec: documentationUrl: "https://docs.faros.ai" @@ -12892,6 +15018,129 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-visma-economic:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/visma-economic" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Visma E-conomic Spec" + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + properties: + app_secret_token: + title: "App Secret Token" + type: "string" + description: "Identification token for app accessing data" + airbyte_secret: true + agreement_grant_token: + title: "Agreement Grant Token" + type: "string" + description: "Identifier for the grant issued by an agreement" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-vitally:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/vitally" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Vitally Spec" + type: "object" + required: + - "api_key" + - "status" + additionalProperties: true + properties: + api_key: + type: "string" + title: "API Token" + description: "The API Token for a Vitally account." + airbyte_secret: true + status: + type: "string" + title: "Status" + description: "Status of the Vitally accounts. One of the following values;\ + \ active, churned, activeOrChurned." + enum: + - "active" + - "churned" + - "activeOrChurned" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-xero:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/xero" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Xero Spec" + type: "object" + required: + - "authentication" + - "start_date" + - "scopes" + - "tenant_id" + - "client_id" + - "client_secret" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "Enter your Xero application's Client ID" + client_secret: + title: "Client Secret" + type: "string" + description: "Enter your Xero application's Client Secret" + airbyte_secret: true + tenant_id: + title: "Tenant ID" + type: "string" + description: "Enter your Xero organization's Tenant ID" + scopes: + title: "Scopes" + type: "string" + description: "Enter your required list of scopes (delimited by comma)" + authentication: + type: "object" + title: "Authentication" + description: "Type and additional credentials of the Xero API connection" + oneOf: + - title: "Authenticate via Xero (OAuth) (unsupported yet)" + type: "object" + required: + - "auth_type" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth" + refresh_token: + title: "Refresh Token" + type: "string" + description: "Enter your Xero application's refresh token" + airbyte_secret: true + - title: "Custom Connections Authentication" + type: "object" + required: + - "auth_type" + properties: + auth_type: + type: "string" + const: "custom_connection" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: "UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. 
Any\ + \ data with created_at before this data will not be synced." + examples: + - "2022-03-01T00:00:00Z" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-xkcd:0.1.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/sources/xkcd" @@ -12903,6 +15152,53 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-weatherstack:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.io/integrations/sources/weatherstack" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Weatherstack Spec" + type: "object" + required: + - "access_key" + - "query" + - "historical_date" + properties: + is_paid_account: + order: 0 + title: "Is Paid Account" + description: "Toggle if you're using a Paid subscription" + type: "boolean" + default: false + access_key: + order: 1 + type: "string" + description: "API access key used to retrieve data from the Weatherstack\ + \ API.(https://weatherstack.com/product)" + airbyte_secret: true + query: + order: 2 + type: "string" + description: "A location to query such as city, IP, latitudeLongitude, or\ + \ zipcode. Multiple locations with semicolon seperated if using a professional\ + \ plan or higher. For more info- (https://weatherstack.com/documentation#query_parameter)" + examples: + - "New York" + - "London" + - "98101" + historical_date: + order: 3 + type: "string" + description: "This is required for enabling the Historical date API with\ + \ format- (YYYY-MM-DD). * Note, only supported by paid accounts" + examples: + - "2015-01-21" + default": "2000-01-01" + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2}" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-webflow:0.1.2" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/webflow" @@ -13062,6 +15358,25 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-zapier-supported-storage:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/zapier-supported-storage" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Zapier Supported Storage Spec" + type: "object" + required: + - "secret" + additionalProperties: true + properties: + secret: + title: "Secret Key" + type: "string" + description: "Secret key supplied by zapier" + airbyte_secret: true + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-zendesk-chat:0.1.11" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/zendesk-chat" @@ -14295,6 +16610,30 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-workramp:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/workramp" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Workramp Spec" + type: "object" + required: + - "api_key" + - "academy_id" + additionalProperties: true + properties: + api_key: + type: "string" + title: "API Token" + description: "The API Token for Workramp" + airbyte_secret: true + academy_id: + type: "string" + title: "Academy ID" + description: "The id of the Academy" + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: 
"airbyte/source-yandex-metrica:0.1.0" spec: documentationUrl: "https://docsurl.com" @@ -14340,6 +16679,40 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: [] +- dockerImage: "airbyte/source-younium:0.1.0" + spec: + documentationUrl: "https://docs.airbyte.com/integrations/sources/younium" + connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: "Younium Spec" + type: "object" + required: + - "username" + - "password" + - "legal_entity" + properties: + username: + title: "Username" + type: "string" + description: "Username for Younium account" + password: + title: "Password" + type: "string" + description: "Account password for younium account API key" + airbyte_secret: true + legal_entity: + title: "Legal Entity" + type: "string" + description: "Legal Entity that data should be pulled from" + playground: + title: "Playground environment" + type: "boolean" + description: "Property defining if connector is used against playground\ + \ or production environment" + default: false + supportsNormalization: false + supportsDBT: false + supported_destination_sync_modes: [] - dockerImage: "airbyte/source-zoom:0.1.0" spec: documentationUrl: "https://docs.airbyte.com/integrations/sources/zoom" diff --git a/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java b/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java index aeae4f5a15b7a..753256f2b3765 100644 --- a/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java +++ b/airbyte-config/init/src/test/java/io/airbyte/config/init/ApplyDefinitionsHelperTest.java @@ -9,16 +9,23 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; +import io.airbyte.commons.version.Version; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.persistence.job.JobPersistence; +import io.airbyte.protocol.models.ConnectorSpecification; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.UUID; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; class ApplyDefinitionsHelperTest { @@ -29,41 +36,51 @@ class ApplyDefinitionsHelperTest { private static final String DOCUMENTATION_URL = "https://wwww.example.com"; private static final String DOCKER_REPOSITORY = "airbyte/connector"; private static final String DOCKER_TAG = "0.1.0"; + private static final String PROTOCOL_VERSION_1 = "1.0.0"; + private static final String PROTOCOL_VERSION_2 = "2.0.0"; public static final StandardSourceDefinition SOURCE_DEF1 = new StandardSourceDefinition() .withSourceDefinitionId(SOURCE_DEF_ID1) .withDockerRepository(DOCKER_REPOSITORY) .withDockerImageTag(DOCKER_TAG) .withName(CONNECT_NAME1) - .withDocumentationUrl(DOCUMENTATION_URL); + .withDocumentationUrl(DOCUMENTATION_URL) + .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_1)); public static final StandardSourceDefinition SOURCE_DEF2 = new StandardSourceDefinition() .withSourceDefinitionId(SOURCE_DEF_ID1) .withDockerRepository(DOCKER_REPOSITORY) 
.withDockerImageTag(DOCKER_TAG) .withName(CONNECT_NAME2) - .withDocumentationUrl(DOCUMENTATION_URL); + .withDocumentationUrl(DOCUMENTATION_URL) + .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_2)); + public static final StandardDestinationDefinition DEST_DEF1 = new StandardDestinationDefinition() .withDestinationDefinitionId(DEST_DEF_ID2) .withDockerRepository(DOCKER_REPOSITORY) .withDockerImageTag(DOCKER_TAG) .withName(CONNECT_NAME1) - .withDocumentationUrl(DOCUMENTATION_URL); + .withDocumentationUrl(DOCUMENTATION_URL) + .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_2)); + public static final StandardDestinationDefinition DEST_DEF2 = new StandardDestinationDefinition() .withDestinationDefinitionId(DEST_DEF_ID2) .withDockerRepository(DOCKER_REPOSITORY) .withDockerImageTag(DOCKER_TAG) .withName(CONNECT_NAME2) - .withDocumentationUrl(DOCUMENTATION_URL); + .withDocumentationUrl(DOCUMENTATION_URL) + .withSpec(new ConnectorSpecification().withProtocolVersion(PROTOCOL_VERSION_1)); private ConfigRepository configRepository; private DefinitionsProvider definitionsProvider; + private JobPersistence jobPersistence; private ApplyDefinitionsHelper applyDefinitionsHelper; @BeforeEach void setup() throws JsonValidationException, IOException { configRepository = mock(ConfigRepository.class); definitionsProvider = mock(DefinitionsProvider.class); + jobPersistence = mock(JobPersistence.class); - applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, definitionsProvider); + applyDefinitionsHelper = new ApplyDefinitionsHelper(configRepository, definitionsProvider, jobPersistence); // default calls to empty. when(configRepository.listStandardDestinationDefinitions(true)).thenReturn(Collections.emptyList()); @@ -132,4 +149,24 @@ void testApplyOSS() throws JsonValidationException, IOException { verifyNoMoreInteractions(definitionsProvider); } + @ParameterizedTest + @ValueSource(booleans = {false, true}) + void testDefinitionsFiltering(final boolean updateAll) throws JsonValidationException, IOException { + when(jobPersistence.getCurrentProtocolVersionRange()) + .thenReturn(Optional.of(new AirbyteProtocolVersionRange(new Version("2.0.0"), new Version("3.0.0")))); + + when(definitionsProvider.getSourceDefinitions()).thenReturn(List.of(SOURCE_DEF1, SOURCE_DEF2)); + when(definitionsProvider.getDestinationDefinitions()).thenReturn(List.of(DEST_DEF1, DEST_DEF2)); + + applyDefinitionsHelper.apply(updateAll); + + if (updateAll) { + verify(configRepository).writeStandardSourceDefinition(SOURCE_DEF2); + verify(configRepository).writeStandardDestinationDefinition(DEST_DEF1); + verifyNoMoreInteractions(configRepository); + } else { + verify(configRepository).seedActorDefinitions(List.of(SOURCE_DEF2), List.of(DEST_DEF1)); + } + } + } diff --git a/airbyte-connector-builder-server/.coveragerc b/airbyte-connector-builder-server/.coveragerc new file mode 100644 index 0000000000000..034c0c0c28f5e --- /dev/null +++ b/airbyte-connector-builder-server/.coveragerc @@ -0,0 +1,3 @@ +[report] +# show lines missing coverage +show_missing = true diff --git a/airbyte-connector-builder-server/.dockerignore b/airbyte-connector-builder-server/.dockerignore new file mode 100644 index 0000000000000..5cea6d6cbdd19 --- /dev/null +++ b/airbyte-connector-builder-server/.dockerignore @@ -0,0 +1,4 @@ +build +!build/airbyte_api_client +.venv +connector_builder.egg-info diff --git a/airbyte-connector-builder-server/.gitignore b/airbyte-connector-builder-server/.gitignore new 
file mode 100644 index 0000000000000..2456084a5e540 --- /dev/null +++ b/airbyte-connector-builder-server/.gitignore @@ -0,0 +1,3 @@ +.coverage +.venv +state_*.yaml diff --git a/airbyte-connector-builder-server/.python-version b/airbyte-connector-builder-server/.python-version new file mode 100644 index 0000000000000..a9f8d1be337f7 --- /dev/null +++ b/airbyte-connector-builder-server/.python-version @@ -0,0 +1 @@ +3.9.11 diff --git a/airbyte-connector-builder-server/CHANGELOG.md b/airbyte-connector-builder-server/CHANGELOG.md new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile new file mode 100644 index 0000000000000..03e47eeff4bf5 --- /dev/null +++ b/airbyte-connector-builder-server/Dockerfile @@ -0,0 +1,14 @@ +FROM python:3.9-slim as base + +RUN apt-get upgrade \ + && pip install --upgrade pip + +WORKDIR /home/connector-builder-server +COPY . ./ + +RUN pip install --no-cache-dir . + +ENTRYPOINT ["uvicorn", "connector_builder.entrypoint:app", "--host", "0.0.0.0", "--port", "80"] + +LABEL io.airbyte.version=0.40.19 +LABEL io.airbyte.name=airbyte/connector-builder-server diff --git a/airbyte-connector-builder-server/README.md b/airbyte-connector-builder-server/README.md new file mode 100644 index 0000000000000..d620e1eabf13a --- /dev/null +++ b/airbyte-connector-builder-server/README.md @@ -0,0 +1,29 @@ +# Connector builder + + +## Getting started + +Set up the virtual environment and install dependencies +```bash +python -m venv .venv +source .venv/bin/activate +pip install . +``` + +Then run the server +```bash +uvicorn connector_builder.entrypoint:app --host 0.0.0.0 --port 8080 +``` + +The server is now reachable on localhost:8080 + +### OpenAPI generation + +```bash +openapi-generator generate -i ../connector-builder-server/src/main/openapi/openapi.yaml -g python-fastapi -c openapi/generator_config.yaml -o build/server -t openapi/templates +``` + +Or you can run it via Gradle by running this from the Airbyte project root: +```bash +./gradlew :airbyte-connector-builder-server:generateOpenApiPythonServer +``` diff --git a/airbyte-connector-builder-server/build.gradle b/airbyte-connector-builder-server/build.gradle new file mode 100644 index 0000000000000..f73ddb7ecadd0 --- /dev/null +++ b/airbyte-connector-builder-server/build.gradle @@ -0,0 +1,61 @@ +import org.openapitools.generator.gradle.plugin.tasks.GenerateTask + +plugins { + id "org.openapi.generator" version "5.3.1" + id 'airbyte-python-docker' + id 'airbyte-docker' +} + +airbytePythonDocker { + moduleDirectory 'connector_builder' +} + +task generateOpenApiPythonServer(type: GenerateTask) { + outputs.upToDateWhen { false } + + def generatedCodeDir = "$buildDir/airbyte_connector_builder_server" + inputSpec = "$rootDir.absolutePath/airbyte-connector-builder-server/src/main/openapi/openapi.yaml" + outputDir = generatedCodeDir + + generatorName = "python-fastapi" + configFile = "$projectDir/openapi/generator_config.yaml" + templateDir = "$projectDir/openapi/templates" + packageName = "connector_builder.generated" + + // After we generate, we're only interested in the API declaration and the generated pydantic models. 
+ // So we copy those from the build/ directory + doLast { + def sourceDir = "$generatedCodeDir/src/connector_builder/generated/" + def targetDir = "$projectDir/connector_builder/generated" + mkdir targetDir + copy { + from "$sourceDir/apis" + include "*_interface.py", "__init__.py" + into "$targetDir/apis" + } + copy { + from "$sourceDir/models" + include "*.py" + into "$targetDir/models" + } + } +} + +project.build.dependsOn(generateOpenApiPythonServer) + +// java modules such as airbyte-server can use copyGeneratedTar to copy the files to the docker image +// We cannot do this here because we don't generate a tar file +// Instead, we copy the files into the build directory so they can be copied to the docker container +task prepareBuild(type: Copy) { + from layout.projectDirectory.file(".") + exclude '.*' + exclude 'build' + + + into layout.buildDirectory.dir("docker") +} + +tasks.named("buildDockerImage") { + dependsOn prepareBuild + dependsOn copyDocker +} diff --git a/airbyte-connector-builder-server/connector_builder/__init__.py b/airbyte-connector-builder-server/connector_builder/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-connector-builder-server/connector_builder/entrypoint.py b/airbyte-connector-builder-server/connector_builder/entrypoint.py new file mode 100644 index 0000000000000..642a742b534b7 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/entrypoint.py @@ -0,0 +1,25 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from connector_builder.generated.apis.default_api_interface import initialize_router +from connector_builder.impl.default_api import DefaultApiImpl + +app = FastAPI( + title="Connector Builder Server API", + description="Connector Builder Server API ", + version="1.0.0", +) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(initialize_router(DefaultApiImpl())) diff --git a/airbyte-connector-builder-server/connector_builder/generated/apis/__init__.py b/airbyte-connector-builder-server/connector_builder/generated/apis/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-connector-builder-server/connector_builder/generated/apis/default_api_interface.py b/airbyte-connector-builder-server/connector_builder/generated/apis/default_api_interface.py new file mode 100644 index 0000000000000..70e031d41df93 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/apis/default_api_interface.py @@ -0,0 +1,147 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# +# This file was auto-generated from Airbyte's custom OpenAPI templates. Do not edit it manually. 
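Editor's note: `entrypoint.py` above wires the generated router into a FastAPI app. A hedged sketch of exercising the running server (started with `uvicorn connector_builder.entrypoint:app --host 0.0.0.0 --port 8080` per the README), hitting the GET route registered in `default_api_interface.py` below; it assumes the `requests` package is installed and the server is up locally.

```python
# Sketch: calling the connector-builder server's manifest_template endpoint.
import requests

BASE_URL = "http://localhost:8080"  # port from the README's uvicorn command

resp = requests.get(f"{BASE_URL}/v1/manifest_template", timeout=10)
resp.raise_for_status()
# The endpoint's response model is a plain str, so the JSON body decodes to a
# Python string containing the default manifest template.
print(resp.json())
```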
+# coding: utf-8 + +import inspect +from abc import ABC, abstractmethod +from typing import Callable, Dict, List # noqa: F401 + +from fastapi import ( # noqa: F401 + APIRouter, + Body, + Cookie, + Depends, + Form, + Header, + Path, + Query, + Response, + Security, + status, +) + +from connector_builder.generated.models.extra_models import TokenModel # noqa: F401 + + +from connector_builder.generated.models.invalid_input_exception_info import InvalidInputExceptionInfo +from connector_builder.generated.models.known_exception_info import KnownExceptionInfo +from connector_builder.generated.models.stream_read import StreamRead +from connector_builder.generated.models.stream_read_request_body import StreamReadRequestBody +from connector_builder.generated.models.streams_list_read import StreamsListRead +from connector_builder.generated.models.streams_list_request_body import StreamsListRequestBody + + +class DefaultApi(ABC): + """ + NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + Do not edit the class manually. + """ + + @abstractmethod + async def get_manifest_template( + self, + ) -> str: + """ + Return a connector manifest template to use as the default value for the yaml editor + """ + + @abstractmethod + async def list_streams( + self, + streams_list_request_body: StreamsListRequestBody = Body(None, description=""), + ) -> StreamsListRead: + """ + List all streams present in the connector manifest, along with their specific request URLs + """ + + @abstractmethod + async def read_stream( + self, + stream_read_request_body: StreamReadRequestBody = Body(None, description=""), + ) -> StreamRead: + """ + Reads a specific stream in the source. TODO in a later phase - only read a single slice of data. + """ + + +def _assert_signature_is_set(method: Callable) -> None: + """ + APIRouter().add_api_route expects the input method to have a signature. It gets signatures + by running inspect.signature(method) under the hood. + + In the case that an instance method does not declare "self" as an input parameter (due to developer error + for example), then the call to inspect.signature() raises a ValueError and fails. + + Ideally, we'd automatically detect & correct this problem. To do that, we'd need to do + setattr(method, "__signature__", ) but that's not possible because instance + methods (i.e the input to this function) are object subclasses, and you can't use setattr on objects + (https://stackoverflow.com/a/12839070/3237889) + + The workaround this method implements is to raise an exception at runtime if the input method fails + when inspect.signature() is called. This is good enough because the error will be detected + immediately when the developer tries to run the server, so builds should very quickly fail and this + will practically never make it to a production scenario. + """ + try: + inspect.signature(method) + except ValueError as e: + # Based on empirical observation, the call to inspect fails with a ValueError + # with exactly one argument: "invalid method signature" + if e.args and len(e.args) == 1 and e.args[0] == "invalid method signature": + # I couldn't figure out how to setattr on a "method" object to populate the signature. For now just kick + # it back to the developer and tell them to set the "self" variable + raise Exception(f"Method {method.__name__} in class {type(method.__self__).__name__} must declare the variable 'self'. 
") + else: + raise + + +def initialize_router(api: DefaultApi) -> APIRouter: + router = APIRouter() + + _assert_signature_is_set(api.get_manifest_template) + router.add_api_route( + "/v1/manifest_template", + endpoint=api.get_manifest_template, + methods=["GET"], + responses={ + 200: {"model": str, "description": "Successful operation"}, + }, + tags=["default"], + summary="Return a connector manifest template to use as the default value for the yaml editor", + response_model_by_alias=True, + ) + + _assert_signature_is_set(api.list_streams) + router.add_api_route( + "/v1/streams/list", + endpoint=api.list_streams, + methods=["POST"], + responses={ + 200: {"model": StreamsListRead, "description": "Successful operation"}, + 400: {"model": KnownExceptionInfo, "description": "Exception occurred; see message for details."}, + 422: {"model": InvalidInputExceptionInfo, "description": "Input failed validation"}, + }, + tags=["default"], + summary="List all streams present in the connector manifest, along with their specific request URLs", + response_model_by_alias=True, + ) + + _assert_signature_is_set(api.read_stream) + router.add_api_route( + "/v1/stream/read", + endpoint=api.read_stream, + methods=["POST"], + responses={ + 200: {"model": StreamRead, "description": "Successful operation"}, + 400: {"model": KnownExceptionInfo, "description": "Exception occurred; see message for details."}, + 422: {"model": InvalidInputExceptionInfo, "description": "Input failed validation"}, + }, + tags=["default"], + summary="Reads a specific stream in the source. TODO in a later phase - only read a single slice of data.", + response_model_by_alias=True, + ) + + + return router diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/__init__.py b/airbyte-connector-builder-server/connector_builder/generated/models/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/extra_models.py b/airbyte-connector-builder-server/connector_builder/generated/models/extra_models.py new file mode 100644 index 0000000000000..a3a283fb842b3 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/extra_models.py @@ -0,0 +1,8 @@ +# coding: utf-8 + +from pydantic import BaseModel + +class TokenModel(BaseModel): + """Defines a token model.""" + + sub: str diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_request.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_request.py new file mode 100644 index 0000000000000..e72d94e9aaa98 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/http_request.py @@ -0,0 +1,30 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class HttpRequest(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + HttpRequest - a model defined in OpenAPI + + url: The url of this HttpRequest. + parameters: The parameters of this HttpRequest [Optional]. + body: The body of this HttpRequest [Optional]. + headers: The headers of this HttpRequest [Optional]. 
+ """ + + url: str + parameters: Optional[Dict[str, Any]] = None + body: Optional[Dict[str, Any]] = None + headers: Optional[Dict[str, Any]] = None + +HttpRequest.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/http_response.py b/airbyte-connector-builder-server/connector_builder/generated/models/http_response.py new file mode 100644 index 0000000000000..49a81ebeb628d --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/http_response.py @@ -0,0 +1,28 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class HttpResponse(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + HttpResponse - a model defined in OpenAPI + + status: The status of this HttpResponse. + body: The body of this HttpResponse [Optional]. + headers: The headers of this HttpResponse [Optional]. + """ + + status: int + body: Optional[Dict[str, Any]] = None + headers: Optional[Dict[str, Any]] = None + +HttpResponse.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_exception_info.py b/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_exception_info.py new file mode 100644 index 0000000000000..878690e098259 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_exception_info.py @@ -0,0 +1,31 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 +from connector_builder.generated.models.invalid_input_property import InvalidInputProperty + + +class InvalidInputExceptionInfo(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + InvalidInputExceptionInfo - a model defined in OpenAPI + + message: The message of this InvalidInputExceptionInfo. + exception_class_name: The exception_class_name of this InvalidInputExceptionInfo [Optional]. + exception_stack: The exception_stack of this InvalidInputExceptionInfo [Optional]. + validation_errors: The validation_errors of this InvalidInputExceptionInfo. 
+ """ + + message: str + exception_class_name: Optional[str] = None + exception_stack: Optional[List[str]] = None + validation_errors: List[InvalidInputProperty] + +InvalidInputExceptionInfo.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_property.py b/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_property.py new file mode 100644 index 0000000000000..35e17c1120901 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/invalid_input_property.py @@ -0,0 +1,28 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class InvalidInputProperty(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + InvalidInputProperty - a model defined in OpenAPI + + property_path: The property_path of this InvalidInputProperty. + invalid_value: The invalid_value of this InvalidInputProperty [Optional]. + message: The message of this InvalidInputProperty [Optional]. + """ + + property_path: str + invalid_value: Optional[str] = None + message: Optional[str] = None + +InvalidInputProperty.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/known_exception_info.py b/airbyte-connector-builder-server/connector_builder/generated/models/known_exception_info.py new file mode 100644 index 0000000000000..d349fddc9cb57 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/known_exception_info.py @@ -0,0 +1,28 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class KnownExceptionInfo(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + KnownExceptionInfo - a model defined in OpenAPI + + message: The message of this KnownExceptionInfo. + exception_class_name: The exception_class_name of this KnownExceptionInfo [Optional]. + exception_stack: The exception_stack of this KnownExceptionInfo [Optional]. + """ + + message: str + exception_class_name: Optional[str] = None + exception_stack: Optional[List[str]] = None + +KnownExceptionInfo.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read.py new file mode 100644 index 0000000000000..8a3b8412d7e44 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read.py @@ -0,0 +1,27 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 +from connector_builder.generated.models.stream_read_slices import StreamReadSlices + + +class StreamRead(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). 
+ + Do not edit the class manually. + + StreamRead - a model defined in OpenAPI + + logs: The logs of this StreamRead. + slices: The slices of this StreamRead. + """ + + logs: List[object] + slices: List[StreamReadSlices] + +StreamRead.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_pages.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_pages.py new file mode 100644 index 0000000000000..e79feb8fc3822 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_pages.py @@ -0,0 +1,30 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 +from connector_builder.generated.models.http_request import HttpRequest +from connector_builder.generated.models.http_response import HttpResponse + + +class StreamReadPages(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamReadPages - a model defined in OpenAPI + + records: The records of this StreamReadPages. + request: The request of this StreamReadPages [Optional]. + response: The response of this StreamReadPages [Optional]. + """ + + records: List[object] + request: Optional[HttpRequest] = None + response: Optional[HttpResponse] = None + +StreamReadPages.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_request_body.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_request_body.py new file mode 100644 index 0000000000000..e57be491017d8 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_request_body.py @@ -0,0 +1,30 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class StreamReadRequestBody(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamReadRequestBody - a model defined in OpenAPI + + manifest: The manifest of this StreamReadRequestBody. + stream: The stream of this StreamReadRequestBody. + config: The config of this StreamReadRequestBody. + state: The state of this StreamReadRequestBody [Optional]. 
+ """ + + manifest: Dict[str, Any] + stream: str + config: Dict[str, Any] + state: Optional[Dict[str, Any]] = None + +StreamReadRequestBody.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slice_descriptor.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slice_descriptor.py new file mode 100644 index 0000000000000..d8cbe98f031a4 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slice_descriptor.py @@ -0,0 +1,26 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class StreamReadSliceDescriptor(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamReadSliceDescriptor - a model defined in OpenAPI + + start_datetime: The start_datetime of this StreamReadSliceDescriptor [Optional]. + list_item: The list_item of this StreamReadSliceDescriptor [Optional]. + """ + + start_datetime: Optional[datetime] = None + list_item: Optional[str] = None + +StreamReadSliceDescriptor.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slices.py b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slices.py new file mode 100644 index 0000000000000..1cfddd2a9efbd --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/stream_read_slices.py @@ -0,0 +1,30 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 +from connector_builder.generated.models.stream_read_pages import StreamReadPages +from connector_builder.generated.models.stream_read_slice_descriptor import StreamReadSliceDescriptor + + +class StreamReadSlices(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamReadSlices - a model defined in OpenAPI + + pages: The pages of this StreamReadSlices. + slice_descriptor: The slice_descriptor of this StreamReadSlices [Optional]. + state: The state of this StreamReadSlices [Optional]. 
+ """ + + pages: List[StreamReadPages] + slice_descriptor: Optional[StreamReadSliceDescriptor] = None + state: Optional[Dict[str, Any]] = None + +StreamReadSlices.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read.py b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read.py new file mode 100644 index 0000000000000..746b0daccb44d --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read.py @@ -0,0 +1,25 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 +from connector_builder.generated.models.streams_list_read_streams import StreamsListReadStreams + + +class StreamsListRead(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamsListRead - a model defined in OpenAPI + + streams: The streams of this StreamsListRead. + """ + + streams: List[StreamsListReadStreams] + +StreamsListRead.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read_streams.py b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read_streams.py new file mode 100644 index 0000000000000..643de2043e073 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_read_streams.py @@ -0,0 +1,26 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class StreamsListReadStreams(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamsListReadStreams - a model defined in OpenAPI + + name: The name of this StreamsListReadStreams. + url: The url of this StreamsListReadStreams. + """ + + name: str + url: str + +StreamsListReadStreams.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_request_body.py b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_request_body.py new file mode 100644 index 0000000000000..1cee99805f05c --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/generated/models/streams_list_request_body.py @@ -0,0 +1,26 @@ +# coding: utf-8 + +from __future__ import annotations +from datetime import date, datetime # noqa: F401 + +import re # noqa: F401 +from typing import Any, Dict, List, Optional # noqa: F401 + +from pydantic import AnyUrl, BaseModel, EmailStr, validator # noqa: F401 + + +class StreamsListRequestBody(BaseModel): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + + StreamsListRequestBody - a model defined in OpenAPI + + manifest: The manifest of this StreamsListRequestBody. + config: The config of this StreamsListRequestBody. 
+ """ + + manifest: Dict[str, Any] + config: Dict[str, Any] + +StreamsListRequestBody.update_forward_refs() diff --git a/airbyte-connector-builder-server/connector_builder/impl/default_api.py b/airbyte-connector-builder-server/connector_builder/impl/default_api.py new file mode 100644 index 0000000000000..fba0626ace936 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/impl/default_api.py @@ -0,0 +1,204 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json +import logging +from json import JSONDecodeError +from typing import Any, Dict, Iterable, Optional, Union +from urllib.parse import parse_qs, urljoin, urlparse + +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, Type +from fastapi import Body, HTTPException +from jsonschema import ValidationError + +from connector_builder.generated.apis.default_api_interface import DefaultApi +from connector_builder.generated.models.http_request import HttpRequest +from connector_builder.generated.models.http_response import HttpResponse +from connector_builder.generated.models.stream_read import StreamRead +from connector_builder.generated.models.stream_read_pages import StreamReadPages +from connector_builder.generated.models.stream_read_request_body import StreamReadRequestBody +from connector_builder.generated.models.stream_read_slices import StreamReadSlices +from connector_builder.generated.models.streams_list_read import StreamsListRead +from connector_builder.generated.models.streams_list_read_streams import StreamsListReadStreams +from connector_builder.generated.models.streams_list_request_body import StreamsListRequestBody +from connector_builder.impl.low_code_cdk_adapter import LowCodeSourceAdapter + + +class DefaultApiImpl(DefaultApi): + logger = logging.getLogger("airbyte.connector-builder") + + async def get_manifest_template(self) -> str: + return """version: "0.1.0" +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://example.com" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + customers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "customers" + primary_key: "id" + path: "/example" + +streams: + - "*ref(definitions.customers_stream)" + +check: + stream_names: + - "customers" + +spec: + documentation_url: https://docsurl.com + connection_specification: + title: Source Name Spec # 'TODO: Replace this with the name of your source.' + type: object + required: + - api_key + additionalProperties: true + properties: + # 'TODO: This schema defines the configuration required for the source. 
This usually involves metadata such as database and/or authentication information.': + api_key: + type: string + description: API Key +""" + + async def list_streams(self, streams_list_request_body: StreamsListRequestBody = Body(None, description="")) -> StreamsListRead: + """ + Takes in a low code manifest and a config to resolve the list of streams that are available for testing + :param streams_list_request_body: Input parameters to retrieve the list of available streams + :return: Stream objects made up of a stream name and the HTTP URL it will send requests to + """ + adapter = self._create_low_code_adapter(manifest=streams_list_request_body.manifest) + + stream_list_read = [] + try: + for http_stream in adapter.get_http_streams(streams_list_request_body.config): + stream_list_read.append( + StreamsListReadStreams( + name=http_stream.name, + url=urljoin(http_stream.url_base, http_stream.path()), + ) + ) + except Exception as error: + raise HTTPException(status_code=400, detail=f"Could not list streams with with error: {str(error)}") + return StreamsListRead(streams=stream_list_read) + + async def read_stream(self, stream_read_request_body: StreamReadRequestBody = Body(None, description="")) -> StreamRead: + """ + Using the provided manifest and config, invokes a sync for the specified stream and returns groups of Airbyte messages + that are produced during the read operation + :param stream_read_request_body: Input parameters to trigger the read operation for a stream + :return: Airbyte record messages produced by the sync grouped by slice and page + """ + adapter = self._create_low_code_adapter(manifest=stream_read_request_body.manifest) + + single_slice = StreamReadSlices(pages=[]) + log_messages = [] + try: + for message_group in self._get_message_groups( + adapter.read_stream(stream_read_request_body.stream, stream_read_request_body.config) + ): + if isinstance(message_group, AirbyteLogMessage): + log_messages.append({"message": message_group.message}) + else: + single_slice.pages.append(message_group) + except Exception as error: + # TODO: We're temporarily using FastAPI's default exception model. Ideally we should use exceptions defined in the OpenAPI spec + raise HTTPException(status_code=400, detail=f"Could not perform read with with error: {str(error)}") + + return StreamRead(logs=log_messages, slices=[single_slice]) + + def _get_message_groups(self, messages: Iterable[AirbyteMessage]) -> Iterable[Union[StreamReadPages, AirbyteLogMessage]]: + """ + Message groups are partitioned according to when request log messages are received. Subsequent response log messages + and record messages belong to the prior request log message and when we encounter another request, append the latest + message group. + + Messages received from the CDK read operation will always arrive in the following order: + {type: LOG, log: {message: "request: ..."}} + {type: LOG, log: {message: "response: ..."}} + ... 
0 or more record messages + {type: RECORD, record: {data: ...}} + {type: RECORD, record: {data: ...}} + Repeats for each request/response made + + Note: The exception is that normal log messages can be received at any time which are not incorporated into grouping + """ + first_page = True + current_records = [] + current_page_request: Optional[HttpRequest] = None + current_page_response: Optional[HttpResponse] = None + for message in messages: + if first_page and message.type == Type.LOG and message.log.message.startswith("request:"): + first_page = False + request = self._create_request_from_log_message(message.log) + current_page_request = request + elif message.type == Type.LOG and message.log.message.startswith("request:"): + if not current_page_request or not current_page_response: + raise ValueError("Every message grouping should have at least one request and response") + yield StreamReadPages(request=current_page_request, response=current_page_response, records=current_records) + current_page_request = self._create_request_from_log_message(message.log) + current_records = [] + elif message.type == Type.LOG and message.log.message.startswith("response:"): + current_page_response = self._create_response_from_log_message(message.log) + elif message.type == Type.LOG: + yield message.log + elif message.type == Type.RECORD: + current_records.append(message.record.data) + else: + if not current_page_request or not current_page_response: + raise ValueError("Every message grouping should have at least one request and response") + yield StreamReadPages(request=current_page_request, response=current_page_response, records=current_records) + + def _create_request_from_log_message(self, log_message: AirbyteLogMessage) -> Optional[HttpRequest]: + # TODO: As a temporary stopgap, the CDK emits request data as a log message string. Ideally this should come in the + # form of a custom message object defined in the Airbyte protocol, but this unblocks us in the immediate while the + # protocol change is worked on. + raw_request = log_message.message.partition("request:")[2] + try: + request = json.loads(raw_request) + url = urlparse(request.get("url", "")) + full_path = f"{url.scheme}://{url.hostname}{url.path}" if url else "" + parameters = parse_qs(url.query) or None + return HttpRequest(url=full_path, headers=request.get("headers"), parameters=parameters, body=request.get("body")) + except JSONDecodeError as error: + self.logger.warning(f"Failed to parse log message into request object with error: {error}") + return None + + def _create_response_from_log_message(self, log_message: AirbyteLogMessage) -> Optional[HttpResponse]: + # TODO: As a temporary stopgap, the CDK emits response data as a log message string. Ideally this should come in the + # form of a custom message object defined in the Airbyte protocol, but this unblocks us in the immediate while the + # protocol change is worked on. 
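+        # Note: the log line carries the whole response as JSON after the "response:" prefix, and the
+        # response body itself arrives as a JSON-encoded string, so it is decoded in a second step below
+        # (defaulting to an empty object when no body is present).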
+ raw_response = log_message.message.partition("response:")[2] + try: + response = json.loads(raw_response) + body = json.loads(response.get("body", "{}")) + return HttpResponse(status=response.get("status_code"), body=body, headers=response.get("headers")) + except JSONDecodeError as error: + self.logger.warning(f"Failed to parse log message into response object with error: {error}") + return None + + @staticmethod + def _create_low_code_adapter(manifest: Dict[str, Any]) -> LowCodeSourceAdapter: + try: + return LowCodeSourceAdapter(manifest=manifest) + except Exception as error: + # TODO: We're temporarily using FastAPI's default exception model. Ideally we should use exceptions defined in the OpenAPI spec + raise HTTPException(status_code=400, detail=f"Invalid connector manifest with error: {str(error)}") diff --git a/airbyte-connector-builder-server/connector_builder/impl/low_code_cdk_adapter.py b/airbyte-connector-builder-server/connector_builder/impl/low_code_cdk_adapter.py new file mode 100644 index 0000000000000..cea1e99f27ba2 --- /dev/null +++ b/airbyte-connector-builder-server/connector_builder/impl/low_code_cdk_adapter.py @@ -0,0 +1,49 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from typing import Any, Dict, Iterable, List + +from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog +from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream +from airbyte_cdk.sources.declarative.yaml_declarative_source import ManifestDeclarativeSource +from airbyte_cdk.sources.streams.http import HttpStream + + +class LowCodeSourceAdapter: + def __init__(self, manifest: Dict[str, Any]): + # Request and response messages are only emitted for a sources that have debug turned on + self._source = ManifestDeclarativeSource(manifest, debug=True) + + def get_http_streams(self, config: Dict[str, Any]) -> List[HttpStream]: + http_streams = [] + for stream in self._source.streams(config=config): + if isinstance(stream, DeclarativeStream): + if isinstance(stream.retriever, HttpStream): + http_streams.append(stream.retriever) + else: + raise TypeError( + f"A declarative stream should only have a retriever of type HttpStream, but received: {stream.retriever.__class__}") + else: + raise TypeError(f"A declarative source should only contain streams of type DeclarativeStream, but received: {stream.__class__}") + return http_streams + + def read_stream(self, stream: str, config: Dict[str, Any]) -> Iterable[AirbyteMessage]: + configured_catalog = ConfiguredAirbyteCatalog.parse_obj( + { + "streams": [ + { + "stream": { + "name": stream, + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + } + ] + } + ) + generator = self._source.read(logger=self._source.logger, config=config, catalog=configured_catalog) + for message in generator: + yield message diff --git a/airbyte-connector-builder-server/gradle.properties b/airbyte-connector-builder-server/gradle.properties new file mode 100644 index 0000000000000..7c28b7f249211 --- /dev/null +++ b/airbyte-connector-builder-server/gradle.properties @@ -0,0 +1 @@ +dockerImageName=connector-builder-server \ No newline at end of file diff --git a/airbyte-connector-builder-server/integration_tests/test_integration_test.py b/airbyte-connector-builder-server/integration_tests/test_integration_test.py new file mode 100644 index 0000000000000..d770b3dd27eb7 --- /dev/null +++ 
b/airbyte-connector-builder-server/integration_tests/test_integration_test.py @@ -0,0 +1,7 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +def test_test(): + assert True diff --git a/airbyte-connector-builder-server/openapi/README.md b/airbyte-connector-builder-server/openapi/README.md new file mode 100644 index 0000000000000..e6ceb3c4e9aac --- /dev/null +++ b/airbyte-connector-builder-server/openapi/README.md @@ -0,0 +1,25 @@ +# OpenAPI templates +This directory contains custom OpenAPI templates used to generate Python code for the FastAPI. + +**But why?** + +At the time we made this service (Nov 2022), no OSS OpenAPI generators enabled spec-first development. So we made these custom templates. + +For the full context, see: https://github.com/airbytehq/airbyte/issues/17813 + +## How we're using templates +At a high level, the expected usage pattern for these templates is to generate code using the `python-fastapi` OpenAPI generator, then copy the `models` module and the `apis` into your project. This flow should work continuously i.e: as your spec evolves, it is safe to re-do this operation. + +The only change we're making to `python-fastapi` is to define an abstract class `AbstractApi` in which every method corresponds to an API endpoint. The developer is expected to extend the class and use that to instantiate the `APIRouter` provided to FastAPI. + +The existing `python-fastapi` OpenAPI generator does a pretty good job generating Pydantic models for entities declared in the OpenAPI spec, so we take those as-is. + +## Making changes to the templates +Please make sure you are at least familiar with the [User-defined Templates](https://openapi-generator.tech/docs/customization#user-defined-templates) section of the OpenAPI docs before you start iterating. + +Relevant OpenAPI docs: +- https://openapi-generator.tech/docs/customization +- https://openapi-generator.tech/docs/templating +- https://openapi-generator.tech/docs/debugging + +Happy templating! diff --git a/airbyte-connector-builder-server/openapi/generator_config.yaml b/airbyte-connector-builder-server/openapi/generator_config.yaml new file mode 100644 index 0000000000000..b36cfbe63ac38 --- /dev/null +++ b/airbyte-connector-builder-server/openapi/generator_config.yaml @@ -0,0 +1,4 @@ +files: + api_interfaces.mustache: + templateType: API + destinationFilename: _interface.py diff --git a/airbyte-connector-builder-server/openapi/templates/api_interfaces.mustache b/airbyte-connector-builder-server/openapi/templates/api_interfaces.mustache new file mode 100644 index 0000000000000..06a5f93ee3a8e --- /dev/null +++ b/airbyte-connector-builder-server/openapi/templates/api_interfaces.mustache @@ -0,0 +1,121 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# +# This file was auto-generated from Airbyte's custom OpenAPI templates. Do not edit it manually. +# coding: utf-8 + +import inspect +from abc import ABC, abstractmethod +from typing import Callable, Dict, List # noqa: F401 + +from fastapi import ( # noqa: F401 + APIRouter, + Body, + Cookie, + Depends, + Form, + Header, + Path, + Query, + Response, + Security, + status, +) + +from {{modelPackage}}.extra_models import TokenModel # noqa: F401 + + +{{#imports}} +{{import}} +{{/imports}} + + +{{#operations}} +class {{classname}}(ABC): + """ + NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + Do not edit the class manually. 
+ """ + + {{#operation}} + @abstractmethod + async def {{operationId}}( + self, + {{#allParams}} + {{>endpoint_argument_definition}}, + {{/allParams}} + {{#hasAuthMethods}} + {{#authMethods}} + token_{{name}}: TokenModel = Security( + get_token_{{name}}{{#isOAuth}}, scopes=[{{#scopes}}"{{scope}}"{{^-last}}, {{/-last}}{{/scopes}}]{{/isOAuth}} + ), + {{/authMethods}} + {{/hasAuthMethods}} + ) -> {{returnType}}{{^returnType}}None{{/returnType}}: + """ + {{summary}} + """ + + {{/operation}} + {{/operations}} + +def _assert_signature_is_set(method: Callable) -> None: + """ + APIRouter().add_api_route expects the input method to have a signature. It gets signatures + by running inspect.signature(method) under the hood. + + In the case that an instance method does not declare "self" as an input parameter (due to developer error + for example), then the call to inspect.signature() raises a ValueError and fails. + + Ideally, we'd automatically detect & correct this problem. To do that, we'd need to do + setattr(method, "__signature__", ) but that's not possible because instance + methods (i.e the input to this function) are object subclasses, and you can't use setattr on objects + (https://stackoverflow.com/a/12839070/3237889) + + The workaround this method implements is to raise an exception at runtime if the input method fails + when inspect.signature() is called. This is good enough because the error will be detected + immediately when the developer tries to run the server, so builds should very quickly fail and this + will practically never make it to a production scenario. + """ + try: + inspect.signature(method) + except ValueError as e: + # Based on empirical observation, the call to inspect fails with a ValueError + # with exactly one argument: "invalid method signature" + if e.args and len(e.args) == 1 and e.args[0] == "invalid method signature": + # I couldn't figure out how to setattr on a "method" object to populate the signature. For now just kick + # it back to the developer and tell them to set the "self" variable + raise Exception(f"Method {method.__name__} in class {type(method.__self__).__name__} must declare the variable 'self'. 
") + else: + raise + + +{{#operations}} +def initialize_router(api: {{classname}}) -> APIRouter: + router = APIRouter() + + {{#operation}} + _assert_signature_is_set(api.{{operationId}}) + router.add_api_route( + "{{path}}", + endpoint=api.{{operationId}}, + methods=["{{#lambda.uppercase}}{{httpMethod}}{{/lambda.uppercase}}"], + responses={ + {{#responses}} + {{code}}: {{=<% %>=}}{<%#dataType%>"model": <%dataType%>, "description": "<%message%>"<%/dataType%><%^dataType%>"description": "<%message%>"<%/dataType%>}<%={{ }}=%>, + {{/responses}} + }, + tags=[{{#tags}}"{{name}}"{{^-last}},{{/-last}}{{/tags}}], + {{#summary}} + summary="{{.}}", + {{/summary}} + {{#description}} + description = "{{.}}", + {{/description}} + response_model_by_alias=True, + ) + + {{/operation}} + {{/operations}} + + return router diff --git a/airbyte-connector-builder-server/pytest.ini b/airbyte-connector-builder-server/pytest.ini new file mode 100644 index 0000000000000..58f2d9ae315e8 --- /dev/null +++ b/airbyte-connector-builder-server/pytest.ini @@ -0,0 +1,7 @@ +[pytest] +log_cli = 1 +log_cli_level = INFO +log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) +log_cli_date_format=%Y-%m-%d %H:%M:%S +markers = + integration: marks tests as integration test (deselect with '-m "not integration"') \ No newline at end of file diff --git a/airbyte-connector-builder-server/run_format.sh b/airbyte-connector-builder-server/run_format.sh new file mode 100755 index 0000000000000..75f996ad67f06 --- /dev/null +++ b/airbyte-connector-builder-server/run_format.sh @@ -0,0 +1,21 @@ +cd $1 + +# Pasted from https://github.com/airbytehq/airbyte/blob/master/buildSrc/src/main/groovy/airbyte-python.gradle#L85-L96 +pip install 'mccabe==0.6.1' +pip install 'flake8==4.0.1' +pip install 'pyproject-flake8==0.0.1a2' +pip install 'black==22.3.0' +pip install 'mypy==0.930' +pip install 'isort==5.6.4' +pip install 'pytest==6.1.2' +pip install 'coverage[toml]==6.3.1' + +# Format and static analysis +# FIXME: isort formats python files differently from gradlew format +python -m isort --settings-file=pyproject.toml ./ +python -m isort --settings-file=pyproject.toml --diff --quiet ./ +python -m black --config pyproject.toml ./ +python -m black --config pyproject.toml ./ --diff --quiet +python -m pflake8 --config pyproject.toml ./ +python -m pflake8 --config pyproject.toml ./ --diff --quiet +python -m mypy --config pyproject.toml ./ diff --git a/airbyte-connector-builder-server/run_tests.sh b/airbyte-connector-builder-server/run_tests.sh new file mode 100755 index 0000000000000..44143fb895c87 --- /dev/null +++ b/airbyte-connector-builder-server/run_tests.sh @@ -0,0 +1,10 @@ +cd $1 + +# Install dependencies +pip install -e . +pip install -e '.[main]' +pip install -e '.[tests]' + +# Run the tests +python -m coverage run -m pytest unit_tests -c pytest.ini +python -m coverage run -m pytest integration_tests -c pytest.ini diff --git a/airbyte-connector-builder-server/setup.py b/airbyte-connector-builder-server/setup.py new file mode 100644 index 0000000000000..0275f75e20380 --- /dev/null +++ b/airbyte-connector-builder-server/setup.py @@ -0,0 +1,57 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import pathlib + +from setuptools import find_packages, setup + +# The directory containing this file +HERE = pathlib.Path(__file__).parent + +# The text of the README file +README = (HERE / "README.md").read_text() + +setup( + name="connector-builder-server", + version="0.40.19", + description="", + long_description=README, + author="Airbyte", + author_email="contact@airbyte.io", + license="MIT", + url="https://github.com/airbytehq/airbyte", + classifiers=[ + # This information is used when browsing on PyPi. + # Dev Status + "Development Status :: 3 - Alpha", + # Project Audience + "Intended Audience :: Developers", + "Topic :: Scientific/Engineering", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: MIT License", + # Python Version Support + "Programming Language :: Python :: 3.8", + ], + keywords="connect-builder", + project_urls={ + "Documentation": "https://docs.airbyte.io/", + "Source": "https://github.com/airbytehq/airbyte", + "Tracker": "https://github.com/airbytehq/airbyte/issues", + }, + packages=find_packages(exclude=("unit_tests", "integration_tests", "docs")), + package_data={}, + install_requires=["airbyte-cdk~=0.8", "fastapi", "uvicorn"], + python_requires=">=3.9.11", + extras_require={ + "tests": [ + "MyPy~=0.812", + "pytest~=6.2.5", + "pytest-cov", + "pytest-mock", + "pytest-recording", + "requests-mock", + "pre-commit", + ], + }, +) diff --git a/connector-builder-server/src/main/openapi/openapi.yaml b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml similarity index 98% rename from connector-builder-server/src/main/openapi/openapi.yaml rename to airbyte-connector-builder-server/src/main/openapi/openapi.yaml index d396c7a6a756b..97ae958a7ba3c 100644 --- a/connector-builder-server/src/main/openapi/openapi.yaml +++ b/airbyte-connector-builder-server/src/main/openapi/openapi.yaml @@ -60,7 +60,7 @@ paths: /v1/manifest_template: get: summary: Return a connector manifest template to use as the default value for the yaml editor - operationId: template + operationId: getManifestTemplate responses: "200": description: Successful operation @@ -189,11 +189,15 @@ components: type: object required: - manifest + - config properties: manifest: type: object description: The config-based connector manifest contents # $ref: "#/components/schemas/ConnectorManifest" + config: + type: object + description: The config blob containing the user inputs for testing StreamsListRead: type: object required: @@ -213,7 +217,6 @@ components: description: The name of the stream url: type: string - format: uri description: The URL to which read requests will be made for this stream # --- Potential addition for a later phase --- # slices: diff --git a/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_default_api.py b/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_default_api.py new file mode 100644 index 0000000000000..a729c202aed76 --- /dev/null +++ b/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_default_api.py @@ -0,0 +1,466 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
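With the `openapi.yaml` change above, `StreamsListRequestBody` now requires both `manifest` and `config`, mirroring `StreamReadRequestBody`. A client-side sketch of exercising the two POST endpoints might look like the following; the base URL, manifest, and config values are placeholders invented for illustration, not a working connector definition.

```python
# Hedged sketch: calling the connector builder server's endpoints with `requests`.
# BASE_URL, manifest, and config are placeholder values for illustration.
import requests

BASE_URL = "http://localhost:8080"  # assumed local address of the server

manifest = {"version": "0.1.0", "streams": [], "check": {"stream_names": []}}  # placeholder manifest
config = {"api_key": "placeholder"}  # placeholder config blob of user inputs

# List the streams declared in the manifest (POST /v1/streams/list).
streams = requests.post(f"{BASE_URL}/v1/streams/list", json={"manifest": manifest, "config": config})
print(streams.json())

# Read a single stream, grouped by slice and page (POST /v1/stream/read); "customers" is an assumed stream name.
pages = requests.post(
    f"{BASE_URL}/v1/stream/read",
    json={"manifest": manifest, "stream": "customers", "config": config},
)
print(pages.json())
```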
+# + +import asyncio +import json +from unittest.mock import MagicMock, patch + +import pytest +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, Level, Type +from fastapi import HTTPException + +from connector_builder.generated.models.http_request import HttpRequest +from connector_builder.generated.models.http_response import HttpResponse +from connector_builder.generated.models.stream_read import StreamRead +from connector_builder.generated.models.stream_read_pages import StreamReadPages +from connector_builder.generated.models.stream_read_request_body import StreamReadRequestBody +from connector_builder.generated.models.streams_list_read import StreamsListRead +from connector_builder.generated.models.streams_list_read_streams import StreamsListReadStreams +from connector_builder.generated.models.streams_list_request_body import StreamsListRequestBody +from connector_builder.impl.default_api import DefaultApiImpl + +MANIFEST = { + "version": "0.1.0", + "definitions": { + "selector": {"extractor": {"field_pointer": ["items"]}}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "hashiras_stream": { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "hashiras", "path": "/hashiras"}, + }, + "breathing_techniques_stream": { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, + }, + }, + "streams": [ + { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "hashiras", "path": "/hashiras"}, + }, + { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, + }, + ], + "check": {"stream_names": ["hashiras"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, +} + +CONFIG = {"rank": "upper-six"} + + +def request_log_message(request: dict) -> AirbyteMessage: + return AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message=f"request:{json.dumps(request)}")) + + +def response_log_message(response: dict) -> AirbyteMessage: + return AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message=f"response:{json.dumps(response)}")) + + +def record_message(stream: str, data: dict) -> AirbyteMessage: + return AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream=stream, data=data, emitted_at=1234)) + + +def test_list_streams(): + expected_streams = [ + StreamsListReadStreams(name="hashiras", url="https://demonslayers.com/api/v1/hashiras"), + StreamsListReadStreams(name="breathing-techniques", 
url="https://demonslayers.com/api/v1/breathing_techniques"), + ] + + api = DefaultApiImpl() + streams_list_request_body = StreamsListRequestBody(manifest=MANIFEST, config=CONFIG) + loop = asyncio.get_event_loop() + actual_streams = loop.run_until_complete(api.list_streams(streams_list_request_body)) + + for i, expected_stream in enumerate(expected_streams): + assert actual_streams.streams[i] == expected_stream + + +def test_list_streams_with_interpolated_urls(): + manifest = { + "version": "0.1.0", + "streams": [ + { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://{{ config['rank'] }}.muzan.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "demons", "path": "/demons"}, + } + ], + "check": {"stream_names": ["demons"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, + } + + expected_streams = StreamsListRead(streams=[StreamsListReadStreams(name="demons", url="https://upper-six.muzan.com/api/v1/demons")]) + + api = DefaultApiImpl() + streams_list_request_body = StreamsListRequestBody(manifest=manifest, config=CONFIG) + loop = asyncio.get_event_loop() + actual_streams = loop.run_until_complete(api.list_streams(streams_list_request_body)) + + assert actual_streams == expected_streams + + +def test_list_streams_with_unresolved_interpolation(): + manifest = { + "version": "0.1.0", + "streams": [ + { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://{{ config['not_in_config'] }}.muzan.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "demons", "path": "/demons"}, + } + ], + "check": {"stream_names": ["demons"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, + } + + # The interpolated string {{ config['not_in_config'] }} doesn't resolve to anything so it ends up blank during interpolation + expected_streams = StreamsListRead(streams=[StreamsListReadStreams(name="demons", url="https://.muzan.com/api/v1/demons")]) + + api = DefaultApiImpl() + + streams_list_request_body = StreamsListRequestBody(manifest=manifest, config=CONFIG) + loop = asyncio.get_event_loop() + actual_streams = loop.run_until_complete(api.list_streams(streams_list_request_body)) + + assert actual_streams == expected_streams + + +def test_read_stream(): + request = { + "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", + "headers": {"Content-Type": "application/json"}, + "body": {"custom": "field"}, + } + response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} + expected_pages = [ + StreamReadPages( + request=HttpRequest( + url="https://demonslayers.com/api/v1/hashiras", + parameters={"era": ["taisho"]}, + headers={"Content-Type": "application/json"}, + body={"custom": "field"}, + ), + response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), + records=[{"name": "Shinobu Kocho"}, {"name": "Muichiro Tokito"}], + ), + StreamReadPages( + request=HttpRequest( + url="https://demonslayers.com/api/v1/hashiras", + parameters={"era": ["taisho"]}, + headers={"Content-Type": "application/json"}, + body={"custom": "field"}, + ), + response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), + records=[{"name": "Mitsuri Kanroji"}], + ), + ] + + mock_source_adapter = MagicMock() + mock_source_adapter.read_stream.return_value = [ 
+ request_log_message(request), + response_log_message(response), + record_message("hashiras", {"name": "Shinobu Kocho"}), + record_message("hashiras", {"name": "Muichiro Tokito"}), + request_log_message(request), + response_log_message(response), + record_message("hashiras", {"name": "Mitsuri Kanroji"}), + ] + + with patch.object(DefaultApiImpl, "_create_low_code_adapter", return_value=mock_source_adapter): + api = DefaultApiImpl() + + loop = asyncio.get_event_loop() + actual_response: StreamRead = loop.run_until_complete( + api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) + ) + + single_slice = actual_response.slices[0] + for i, actual_page in enumerate(single_slice.pages): + assert actual_page == expected_pages[i] + + +def test_read_stream_with_logs(): + request = { + "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", + "headers": {"Content-Type": "application/json"}, + "body": {"custom": "field"}, + } + response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} + expected_pages = [ + StreamReadPages( + request=HttpRequest( + url="https://demonslayers.com/api/v1/hashiras", + parameters={"era": ["taisho"]}, + headers={"Content-Type": "application/json"}, + body={"custom": "field"}, + ), + response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), + records=[{"name": "Shinobu Kocho"}, {"name": "Muichiro Tokito"}], + ), + StreamReadPages( + request=HttpRequest( + url="https://demonslayers.com/api/v1/hashiras", + parameters={"era": ["taisho"]}, + headers={"Content-Type": "application/json"}, + body={"custom": "field"}, + ), + response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), + records=[{"name": "Mitsuri Kanroji"}], + ), + ] + expected_logs = [ + {"message": "log message before the request"}, + {"message": "log message during the page"}, + {"message": "log message after the response"}, + ] + + mock_source_adapter = MagicMock() + mock_source_adapter.read_stream.return_value = [ + AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="log message before the request")), + request_log_message(request), + response_log_message(response), + record_message("hashiras", {"name": "Shinobu Kocho"}), + AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="log message during the page")), + record_message("hashiras", {"name": "Muichiro Tokito"}), + AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="log message after the response")), + ] + + with patch.object(DefaultApiImpl, "_create_low_code_adapter", return_value=mock_source_adapter): + api = DefaultApiImpl() + + loop = asyncio.get_event_loop() + actual_response: StreamRead = loop.run_until_complete( + api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) + ) + + single_slice = actual_response.slices[0] + for i, actual_page in enumerate(single_slice.pages): + assert actual_page == expected_pages[i] + + for i, actual_log in enumerate(actual_response.logs): + assert actual_log == expected_logs[i] + + +def test_read_stream_no_records(): + request = { + "url": "https://demonslayers.com/api/v1/hashiras?era=taisho", + "headers": {"Content-Type": "application/json"}, + "body": {"custom": "field"}, + } + response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} + expected_pages = [ + StreamReadPages( + request=HttpRequest( + 
url="https://demonslayers.com/api/v1/hashiras", + parameters={"era": ["taisho"]}, + headers={"Content-Type": "application/json"}, + body={"custom": "field"}, + ), + response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), + records=[], + ), + StreamReadPages( + request=HttpRequest( + url="https://demonslayers.com/api/v1/hashiras", + parameters={"era": ["taisho"]}, + headers={"Content-Type": "application/json"}, + body={"custom": "field"}, + ), + response=HttpResponse(status=200, headers={"field": "value"}, body={"name": "field"}), + records=[], + ), + ] + + mock_source_adapter = MagicMock() + mock_source_adapter.read_stream.return_value = [ + request_log_message(request), + response_log_message(response), + request_log_message(request), + response_log_message(response), + ] + + with patch.object(DefaultApiImpl, "_create_low_code_adapter", return_value=mock_source_adapter): + api = DefaultApiImpl() + + loop = asyncio.get_event_loop() + actual_response: StreamRead = loop.run_until_complete( + api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) + ) + + single_slice = actual_response.slices[0] + for i, actual_page in enumerate(single_slice.pages): + assert actual_page == expected_pages[i] + + +def test_invalid_manifest(): + invalid_manifest = { + "version": "0.1.0", + "definitions": { + "selector": {"extractor": {"field_pointer": ["items"]}}, + "requester": {"http_method": "GET"}, + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"http_method": "GET"}, + }, + "hashiras_stream": { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"http_method": "GET"}, + }, + "$options": {"name": "hashiras", "path": "/hashiras"}, + }, + }, + "check": {"stream_names": ["hashiras"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, + } + + expected_status_code = 400 + expected_detail = "Invalid connector manifest with error: 'streams' is a required property" + + api = DefaultApiImpl() + loop = asyncio.get_event_loop() + with pytest.raises(HTTPException) as actual_exception: + loop.run_until_complete( + api.read_stream(StreamReadRequestBody(manifest=invalid_manifest, config={}, stream="hashiras")) + ) + + assert actual_exception.value.status_code == expected_status_code + assert expected_detail in actual_exception.value.detail + + +def test_read_stream_invalid_group_format(): + response = {"status_code": 200, "headers": {"field": "value"}, "body": '{"name": "field"}'} + + mock_source_adapter = MagicMock() + mock_source_adapter.read_stream.return_value = [ + response_log_message(response), + record_message("hashiras", {"name": "Shinobu Kocho"}), + record_message("hashiras", {"name": "Muichiro Tokito"}), + ] + + with patch.object(DefaultApiImpl, "_create_low_code_adapter", return_value=mock_source_adapter): + api = DefaultApiImpl() + + loop = asyncio.get_event_loop() + with pytest.raises(HTTPException) as actual_exception: + loop.run_until_complete( + api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config=CONFIG, stream="hashiras")) + ) + + assert actual_exception.value.status_code == 400 + assert actual_exception.value.detail == "Could not perform read with with error: Every message grouping should have at least one request and response" + + +def test_read_stream_returns_error_if_stream_does_not_exist(): + expected_status_code = 
400 + expected_detail = "Could not perform read with with error: \"The requested stream not_in_manifest was not found in the source. Available streams: dict_keys(['hashiras', 'breathing-techniques'])\"" + + api = DefaultApiImpl() + loop = asyncio.get_event_loop() + with pytest.raises(HTTPException) as actual_exception: + loop.run_until_complete( + api.read_stream(StreamReadRequestBody(manifest=MANIFEST, config={}, stream="not_in_manifest")) + ) + + assert actual_exception.value.status_code == expected_status_code + assert expected_detail in actual_exception.value.detail + + +@pytest.mark.parametrize( + "log_message, expected_request", + [ + pytest.param( + 'request:{"url": "https://nichirin.com/v1/swords?color=orange", "headers": {"field": "name"}, "body":{"key": "value"}}', + HttpRequest( + url="https://nichirin.com/v1/swords", parameters={"color": ["orange"]}, headers={"field": "name"}, body={"key": "value"} + ), + id="test_create_request_with_all_fields", + ), + pytest.param( + 'request:{"url": "https://nichirin.com/v1/swords?color=orange", "headers": {"field": "name"}}', + HttpRequest(url="https://nichirin.com/v1/swords", parameters={"color": ["orange"]}, headers={"field": "name"}), + id="test_create_request_with_no_body", + ), + pytest.param( + 'request:{"url": "https://nichirin.com/v1/swords?color=orange", "body":{"key": "value"}}', + HttpRequest(url="https://nichirin.com/v1/swords", parameters={"color": ["orange"]}, body={"key": "value"}), + id="test_create_request_with_no_headers", + ), + pytest.param( + 'request:{"url": "https://nichirin.com/v1/swords", "headers": {"field": "name"}, "body":{"key": "value"}}', + HttpRequest(url="https://nichirin.com/v1/swords", headers={"field": "name"}, body={"key": "value"}), + id="test_create_request_with_no_parameters", + ), + pytest.param("request:{invalid_json: }", None, id="test_invalid_json_still_does_not_crash"), + pytest.param("just a regular log message", None, id="test_no_request:_prefix_does_not_crash"), + ], +) +def test_create_request_from_log_message(log_message, expected_request): + airbyte_log_message = AirbyteLogMessage(level=Level.INFO, message=log_message) + api = DefaultApiImpl() + actual_request = api._create_request_from_log_message(airbyte_log_message) + + assert actual_request == expected_request + + +@pytest.mark.parametrize( + "log_message, expected_response", + [ + pytest.param( + {"status_code": 200, "headers": {"field": "name"}, "body": '{"id":"fire", "owner": "kyojuro_rengoku"}'}, + HttpResponse(status=200, headers={"field": "name"}, body={"id": "fire", "owner": "kyojuro_rengoku"}), + id="test_create_response_with_all_fields", + ), + pytest.param( + {"status_code": 200, "headers": {"field": "name"}}, + HttpResponse(status=200, body={}, headers={"field": "name"}), + id="test_create_response_with_no_body", + ), + pytest.param( + {"status_code": 200, "body": '{"id":"fire", "owner": "kyojuro_rengoku"}'}, + HttpResponse(status=200, body={"id": "fire", "owner": "kyojuro_rengoku"}), + id="test_create_response_with_no_headers", + ), + pytest.param("request:{invalid_json: }", None, id="test_invalid_json_still_does_not_crash"), + pytest.param("just a regular log message", None, id="test_no_response:_prefix_does_not_crash"), + ], +) +def test_create_response_from_log_message(log_message, expected_response): + if isinstance(log_message, str): + response_message = log_message + else: + response_message = f"response:{json.dumps(log_message)}" + + airbyte_log_message = AirbyteLogMessage(level=Level.INFO, message=response_message) + 
api = DefaultApiImpl() + actual_response = api._create_response_from_log_message(airbyte_log_message) + + assert actual_response == expected_response diff --git a/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_low_code_cdk_adapter.py b/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_low_code_cdk_adapter.py new file mode 100644 index 0000000000000..c4803e49111e0 --- /dev/null +++ b/airbyte-connector-builder-server/unit_tests/connector_builder/impl/test_low_code_cdk_adapter.py @@ -0,0 +1,290 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from abc import ABC +from typing import Any, List, Mapping, Optional, Union +from unittest.mock import MagicMock + +import pytest +import requests +from airbyte_cdk.models import AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, Level, Type +from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream +from airbyte_cdk.sources.declarative.parsers.undefined_reference_exception import UndefinedReferenceException +from airbyte_cdk.sources.streams.http import HttpStream + +from connector_builder.impl.low_code_cdk_adapter import LowCodeSourceAdapter + + +class MockConcreteStream(HttpStream, ABC): + """ + Test class used to verify errors are correctly thrown when the adapter receives unexpected outputs + """ + + def primary_key(self) -> Optional[Union[str, List[str], List[List[str]]]]: + return None + + def url_base(self) -> str: + return "" + + def path( + self, + *, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> str: + return "" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def parse_response( + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Optional[str]: + return None + + +MANIFEST = { + "version": "0.1.0", + "definitions": { + "selector": {"extractor": {"field_pointer": ["items"]}}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "hashiras_stream": { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "hashiras", "path": "/hashiras"}, + }, + "breathing_techniques_stream": { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, + }, + }, + "streams": [ + { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", "http_method": "GET"}, + }, + "$options": {"name": "hashiras", "path": "/hashiras"}, + }, + { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"url_base": "https://demonslayers.com/api/v1/", 
"http_method": "GET"}, + }, + "$options": {"name": "breathing-techniques", "path": "/breathing_techniques"}, + }, + ], + "check": {"stream_names": ["hashiras"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, +} + +MANIFEST_WITH_REFERENCES = { + "version": "0.1.0", + "definitions": { + "selector": { + "extractor": { + "field_pointer": [] + } + }, + "requester": { + "url_base": "https://demonslayers.com/api/v1/", + "http_method": "GET", + "authenticator": { + "type": "BearerAuthenticator", + "api_token": "{{ config['api_key'] }}" + } + }, + "retriever": { + "record_selector": { + "$ref": "*ref(definitions.selector)" + }, + "paginator": { + "type": "NoPagination" + }, + "requester": { + "$ref": "*ref(definitions.requester)" + } + }, + "base_stream": { + "retriever": { + "$ref": "*ref(definitions.retriever)" + } + }, + "ranks_stream": { + "$ref": "*ref(definitions.base_stream)", + "$options": { + "name": "ranks", + "primary_key": "id", + "path": "/ranks" + } + } + }, + "streams": ["*ref(definitions.ranks_stream)"], + "check": { + "stream_names": ["ranks"] + }, + "spec": { + "documentation_url": "https://docsurl.com", + "connection_specification": { + "title": "Source Name Spec", + "type": "object", + "required": ["api_key"], + "additionalProperties": True, + "properties": { + "api_key": { + "type": "string", + "description": "API Key" + } + } + } + } +} + +INVALID_MANIFEST = { + "version": "0.1.0", + "definitions": { + "selector": {"extractor": {"field_pointer": ["items"]}}, + "requester": {"http_method": "GET"}, + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"http_method": "GET"}, + }, + "hashiras_stream": { + "retriever": { + "record_selector": {"extractor": {"field_pointer": ["items"]}}, + "paginator": {"type": "NoPagination"}, + "requester": {"http_method": "GET"}, + }, + "$options": {"name": "hashiras", "path": "/hashiras"}, + }, + }, + "check": {"stream_names": ["hashiras"], "class_name": "airbyte_cdk.sources.declarative.checks.check_stream.CheckStream"}, +} + + +def test_get_http_streams(): + expected_urls = {"https://demonslayers.com/api/v1/breathing_techniques", "https://demonslayers.com/api/v1/hashiras"} + + adapter = LowCodeSourceAdapter(MANIFEST) + actual_streams = adapter.get_http_streams(config={}) + actual_urls = {http_stream.url_base + http_stream.path() for http_stream in actual_streams} + + assert len(actual_streams) == len(expected_urls) + assert actual_urls == expected_urls + + +def test_get_http_manifest_with_references(): + expected_urls = {"https://demonslayers.com/api/v1/ranks"} + + adapter = LowCodeSourceAdapter(MANIFEST_WITH_REFERENCES) + actual_streams = adapter.get_http_streams(config={}) + actual_urls = {http_stream.url_base + http_stream.path() for http_stream in actual_streams} + + assert len(actual_streams) == len(expected_urls) + assert actual_urls == expected_urls + + +def test_get_http_streams_non_declarative_streams(): + non_declarative_stream = MockConcreteStream() + + mock_source = MagicMock() + mock_source.streams.return_value = [non_declarative_stream] + + adapter = LowCodeSourceAdapter(MANIFEST) + adapter._source = mock_source + with pytest.raises(TypeError): + adapter.get_http_streams(config={}) + + +def test_get_http_streams_non_http_stream(): + declarative_stream_non_http_retriever = DeclarativeStream(name="hashiras", primary_key="id", retriever=MagicMock(), config={}, + options={}) + + mock_source = MagicMock() + 
mock_source.streams.return_value = [declarative_stream_non_http_retriever] + + adapter = LowCodeSourceAdapter(MANIFEST) + adapter._source = mock_source + with pytest.raises(TypeError): + adapter.get_http_streams(config={}) + + +def test_read_streams(): + expected_messages = [ + AirbyteMessage( + type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="request:{'url': 'https://demonslayers.com/v1/hashiras'}") + ), + AirbyteMessage(type=Type.LOG, log=AirbyteLogMessage(level=Level.INFO, message="response:{'status': 200}")), + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage(data={"name": "Tengen Uzui", "breathing_technique": "sound"}, emitted_at=1234, stream="hashiras"), + ), + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage( + data={"name": "Kyojuro Rengoku", "breathing_technique": "fire"}, emitted_at=1234, stream="hashiras" + ), + ), + AirbyteMessage( + type=Type.RECORD, + record=AirbyteRecordMessage(data={"name": "Giyu Tomioka", "breathing_technique": "water"}, emitted_at=1234, stream="hashiras"), + ), + ] + mock_source = MagicMock() + mock_source.read.return_value = expected_messages + + adapter = LowCodeSourceAdapter(MANIFEST) + adapter._source = mock_source + actual_messages = list(adapter.read_stream("hashiras", {})) + + for i, expected_message in enumerate(expected_messages): + assert actual_messages[i] == expected_message + + +def test_read_streams_invalid_reference(): + invalid_reference_manifest = { + "version": "0.1.0", + "definitions": { + "selector": { + "extractor": { + "field_pointer": [] + } + }, + "ranks_stream": { + "$ref": "*ref(definitions.base_stream)", + "$options": { + "name": "ranks", + "primary_key": "id", + "path": "/ranks" + } + } + }, + "streams": ["*ref(definitions.ranks_stream)"], + "check": { + "stream_names": ["ranks"] + } + } + + with pytest.raises(UndefinedReferenceException): + LowCodeSourceAdapter(invalid_reference_manifest) diff --git a/airbyte-connector-builder-server/unit_tests/test_unit_test.py b/airbyte-connector-builder-server/unit_tests/test_unit_test.py new file mode 100644 index 0000000000000..d770b3dd27eb7 --- /dev/null +++ b/airbyte-connector-builder-server/unit_tests/test_unit_test.py @@ -0,0 +1,7 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +def test_test(): + assert True diff --git a/airbyte-container-orchestrator/Dockerfile b/airbyte-container-orchestrator/Dockerfile index 8aed70dfb3fcf..6c2fbb889626b 100644 --- a/airbyte-container-orchestrator/Dockerfile +++ b/airbyte-container-orchestrator/Dockerfile @@ -10,7 +10,7 @@ RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/s && chmod +x kubectl && mv kubectl /usr/local/bin/ # Don't change this manually. 
Bump version expects to make moves based on this string -ARG VERSION=0.40.18 +ARG VERSION=0.40.19 ENV APPLICATION airbyte-container-orchestrator ENV VERSION=${VERSION} diff --git a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java index a695ae7052305..11580c575faaa 100644 --- a/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java +++ b/airbyte-container-orchestrator/src/main/java/io/airbyte/container_orchestrator/ReplicationJobOrchestrator.java @@ -12,6 +12,7 @@ import datadog.trace.api.Trace; import io.airbyte.commons.features.FeatureFlags; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.logging.MdcScope; import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; import io.airbyte.commons.protocol.AirbyteMessageVersionedMigratorFactory; import io.airbyte.commons.temporal.TemporalUtils; @@ -120,7 +121,8 @@ public Optional runJob() throws Exception { final AirbyteSource airbyteSource = WorkerConstants.RESET_JOB_SOURCE_DOCKER_IMAGE_STUB.equals(sourceLauncherConfig.getDockerImage()) ? new EmptyAirbyteSource( featureFlags.useStreamCapableState()) - : new DefaultAirbyteSource(sourceLauncher, getStreamFactory(sourceLauncherConfig.getProtocolVersion())); + : new DefaultAirbyteSource(sourceLauncher, + getStreamFactory(sourceLauncherConfig.getProtocolVersion(), DefaultAirbyteSource.CONTAINER_LOG_MDC_BUILDER)); MetricClientFactory.initialize(MetricEmittingApps.WORKER); final MetricClient metricClient = MetricClientFactory.getMetricClient(); @@ -132,7 +134,8 @@ public Optional runJob() throws Exception { Math.toIntExact(jobRunConfig.getAttemptId()), airbyteSource, new NamespacingMapper(syncInput.getNamespaceDefinition(), syncInput.getNamespaceFormat(), syncInput.getPrefix()), - new DefaultAirbyteDestination(destinationLauncher, getStreamFactory(destinationLauncherConfig.getProtocolVersion()), + new DefaultAirbyteDestination(destinationLauncher, getStreamFactory(destinationLauncherConfig.getProtocolVersion(), + DefaultAirbyteDestination.CONTAINER_LOG_MDC_BUILDER), new VersionedAirbyteMessageBufferedWriterFactory(serDeProvider, migratorFactory, destinationLauncherConfig.getProtocolVersion())), new AirbyteMessageTracker(), new RecordSchemaValidator(WorkerUtils.mapStreamNamesToSchemas(syncInput)), @@ -146,10 +149,10 @@ public Optional runJob() throws Exception { return Optional.of(Jsons.serialize(replicationOutput)); } - private AirbyteStreamFactory getStreamFactory(final Version protocolVersion) { + private AirbyteStreamFactory getStreamFactory(final Version protocolVersion, final MdcScope.Builder mdcScope) { return protocolVersion != null - ? new VersionedAirbyteStreamFactory<>(serDeProvider, migratorFactory, protocolVersion) - : new DefaultAirbyteStreamFactory(); + ? 
new VersionedAirbyteStreamFactory(serDeProvider, migratorFactory, protocolVersion, mdcScope) + : new DefaultAirbyteStreamFactory(mdcScope); } } diff --git a/airbyte-cron/Dockerfile b/airbyte-cron/Dockerfile index edfd9e552682e..546b3f04b67e8 100644 --- a/airbyte-cron/Dockerfile +++ b/airbyte-cron/Dockerfile @@ -1,7 +1,7 @@ ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0 FROM ${JDK_IMAGE} AS cron -ARG VERSION=0.40.18 +ARG VERSION=0.40.19 ENV APPLICATION airbyte-cron ENV VERSION ${VERSION} diff --git a/airbyte-db/db-lib/build.gradle b/airbyte-db/db-lib/build.gradle index 89e2bbd6d4ea1..331b6ac436308 100644 --- a/airbyte-db/db-lib/build.gradle +++ b/airbyte-db/db-lib/build.gradle @@ -51,6 +51,10 @@ dependencies { // MongoDB implementation 'org.mongodb:mongodb-driver-sync:4.3.0' + + // MySQL + implementation 'mysql:mysql-connector-java:8.0.30' + } task(newConfigsMigration, dependsOn: 'classes', type: JavaExec) { diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java index f4f33dc455f8c..f6a0280502e66 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/factory/DataSourceFactory.java @@ -4,12 +4,15 @@ package io.airbyte.db.factory; +import static org.postgresql.PGProperty.CONNECT_TIMEOUT; + import com.google.common.base.Preconditions; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import java.io.Closeable; import java.time.Duration; import java.util.Map; +import java.util.Objects; import javax.sql.DataSource; /** @@ -61,7 +64,7 @@ public static DataSource create(final String username, .withJdbcUrl(jdbcConnectionString) .withPassword(password) .withUsername(username) - .withConnectionTimeoutMs(DataSourceBuilder.getConnectionTimeoutMs(connectionProperties)) + .withConnectionTimeoutMs(DataSourceBuilder.getConnectionTimeoutMs(connectionProperties, driverClassName)) .build(); } @@ -196,12 +199,23 @@ private DataSourceBuilder() {} * * @param connectionProperties custom jdbc_url_parameters containing information on connection * properties + * @param driverClassName name of the JDBC driver * @return DataSourceBuilder class used to create dynamic fields for DataSource */ - private static long getConnectionTimeoutMs(final Map connectionProperties) { + private static long getConnectionTimeoutMs(final Map connectionProperties, String driverClassName) { + // TODO: the usage of CONNECT_TIMEOUT is Postgres specific, may need to extend for other databases + if (driverClassName.equals(DatabaseDriver.POSTGRESQL.getDriverClassName())) { + final String pgPropertyConnectTimeout = CONNECT_TIMEOUT.getName(); + // If the PGProperty.CONNECT_TIMEOUT was set by the user, then take its value, if not take the + // default + if (connectionProperties.containsKey(pgPropertyConnectTimeout) + && (Long.parseLong(connectionProperties.get(pgPropertyConnectTimeout)) >= 0)) { + return Duration.ofSeconds(Long.parseLong(connectionProperties.get(pgPropertyConnectTimeout))).toMillis(); + } else { + return Duration.ofSeconds(Long.parseLong(Objects.requireNonNull(CONNECT_TIMEOUT.getDefaultValue()))).toMillis(); + } + } final Duration connectionTimeout; - // TODO: the usage of CONNECT_TIMEOUT_KEY is Postgres specific, may need to extend for other - // databases connectionTimeout = connectionProperties.containsKey(CONNECT_TIMEOUT_KEY) ? 
Duration.ofSeconds(Long.parseLong(connectionProperties.get(CONNECT_TIMEOUT_KEY))) : CONNECT_TIMEOUT_DEFAULT; diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_003__AddIndexToConnectionStatus.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_003__AddIndexToConnectionStatus.java new file mode 100644 index 0000000000000..cb5d248c9f670 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_003__AddIndexToConnectionStatus.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_40_18_003__AddIndexToConnectionStatus extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_003__AddIndexToConnectionStatus.class); + private static final String CONNECTION_TABLE = "connection"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + try (final DSLContext ctx = DSL.using(context.getConnection())) { + ctx.createIndexIfNotExists("connection_status_idx").on(CONNECTION_TABLE, "status").execute(); + } + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.java new file mode 100644 index 0000000000000..9e1c5e7ed3f49 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.java @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +// TODO: update migration description in the class name +public class V0_40_18_004__BackfillActorDefinitionWorkspaceGrant extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger( + V0_40_18_004__BackfillActorDefinitionWorkspaceGrant.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
+ final DSLContext ctx = DSL.using(context.getConnection()); + + var customActorDefinitionIds = ctx.fetch("SELECT id FROM actor_definition WHERE public is false and tombstone is false;"); + var existingWorkspaces = ctx.fetch("SELECT id FROM WORKSPACE where tombstone is false;"); + + // Update for all custom connectors - set custom field to true; + ctx.execute("UPDATE actor_definition" + + " SET custom = true " + + " WHERE public is false and tombstone is false;"); + + for (final var customActorDefinitionIdRecord : customActorDefinitionIds) { + for (final var existingWorkspaceRecord : existingWorkspaces) { + // Populate a record for new table; + var customActorDefinitionIdValue = customActorDefinitionIdRecord.getValue("id", UUID.class); + var existingWorkspaceIdValue = existingWorkspaceRecord.getValue("id", UUID.class); + + ctx.execute("INSERT INTO actor_definition_workspace_grant(workspace_id, actor_definition_id) VALUES ({0}, {1})", + existingWorkspaceIdValue, customActorDefinitionIdValue); + } + } + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_001__AddIndexToAttemptsAndJobsStatus.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_001__AddIndexToAttemptsAndJobsStatus.java new file mode 100644 index 0000000000000..f6f26e43b72fb --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_001__AddIndexToAttemptsAndJobsStatus.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.jobs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V0_40_18_001__AddIndexToAttemptsAndJobsStatus extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_001__AddIndexToAttemptsAndJobsStatus.class); + private static final String ATTEMPTS_TABLE = "attempts"; + private static final String JOBS_TABLE = "jobs"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + try (final DSLContext ctx = DSL.using(context.getConnection())) { + ctx.createIndexIfNotExists("attempts_status_idx").on(ATTEMPTS_TABLE, "status").execute(); + ctx.createIndexIfNotExists("jobs_status_idx").on(JOBS_TABLE, "status").execute(); + } + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_002__AddProgressBarStats.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_002__AddProgressBarStats.java new file mode 100644 index 0000000000000..244830dd985db --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/jobs/migrations/V0_40_18_002__AddProgressBarStats.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.db.instance.jobs.migrations; + +import static org.jooq.impl.DSL.currentOffsetDateTime; +import static org.jooq.impl.DSL.field; +import static org.jooq.impl.DSL.foreignKey; +import static org.jooq.impl.DSL.primaryKey; +import static org.jooq.impl.DSL.unique; + +import java.time.OffsetDateTime; +import java.util.UUID; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.Field; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The estimated columns contains the overall estimated records and bytes for an attempt. + *

+ * The new stream_stats table contains the estimated and emitted records/bytes for an attempt at the + * per-stream level. This lets us track per-stream stats as an attempt is in progress. + */ +public class V0_40_18_002__AddProgressBarStats extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V0_40_18_002__AddProgressBarStats.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. + try (final DSLContext ctx = DSL.using(context.getConnection())) { + addEstimatedColumnsToSyncStats(ctx); + addStreamStatsTable(ctx); + } + } + + private static void addEstimatedColumnsToSyncStats(final DSLContext ctx) { + ctx.alterTable("sync_stats") + .add( + field("estimated_records", SQLDataType.BIGINT.nullable(true)), + field("estimated_bytes", SQLDataType.BIGINT.nullable(true))) + .execute(); + } + + private static void addStreamStatsTable(final DSLContext ctx) { + // Metadata Columns + final Field id = field("id", SQLDataType.UUID.nullable(false)); + final Field attemptId = field("attempt_id", SQLDataType.INTEGER.nullable(false)); + final Field streamNamespace = field("stream_namespace", SQLDataType.VARCHAR.nullable(false)); + final Field streamName = field("stream_name", SQLDataType.VARCHAR.nullable(false)); + + // Stats Columns + final Field recordsEmitted = field("records_emitted", SQLDataType.BIGINT.nullable(true)); + final Field bytesEmitted = field("bytes_emitted", SQLDataType.BIGINT.nullable(true)); + final Field estimatedRecords = field("estimated_records", SQLDataType.BIGINT.nullable(true)); + final Field estimatedBytes = field("estimated_bytes", SQLDataType.BIGINT.nullable(true)); + + // Time Columns + final Field createdAt = + field("created_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); + final Field updatedAt = + field("updated_at", SQLDataType.TIMESTAMPWITHTIMEZONE.nullable(false).defaultValue(currentOffsetDateTime())); + + ctx.createTableIfNotExists("stream_stats") + .columns( + id, attemptId, streamNamespace, streamName, recordsEmitted, bytesEmitted, estimatedRecords, estimatedBytes, createdAt, updatedAt) + .constraints( + primaryKey(id), + foreignKey(attemptId).references("attempts", "id").onDeleteCascade(), + // Prevent duplicate stat records of the same stream and attempt. + unique("attempt_id", "stream_name")) + .execute(); + + // Create an index on attempt_id, since all read queries on this table as of this migration will be + // WHERE clauses on the attempt id. 
+ ctx.createIndex("index").on("stream_stats", "attempt_id").execute(); + + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java index e359a2331889d..5529b35668c43 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/jdbc/JdbcUtils.java @@ -47,6 +47,7 @@ public class JdbcUtils { // NOTE: this is the plural version of SCHEMA_KEY public static final String SCHEMAS_KEY = "schemas"; public static final String SSL_KEY = "ssl"; + public static final List SSL_MODE_DISABLE = List.of("disable", "disabled"); public static final String SSL_MODE_KEY = "ssl_mode"; public static final String TLS_KEY = "tls"; public static final String USERNAME_KEY = "username"; @@ -111,10 +112,17 @@ public static Map parseJdbcParameters(final String jdbcPropertie * (e.g. non-zero integers, string true, etc) * * @param config A configuration used to check Jdbc connection - * @return true: if ssl has not been set or it has been set with true, false: in all other cases + * @return true: if ssl has not been set and ssl mode not equals disabled or it has been set with + * true, false: in all other cases */ public static boolean useSsl(final JsonNode config) { - return !config.has(SSL_KEY) || config.get(SSL_KEY).asBoolean(); + if (!config.has(SSL_KEY)) { + if (config.has(SSL_MODE_KEY) && config.get(SSL_MODE_KEY).has(MODE_KEY)) { + return !SSL_MODE_DISABLE.contains(config.get(SSL_MODE_KEY).get(MODE_KEY).asText()); + } else + return true; + } else + return config.get(SSL_KEY).asBoolean(); } } diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/mongodb/MongoUtils.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/mongodb/MongoUtils.java index b0ccd752facf8..6f0b8cf5a9c5b 100644 --- a/airbyte-db/db-lib/src/main/java/io/airbyte/db/mongodb/MongoUtils.java +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/mongodb/MongoUtils.java @@ -6,7 +6,15 @@ import static java.util.Arrays.asList; import static org.bson.BsonType.ARRAY; +import static org.bson.BsonType.DATE_TIME; +import static org.bson.BsonType.DECIMAL128; import static org.bson.BsonType.DOCUMENT; +import static org.bson.BsonType.DOUBLE; +import static org.bson.BsonType.INT32; +import static org.bson.BsonType.INT64; +import static org.bson.BsonType.OBJECT_ID; +import static org.bson.BsonType.STRING; +import static org.bson.BsonType.TIMESTAMP; import static org.bson.codecs.configuration.CodecRegistries.fromProviders; import com.fasterxml.jackson.databind.JsonNode; @@ -20,6 +28,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.MoreIterators; import io.airbyte.db.DataTypeUtils; +import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.TreeNode; @@ -28,6 +37,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Set; import org.bson.BsonBinary; import org.bson.BsonDateTime; import org.bson.BsonDocument; @@ -53,6 +63,29 @@ public class MongoUtils { private static final Logger LOGGER = LoggerFactory.getLogger(MongoUtils.class); + // Shared constants + public static final String MONGODB_SERVER_URL = "mongodb://%s%s:%s/%s?authSource=admin&ssl=%s"; + public static final String MONGODB_CLUSTER_URL = "mongodb+srv://%s%s/%s?retryWrites=true&w=majority&tls=true"; + public static final String MONGODB_REPLICA_URL = 
"mongodb://%s%s/%s?authSource=admin&directConnection=false&ssl=true"; + public static final String USER = "user"; + public static final String INSTANCE_TYPE = "instance_type"; + public static final String INSTANCE = "instance"; + public static final String CLUSTER_URL = "cluster_url"; + public static final String SERVER_ADDRESSES = "server_addresses"; + public static final String REPLICA_SET = "replica_set"; + + // MongodbDestination specific constants + public static final String AUTH_TYPE = "auth_type"; + public static final String AUTHORIZATION = "authorization"; + public static final String LOGIN_AND_PASSWORD = "login/password"; + public static final String AIRBYTE_DATA_HASH = "_airbyte_data_hash"; + + // MongodbSource specific constants + public static final String AUTH_SOURCE = "auth_source"; + public static final String PRIMARY_KEY = "_id"; + public static final Set ALLOWED_CURSOR_TYPES = Set.of(DOUBLE, STRING, DOCUMENT, OBJECT_ID, DATE_TIME, + INT32, TIMESTAMP, INT64, DECIMAL128); + private static final String MISSING_TYPE = "missing"; private static final String NULL_TYPE = "null"; public static final String AIRBYTE_SUFFIX = "_aibyte_transform"; @@ -136,6 +169,14 @@ private static ObjectNode readDocument(final BsonReader reader, final ObjectNode return jsonNodes; } + /** + * Determines whether TLS/SSL should be enabled for a standalone instance of MongoDB. + */ + public static boolean tlsEnabledForStandaloneInstance(final JsonNode config, final JsonNode instanceConfig) { + return config.has(JdbcUtils.TLS_KEY) ? config.get(JdbcUtils.TLS_KEY).asBoolean() + : (instanceConfig.has(JdbcUtils.TLS_KEY) ? instanceConfig.get(JdbcUtils.TLS_KEY).asBoolean() : true); + } + public static void transformToStringIfMarked(final ObjectNode jsonNodes, final List columnNames, final String fieldName) { if (columnNames.contains(fieldName + AIRBYTE_SUFFIX)) { final JsonNode data = jsonNodes.get(fieldName); diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt index 0f8b27322c228..33298d85df3bf 100644 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt @@ -280,6 +280,7 @@ create index "airbyte_configs_migrations_s_idx" on "public"."airbyte_configs_mig create index "connection_destination_id_idx" on "public"."connection"("destination_id" asc); create unique index "connection_pkey" on "public"."connection"("id" asc); create index "connection_source_id_idx" on "public"."connection"("source_id" asc); +create index "connection_status_idx" on "public"."connection"("status" asc); create index "connection_operation_connection_id_idx" on "public"."connection_operation"("connection_id" asc); create unique index "connection_operation_pkey" on "public"."connection_operation"( "id" asc, diff --git a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt index 15cd985a91184..628285ccbd3bd 100644 --- a/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/jobs_database/schema_dump.txt @@ -61,6 +61,20 @@ create table "public"."normalization_summaries"( constraint "normalization_summaries_pkey" primary key ("id") ); +create table "public"."stream_stats"( + "id" uuid not null, + "attempt_id" int4 not null, + "stream_namespace" varchar(2147483647) not null, + "stream_name" varchar(2147483647) 
not null, + "records_emitted" int8 null, + "bytes_emitted" int8 null, + "estimated_records" int8 null, + "estimated_bytes" int8 null, + "created_at" timestamptz(35) not null default null, + "updated_at" timestamptz(35) not null default null, + constraint "stream_stats_pkey" + primary key ("id") +); create table "public"."sync_stats"( "id" uuid not null, "attempt_id" int8 not null, @@ -75,6 +89,8 @@ create table "public"."sync_stats"( "max_seconds_between_state_message_emitted_and_committed" int8 null, "created_at" timestamptz(35) not null default null, "updated_at" timestamptz(35) not null default null, + "estimated_records" int8 null, + "estimated_bytes" int8 null, constraint "sync_stats_pkey" primary key ("id") ); @@ -82,6 +98,10 @@ alter table "public"."normalization_summaries" add constraint "normalization_summaries_attempt_id_fkey" foreign key ("attempt_id") references "public"."attempts" ("id"); +alter table "public"."stream_stats" + add constraint "stream_stats_attempt_id_fkey" + foreign key ("attempt_id") + references "public"."attempts" ("id"); alter table "public"."sync_stats" add constraint "sync_stats_attempt_id_fkey" foreign key ("attempt_id") @@ -90,6 +110,7 @@ create unique index "airbyte_jobs_migrations_pk" on "public"."airbyte_jobs_migra create index "airbyte_jobs_migrations_s_idx" on "public"."airbyte_jobs_migrations"("success" asc); create unique index "airbyte_metadata_pkey" on "public"."airbyte_metadata"("key" asc); create unique index "attempts_pkey" on "public"."attempts"("id" asc); +create index "attempts_status_idx" on "public"."attempts"("status" asc); create unique index "job_attempt_idx" on "public"."attempts"( "job_id" asc, "attempt_number" asc @@ -97,7 +118,14 @@ create unique index "job_attempt_idx" on "public"."attempts"( create index "jobs_config_type_idx" on "public"."jobs"("config_type" asc); create unique index "jobs_pkey" on "public"."jobs"("id" asc); create index "jobs_scope_idx" on "public"."jobs"("scope" asc); +create index "jobs_status_idx" on "public"."jobs"("status" asc); create unique index "normalization_summaries_pkey" on "public"."normalization_summaries"("id" asc); create index "normalization_summary_attempt_id_idx" on "public"."normalization_summaries"("attempt_id" asc); +create index "index" on "public"."stream_stats"("attempt_id" asc); +create unique index "stream_stats_attempt_id_stream_name_key" on "public"."stream_stats"( + "attempt_id" asc, + "stream_name" asc +); +create unique index "stream_stats_pkey" on "public"."stream_stats"("id" asc); create index "attempt_id_idx" on "public"."sync_stats"("attempt_id" asc); create unique index "sync_stats_pkey" on "public"."sync_stats"("id" asc); diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java index 760be9514dd9b..c23fac22a54ad 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/factory/DataSourceFactoryTest.java @@ -17,12 +17,15 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.testcontainers.containers.MySQLContainer; /** * Test suite for the {@link DataSourceFactory} class. 
*/ class DataSourceFactoryTest extends CommonFactoryTest { + private static final String CONNECT_TIMEOUT = "connectTimeout"; + static String database; static String driverClassName; static String host; @@ -45,7 +48,7 @@ static void setup() { @Test void testCreatingDataSourceWithConnectionTimeoutSetAboveDefault() { final Map connectionProperties = Map.of( - "connectTimeout", "61"); + CONNECT_TIMEOUT, "61"); final DataSource dataSource = DataSourceFactory.create( username, password, @@ -58,9 +61,9 @@ void testCreatingDataSourceWithConnectionTimeoutSetAboveDefault() { } @Test - void testCreatingDataSourceWithConnectionTimeoutSetBelowDefault() { + void testCreatingPostgresDataSourceWithConnectionTimeoutSetBelowDefault() { final Map connectionProperties = Map.of( - "connectTimeout", "30"); + CONNECT_TIMEOUT, "30"); final DataSource dataSource = DataSourceFactory.create( username, password, @@ -69,13 +72,31 @@ void testCreatingDataSourceWithConnectionTimeoutSetBelowDefault() { connectionProperties); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(60000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); + assertEquals(30000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); + } + + @Test + void testCreatingMySQLDataSourceWithConnectionTimeoutSetBelowDefault() { + try (MySQLContainer mySQLContainer = new MySQLContainer<>("mysql:8.0")) { + mySQLContainer.start(); + final Map connectionProperties = Map.of( + CONNECT_TIMEOUT, "30"); + final DataSource dataSource = DataSourceFactory.create( + mySQLContainer.getUsername(), + mySQLContainer.getPassword(), + mySQLContainer.getDriverClassName(), + mySQLContainer.getJdbcUrl(), + connectionProperties); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(60000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); + } } @Test void testCreatingDataSourceWithConnectionTimeoutSetWithZero() { final Map connectionProperties = Map.of( - "connectTimeout", "0"); + CONNECT_TIMEOUT, "0"); final DataSource dataSource = DataSourceFactory.create( username, password, @@ -88,7 +109,7 @@ void testCreatingDataSourceWithConnectionTimeoutSetWithZero() { } @Test - void testCreatingDataSourceWithConnectionTimeoutNotSet() { + void testCreatingPostgresDataSourceWithConnectionTimeoutNotSet() { final Map connectionProperties = Map.of(); final DataSource dataSource = DataSourceFactory.create( username, @@ -98,7 +119,25 @@ void testCreatingDataSourceWithConnectionTimeoutNotSet() { connectionProperties); assertNotNull(dataSource); assertEquals(HikariDataSource.class, dataSource.getClass()); - assertEquals(60000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); + assertEquals(10000, ((HikariDataSource) dataSource).getHikariConfigMXBean().getConnectionTimeout()); + } + + @Test + void testCreatingMySQLDataSourceWithConnectionTimeoutNotSet() { + try (MySQLContainer mySQLContainer = new MySQLContainer<>("mysql:8.0")) { + mySQLContainer.start(); + final Map connectionProperties = Map.of(); + final DataSource dataSource = DataSourceFactory.create( + mySQLContainer.getUsername(), + mySQLContainer.getPassword(), + mySQLContainer.getDriverClassName(), + mySQLContainer.getJdbcUrl(), + connectionProperties); + assertNotNull(dataSource); + assertEquals(HikariDataSource.class, dataSource.getClass()); + assertEquals(60000, ((HikariDataSource) 
dataSource).getHikariConfigMXBean().getConnectionTimeout()); + } + } @Test diff --git a/airbyte-db/db-lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java b/airbyte-db/db-lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java index d6a4b139a2bf0..92435db61ce4b 100644 --- a/airbyte-db/db-lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java +++ b/airbyte-db/db-lib/src/test/java/io/airbyte/db/jdbc/TestJdbcUtils.java @@ -210,6 +210,42 @@ void testUssSslWithSslSetAndValueIntegerTrue() { assertTrue(sslSet); } + @Test + void testUseSslWithEmptySslKeyAndSslModeVerifyFull() { + final JsonNode config = Jsons.jsonNode(ImmutableMap.builder() + .put("host", PSQL_DB.getHost()) + .put("port", PSQL_DB.getFirstMappedPort()) + .put("database", dbName) + .put("username", PSQL_DB.getUsername()) + .put("password", PSQL_DB.getPassword()) + .put("ssl_mode", ImmutableMap.builder() + .put("mode", "verify-full") + .put("ca_certificate", "test_ca_cert") + .put("client_certificate", "test_client_cert") + .put("client_key", "test_client_key") + .put("client_key_password", "test_pass") + .build()) + .build()); + final boolean sslSet = JdbcUtils.useSsl(config); + assertTrue(sslSet); + } + + @Test + void testUseSslWithEmptySslKeyAndSslModeDisable() { + final JsonNode config = Jsons.jsonNode(ImmutableMap.builder() + .put("host", PSQL_DB.getHost()) + .put("port", PSQL_DB.getFirstMappedPort()) + .put("database", dbName) + .put("username", PSQL_DB.getUsername()) + .put("password", PSQL_DB.getPassword()) + .put("ssl_mode", ImmutableMap.builder() + .put("mode", "disable") + .build()) + .build()); + final boolean sslSet = JdbcUtils.useSsl(config); + assertFalse(sslSet); + } + private static void createTableWithAllTypes(final Connection connection) throws SQLException { // jdbctype not included because they are not directly supported in postgres: TINYINT, LONGVARCHAR, // VARBINAR, LONGVARBINARY diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BaseS3Destination.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BaseS3Destination.java index 0119ad41511fb..e655b2ddd5248 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BaseS3Destination.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/BaseS3Destination.java @@ -28,11 +28,11 @@ public abstract class BaseS3Destination extends BaseConnector implements Destina private final NamingConventionTransformer nameTransformer; - public BaseS3Destination() { + protected BaseS3Destination() { this(new S3DestinationConfigFactory()); } - public BaseS3Destination(final S3DestinationConfigFactory configFactory) { + protected BaseS3Destination(final S3DestinationConfigFactory configFactory) { this.configFactory = configFactory; this.nameTransformer = new S3NameTransformer(); } diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3ConsumerFactory.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3ConsumerFactory.java index 4586ce7389ef0..dc49e05ec6a87 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3ConsumerFactory.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3ConsumerFactory.java @@ -9,7 +9,6 @@ import io.airbyte.commons.functional.CheckedBiFunction; import 
io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; import io.airbyte.integrations.destination.buffered_stream_consumer.OnCloseFunction; @@ -18,6 +17,7 @@ import io.airbyte.integrations.destination.record_buffer.SerializedBufferingStrategy; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; @@ -66,8 +66,7 @@ private static List createWriteConfigs(final BlobStorageOperations .collect(Collectors.toList()); } - private static Function toWriteConfig( - final BlobStorageOperations storageOperations, + private static Function toWriteConfig(final BlobStorageOperations storageOperations, final NamingConventionTransformer namingResolver, final S3DestinationConfig s3Config) { return stream -> { diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java index 89641d9357adf..3806e2e7d9709 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/S3DestinationConstants.java @@ -16,6 +16,7 @@ public final class S3DestinationConstants { // gzip compression for CSV and JSONL public static final String COMPRESSION_ARG_NAME = "compression"; public static final String COMPRESSION_TYPE_ARG_NAME = "compression_type"; + public static final String FLATTEN_DATA = "flatten_data"; public static final CompressionType DEFAULT_COMPRESSION_TYPE = CompressionType.GZIP; private S3DestinationConstants() {} diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/SerializedBufferFactory.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/SerializedBufferFactory.java index 3d1552675026f..07221f113b1b0 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/SerializedBufferFactory.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/SerializedBufferFactory.java @@ -6,7 +6,6 @@ import io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.record_buffer.BufferStorage; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; import io.airbyte.integrations.destination.s3.avro.AvroSerializedBuffer; @@ -16,6 +15,7 @@ import io.airbyte.integrations.destination.s3.jsonl.JsonLSerializedBuffer; import io.airbyte.integrations.destination.s3.jsonl.S3JsonlFormatConfig; import io.airbyte.integrations.destination.s3.parquet.ParquetSerializedBuffer; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import 
io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.concurrent.Callable; import java.util.function.Function; diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroSerializedBuffer.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroSerializedBuffer.java index b925df4b956a8..b410716f9688b 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroSerializedBuffer.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/avro/AvroSerializedBuffer.java @@ -5,11 +5,11 @@ package io.airbyte.integrations.destination.s3.avro; import io.airbyte.commons.functional.CheckedBiFunction; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.record_buffer.BaseSerializedBuffer; import io.airbyte.integrations.destination.record_buffer.BufferStorage; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.io.IOException; import java.io.OutputStream; diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/CsvSerializedBuffer.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/CsvSerializedBuffer.java index 2a63471e2371c..3fe0696945167 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/CsvSerializedBuffer.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/csv/CsvSerializedBuffer.java @@ -5,12 +5,12 @@ package io.airbyte.integrations.destination.s3.csv; import io.airbyte.commons.functional.CheckedBiFunction; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.record_buffer.BaseSerializedBuffer; import io.airbyte.integrations.destination.record_buffer.BufferStorage; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; import io.airbyte.integrations.destination.s3.util.CompressionType; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.io.IOException; import java.io.OutputStream; diff --git a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java index 1f0a177d1982e..08e3c907c0b4b 100644 --- a/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java +++ b/airbyte-integrations/bases/base-java-s3/src/main/java/io/airbyte/integrations/destination/s3/jsonl/JsonLSerializedBuffer.java @@ -4,12 +4,13 @@ package io.airbyte.integrations.destination.s3.jsonl; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import 
io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.jackson.MoreMappers; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.destination.record_buffer.BaseSerializedBuffer; import io.airbyte.integrations.destination.record_buffer.BufferStorage; @@ -17,10 +18,12 @@ import io.airbyte.integrations.destination.s3.S3DestinationConstants; import io.airbyte.integrations.destination.s3.util.CompressionType; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.io.OutputStream; import java.io.PrintWriter; import java.nio.charset.StandardCharsets; +import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; @@ -30,10 +33,13 @@ public class JsonLSerializedBuffer extends BaseSerializedBuffer { private PrintWriter printWriter; - protected JsonLSerializedBuffer(final BufferStorage bufferStorage, final boolean gzipCompression) throws Exception { + private final boolean flattenData; + + protected JsonLSerializedBuffer(final BufferStorage bufferStorage, final boolean gzipCompression, final boolean flattenData) throws Exception { super(bufferStorage); // we always want to compress jsonl files withCompression(gzipCompression); + this.flattenData = flattenData; } @Override @@ -46,7 +52,12 @@ protected void writeRecord(final AirbyteRecordMessage recordMessage) { final ObjectNode json = MAPPER.createObjectNode(); json.put(JavaBaseConstants.COLUMN_NAME_AB_ID, UUID.randomUUID().toString()); json.put(JavaBaseConstants.COLUMN_NAME_EMITTED_AT, recordMessage.getEmittedAt()); - json.set(JavaBaseConstants.COLUMN_NAME_DATA, recordMessage.getData()); + if (flattenData) { + Map data = MAPPER.convertValue(recordMessage.getData(), new TypeReference<>() {}); + json.setAll(data); + } else { + json.set(JavaBaseConstants.COLUMN_NAME_DATA, recordMessage.getData()); + } printWriter.println(Jsons.serialize(json)); } @@ -66,7 +77,8 @@ public static CheckedBiFunction extends CheckedBiConsumer, Exception> { diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java index b638906666282..6483c0f9867b6 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/BufferingStrategy.java @@ -4,8 +4,8 @@ package io.airbyte.integrations.destination.record_buffer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; /** * High-level interface used by diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java index 70ca30da88b18..ebb4151ebf56b 100644 --- 
a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategy.java @@ -4,12 +4,12 @@ package io.airbyte.integrations.destination.record_buffer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.buffered_stream_consumer.CheckAndRemoveRecordWriter; import io.airbyte.integrations.destination.buffered_stream_consumer.RecordSizeEstimator; import io.airbyte.integrations.destination.buffered_stream_consumer.RecordWriter; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.ArrayList; import java.util.HashMap; import java.util.List; diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java index 71f9ddde36e92..9ab420a23126f 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java +++ b/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategy.java @@ -7,8 +7,8 @@ import io.airbyte.commons.functional.CheckedBiConsumer; import io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.string.Strings; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.ArrayList; import java.util.HashMap; diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java index 154481db5c597..5b9bd4cb183f7 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/base/IntegrationRunnerTest.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.base; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -19,6 +21,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.io.IOs; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.AutoCloseableIterators; @@ -222,6 +225,67 @@ void testRead() throws Exception { verify(jsonSchemaValidator).validate(any(), any()); } + @Test + void testReadException() throws Exception { + final IntegrationConfig intConfig = IntegrationConfig.read(configPath, configuredCatalogPath, + statePath); + final ConfigErrorException 
configErrorException = new ConfigErrorException("Invalid configuration"); + + when(cliParser.parse(ARGS)).thenReturn(intConfig); + when(source.read(CONFIG, CONFIGURED_CATALOG, STATE)).thenThrow(configErrorException); + + final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); + when(source.spec()).thenReturn(expectedConnSpec); + when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); + + final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); + final Throwable throwable = catchThrowable(() -> new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS)); + + assertThat(throwable).isInstanceOf(ConfigErrorException.class); + verify(source).read(CONFIG, CONFIGURED_CATALOG, STATE); + } + + @Test + void testCheckNestedException() throws Exception { + final IntegrationConfig intConfig = IntegrationConfig.check(configPath); + final AirbyteConnectionStatus output = new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage("Invalid configuration"); + final ConfigErrorException configErrorException = new ConfigErrorException("Invalid configuration"); + final RuntimeException runtimeException = new RuntimeException(new RuntimeException(configErrorException)); + + when(cliParser.parse(ARGS)).thenReturn(intConfig); + when(source.check(CONFIG)).thenThrow(runtimeException); + + final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); + when(source.spec()).thenReturn(expectedConnSpec); + when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); + final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); + new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); + + verify(source).check(CONFIG); + verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(output)); + verify(jsonSchemaValidator).validate(any(), any()); + } + + @Test + void testCheckRuntimeException() throws Exception { + final IntegrationConfig intConfig = IntegrationConfig.check(configPath); + final AirbyteConnectionStatus output = new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage("Runtime Error"); + final RuntimeException runtimeException = new RuntimeException("Runtime Error"); + + when(cliParser.parse(ARGS)).thenReturn(intConfig); + when(source.check(CONFIG)).thenThrow(runtimeException); + + final ConnectorSpecification expectedConnSpec = mock(ConnectorSpecification.class); + when(source.spec()).thenReturn(expectedConnSpec); + when(expectedConnSpec.getConnectionSpecification()).thenReturn(CONFIG); + final JsonSchemaValidator jsonSchemaValidator = mock(JsonSchemaValidator.class); + new IntegrationRunner(cliParser, stdoutConsumer, null, source, jsonSchemaValidator).run(ARGS); + + verify(source).check(CONFIG); + verify(stdoutConsumer).accept(new AirbyteMessage().withType(Type.CONNECTION_STATUS).withConnectionStatus(output)); + verify(jsonSchemaValidator).validate(any(), any()); + } + @Test void testWrite() throws Exception { final IntegrationConfig intConfig = IntegrationConfig.write(configPath, configuredCatalogPath); diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java index 58ab5fa56dc1d..0a2561e94086e 100644 --- 
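Taken together, these three tests pin down how failures surface: a `ConfigErrorException` thrown from `read` propagates as-is, while `check` turns it, even when it is buried inside nested `RuntimeException`s, into a FAILED connection status carrying the config error's message, and a plain `RuntimeException` yields a FAILED status with its own message. A hedged Python sketch of the cause-chain unwrapping these tests appear to exercise (not the actual `IntegrationRunner` code, just the observable behavior):

```python
class ConfigErrorException(Exception):
    """Stand-in for io.airbyte.commons.exceptions.ConfigErrorException."""


def failure_message(error):
    """Walk the cause chain; if a ConfigErrorException is found anywhere,
    surface its message, otherwise fall back to the outermost message."""
    cause = error
    while cause is not None:
        if isinstance(cause, ConfigErrorException):
            return str(cause)
        cause = cause.__cause__
    return str(error)


nested = RuntimeError("wrapper")
nested.__cause__ = RuntimeError("inner wrapper")
nested.__cause__.__cause__ = ConfigErrorException("Invalid configuration")
assert failure_message(nested) == "Invalid configuration"
assert failure_message(RuntimeError("Runtime Error")) == "Runtime Error"
```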
a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/buffered_stream_consumer/BufferedStreamConsumerTest.java @@ -20,12 +20,12 @@ import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.functional.CheckedFunction; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.record_buffer.InMemoryRecordBufferingStrategy; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.Field; diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java index 330b3c998e11c..3a611c3c35f6e 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/InMemoryRecordBufferingStrategyTest.java @@ -12,10 +12,10 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.buffered_stream_consumer.RecordWriter; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.List; import org.junit.jupiter.api.Test; diff --git a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java index 2de320114ebed..37497324c0023 100644 --- a/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java +++ b/airbyte-integrations/bases/base-java/src/test/java/io/airbyte/integrations/destination/record_buffer/SerializedBufferingStrategyTest.java @@ -17,9 +17,9 @@ import io.airbyte.commons.functional.CheckedBiConsumer; import io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java 
b/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java index 47209ada28f78..7d5ba25b23bbf 100644 --- a/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java +++ b/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/CdcTargetPosition.java @@ -5,6 +5,7 @@ package io.airbyte.integrations.debezium; import com.fasterxml.jackson.databind.JsonNode; +import io.debezium.engine.ChangeEvent; /** * This interface is used to define the target position at the beginning of the sync so that once we @@ -15,6 +16,41 @@ */ public interface CdcTargetPosition { + /** + * Reads a position value (lsn) from a change event and compares it to target lsn + * + * @param valueAsJson json representation of a change event + * @return true if event lsn is equal or greater than target lsn, or if last snapshot event + */ boolean reachedTargetPosition(JsonNode valueAsJson); + /** + * Returns a position value (lsn) from a heartbeat event. + * + * @param heartbeatEvent a heartbeat change event + * @return the lsn value in a heartbeat change event or null + */ + default Long getHeartbeatPosition(final ChangeEvent heartbeatEvent) { + throw new UnsupportedOperationException(); + } + + /** + * Checks if a specified lsn has reached the target lsn. + * + * @param lsn an lsn value + * @return true if lsn is equal or greater than target lsn + */ + default boolean reachedTargetPosition(final Long lsn) { + throw new UnsupportedOperationException(); + } + + /** + * Indicates whether the implementation supports heartbeat position. + * + * @return true if heartbeats are supported + */ + default boolean isHeartbeatSupported() { + return false; + } + } diff --git a/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java b/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java index f1793838cae55..a244eab227d74 100644 --- a/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java +++ b/airbyte-integrations/bases/debezium-v1-9-6/src/main/java/io/airbyte/integrations/debezium/internals/DebeziumRecordIterator.java @@ -13,6 +13,7 @@ import io.airbyte.integrations.debezium.CdcTargetPosition; import io.debezium.engine.ChangeEvent; import java.time.Duration; +import java.time.LocalDateTime; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; @@ -46,6 +47,8 @@ public class DebeziumRecordIterator extends AbstractIterator> queue, final CdcTargetPosition targetPosition, @@ -61,8 +64,17 @@ public DebeziumRecordIterator(final LinkedBlockingQueue computeNext() { // keep trying until the publisher is closed or until the queue is empty. the latter case is @@ -71,7 +83,11 @@ protected ChangeEvent computeNext() { while (!MoreBooleans.isTruthy(publisherStatusSupplier.get()) || !queue.isEmpty()) { final ChangeEvent next; try { - final Duration waitTime = receivedFirstRecord ?
SUBSEQUENT_RECORD_WAIT_TIME : firstRecordWaitTime; + // #18987: waitTime is still required with heartbeats for backward + // compatibility with connectors not implementing heartbeat + // yet (MySql, MSSql), And also due to postgres taking a long time + // initially staying on "searching for WAL resume position" + final Duration waitTime = receivedFirstRecord ? SUBSEQUENT_RECORD_WAIT_TIME : this.firstRecordWaitTime; next = queue.poll(waitTime.getSeconds(), TimeUnit.SECONDS); } catch (final InterruptedException e) { throw new RuntimeException(e); @@ -79,26 +95,60 @@ protected ChangeEvent computeNext() { // if within the timeout, the consumer could not get a record, it is time to tell the producer to // shutdown. + // #18987: Noticed in testing that it's possible for DBZ to be stuck "Searching for WAL resume + // position" + // when no changes exist. In that case queue will pop after timeout with null value for next if (next == null) { - LOGGER.info("Closing cause next is returned as null"); + LOGGER.info("Closing: queue returned null event"); requestClose(); LOGGER.info("no record found. polling again."); continue; } + if (targetPosition.isHeartbeatSupported()) { + // check if heartbeat and read hearbeat position + LOGGER.debug("checking heartbeat lsn for: {}", next); + final Long heartbeatPos = targetPosition.getHeartbeatPosition(next); + if (heartbeatPos != null) { + // wrap up sync if heartbeat position crossed the target OR heartbeat position hasn't changed for + // too long + if (targetPosition.reachedTargetPosition(heartbeatPos) + || (heartbeatPos.equals(this.lastHeartbeatPosition) && heartbeatPosNotChanging())) { + LOGGER.info("Closing: Heartbeat indicates sync is done"); + requestClose(); + } + if (!heartbeatPos.equals(this.lastHeartbeatPosition)) { + this.tsLastHeartbeat = LocalDateTime.now(); + this.lastHeartbeatPosition = heartbeatPos; + } + continue; + } + } + final JsonNode eventAsJson = Jsons.deserialize(next.value()); hasSnapshotFinished = hasSnapshotFinished(eventAsJson); // if the last record matches the target file position, it is time to tell the producer to shutdown. 
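The new default methods on `CdcTargetPosition` and the block above give the iterator two independent, heartbeat-based reasons to shut the producer down: the heartbeat position has crossed the target, or it has stopped moving for too long, which suggests there are no further changes worth waiting for. A toy Python sketch of that contract and decision, using a plain integer LSN (illustrative only; the staleness threshold is a stand-in parameter, while the Java code derives it from the configured first-record wait time):

```python
import time
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class IntLsnTargetPosition:
    """Minimal CdcTargetPosition-like object keyed on an integer LSN."""
    target_lsn: int

    def is_heartbeat_supported(self) -> bool:
        return True

    def get_heartbeat_position(self, heartbeat_event: dict) -> Optional[int]:
        # Heartbeat events carry a position but no table data.
        return heartbeat_event.get("lsn")

    def reached_target_position(self, lsn: int) -> bool:
        return lsn >= self.target_lsn


@dataclass
class HeartbeatTracker:
    """Mirrors the iterator's lastHeartbeatPosition / tsLastHeartbeat bookkeeping."""
    stale_after_s: float
    last_lsn: Optional[int] = None
    ts_last_change: float = field(default_factory=time.monotonic)

    def should_close(self, target: IntLsnTargetPosition, heartbeat_event: dict) -> bool:
        lsn = target.get_heartbeat_position(heartbeat_event)
        if lsn is None:
            return False
        if target.reached_target_position(lsn):
            return True  # heartbeat crossed the target position: nothing left to read
        if lsn == self.last_lsn:
            # Position is not advancing; close once it has been stale for too long.
            return time.monotonic() - self.ts_last_change > self.stale_after_s
        # Position advanced: remember it and keep polling.
        self.last_lsn = lsn
        self.ts_last_change = time.monotonic()
        return False


target = IntLsnTargetPosition(target_lsn=1000)
tracker = HeartbeatTracker(stale_after_s=60)
assert not tracker.should_close(target, {"lsn": 990})
assert tracker.should_close(target, {"lsn": 1000})
```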
+ if (!signalledClose && shouldSignalClose(eventAsJson)) { + LOGGER.info("Closing: Change event reached target position"); requestClose(); } + this.tsLastHeartbeat = null; + this.lastHeartbeatPosition = null; receivedFirstRecord = true; return next; } return endOfData(); } + private boolean heartbeatPosNotChanging() { + final Duration tbt = Duration.between(this.tsLastHeartbeat, LocalDateTime.now()); + LOGGER.debug("Time since last hb_pos change {}s", tbt.toSeconds()); + // wait time for no change in heartbeat position is half of initial waitTime + return tbt.compareTo(this.firstRecordWaitTime.dividedBy(2)) > 0; + } + private boolean hasSnapshotFinished(final JsonNode eventAsJson) { final SnapshotMetadata snapshot = SnapshotMetadata.valueOf(eventAsJson.get("source").get("snapshot").asText().toUpperCase()); return SnapshotMetadata.TRUE != snapshot; @@ -122,6 +172,7 @@ private boolean hasSnapshotFinished(final JsonNode eventAsJson) { */ @Override public void close() throws Exception { + LOGGER.info("Closing: Iterator closing"); requestClose(); } diff --git a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md index 78123bb4d33e4..ce9ad120f3e6f 100644 --- a/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md +++ b/airbyte-integrations/bases/source-acceptance-test/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog + +## 0.2.19 +Test for exposed secrets: const values can not hold secrets. [#19465](https://github.com/airbytehq/airbyte/pull/19465). + +## 0.2.18 +Test connector specification against exposed secret fields. [#19124](https://github.com/airbytehq/airbyte/pull/19124). + +## 0.2.17 +Make `incremental.future_state` mandatory in `high` `test_strictness_level`. [#19085](https://github.com/airbytehq/airbyte/pull/19085/). + ## 0.2.16 Run `basic_read` on the discovered catalog in `high` `test_strictness_level`. [#18937](https://github.com/airbytehq/airbyte/pull/18937). diff --git a/airbyte-integrations/bases/source-acceptance-test/Dockerfile b/airbyte-integrations/bases/source-acceptance-test/Dockerfile index d1cb06c59e73c..660a56ca4a986 100644 --- a/airbyte-integrations/bases/source-acceptance-test/Dockerfile +++ b/airbyte-integrations/bases/source-acceptance-test/Dockerfile @@ -33,7 +33,7 @@ COPY pytest.ini setup.py ./ COPY source_acceptance_test ./source_acceptance_test RUN pip install . 
-LABEL io.airbyte.version=0.2.16 +LABEL io.airbyte.version=0.2.19 LABEL io.airbyte.name=airbyte/source-acceptance-test ENTRYPOINT ["python", "-m", "pytest", "-p", "source_acceptance_test.plugin", "-r", "fEsx"] diff --git a/airbyte-integrations/bases/source-acceptance-test/README.md b/airbyte-integrations/bases/source-acceptance-test/README.md index b527589398f0d..d240d26387fc8 100644 --- a/airbyte-integrations/bases/source-acceptance-test/README.md +++ b/airbyte-integrations/bases/source-acceptance-test/README.md @@ -66,5 +66,5 @@ To migrate a legacy configuration to the latest configuration format please run: ```bash python -m venv .venv # If you don't have a virtualenv already source ./.venv/bin/activate # If you're not in your virtualenv already -python source_acceptance_test/utils/config_migration.py ../../connectors/source-to-migrate/acceptance-test-config.yml +python source_acceptance_test/tools/strictness_level_migration/config_migration.py ../../connectors/source-to-migrate/acceptance-test-config.yml ``` \ No newline at end of file diff --git a/airbyte-integrations/bases/source-acceptance-test/pytest.ini b/airbyte-integrations/bases/source-acceptance-test/pytest.ini index 2531c1f41463a..087a99ed7bc03 100644 --- a/airbyte-integrations/bases/source-acceptance-test/pytest.ini +++ b/airbyte-integrations/bases/source-acceptance-test/pytest.ini @@ -7,3 +7,4 @@ testpaths = markers = default_timeout slow: marks tests as slow (deselect with '-m "not slow"') + backward_compatibility diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py index 5bd5307e4fe98..4fbeae339d3b0 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/config.py @@ -141,13 +141,18 @@ class FullRefreshConfig(BaseConfig): ) +class FutureStateConfig(BaseConfig): + future_state_path: Optional[str] = Field(description="Path to a state file with values in far future") + missing_streams: List[EmptyStreamConfiguration] = Field(default=[], description="List of missings streams with valid bypass reasons.") + + class IncrementalConfig(BaseConfig): config_path: str = config_path configured_catalog_path: Optional[str] = configured_catalog_path cursor_paths: Optional[Mapping[str, List[str]]] = Field( description="For each stream, the path of its cursor field in the output state messages." 
) - future_state_path: Optional[str] = Field(description="Path to a state file with values in far future") + future_state: Optional[FutureStateConfig] = Field(description="Configuration for the future state.") timeout_seconds: int = timeout_seconds threshold_days: int = Field( description="Allow records to be emitted with a cursor value this number of days before the state cursor", @@ -230,6 +235,9 @@ def migrate_legacy_to_current_config(legacy_config: dict) -> dict: basic_read_tests["empty_streams"] = [ {"name": empty_stream_name} for empty_stream_name in basic_read_tests.get("empty_streams", []) ] + for incremental_test in migrated_config["acceptance_tests"].get("incremental", {}).get("tests", []): + if "future_state_path" in incremental_test: + incremental_test["future_state"] = {"future_state_path": incremental_test.pop("future_state_path")} return migrated_config @root_validator(pre=True) diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py index e4bdc743a2be1..b1fd78d9e30eb 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_core.py @@ -8,7 +8,7 @@ from collections import Counter, defaultdict from functools import reduce from logging import Logger -from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Set +from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Set, Tuple from xmlrpc.client import Boolean import dpath.util @@ -53,6 +53,29 @@ def connector_spec_dict_fixture(actual_connector_spec): return json.loads(actual_connector_spec.json()) +@pytest.fixture(name="secret_property_names") +def secret_property_names_fixture(): + return ( + "client_token", + "access_token", + "api_token", + "token", + "secret", + "client_secret", + "password", + "key", + "service_account_info", + "service_account", + "tenant_id", + "certificate", + "jwt", + "credentials", + "app_id", + "appid", + "refresh_token", + ) + + @pytest.mark.default_timeout(10) class TestSpec(BaseTest): @@ -95,6 +118,20 @@ def test_docker_env(self, actual_connector_spec: ConnectorSpecification, docker_ docker_runner.entry_point ), "env should be equal to space-joined entrypoint" + def test_enum_usage(self, actual_connector_spec: ConnectorSpecification): + """Check that enum lists in specs contain distinct values.""" + docs_url = "https://docs.airbyte.io/connector-development/connector-specification-reference" + docs_msg = f"See specification reference at {docs_url}." + + schema_helper = JsonSchemaHelper(actual_connector_spec.connectionSpecification) + enum_paths = schema_helper.find_nodes(keys=["enum"]) + + for path in enum_paths: + enum_list = schema_helper.get_node(path) + assert len(set(enum_list)) == len( + enum_list + ), f"Enum lists should not contain duplicate values. Misconfigured enum array: {enum_list}. 
{docs_msg}" + def test_oneof_usage(self, actual_connector_spec: ConnectorSpecification): """Check that if spec contains oneOf it follows the rules according to reference https://docs.airbyte.io/connector-development/connector-specification-reference @@ -150,6 +187,90 @@ def test_has_secret(self): def test_secret_never_in_the_output(self): """This test should be injected into any docker command it needs to know current config and spec""" + @staticmethod + def _is_spec_property_name_secret(path: str, secret_property_names) -> Tuple[Optional[str], bool]: + """ + Given a path to a type field, extract a field name and decide whether it is a name of secret or not + based on a provided list of secret names. + Split the path by `/`, drop the last item and make list reversed. + Then iterate over it and find the first item that's not a reserved keyword or an index. + Example: + properties/credentials/oneOf/1/properties/api_key/type -> [api_key, properties, 1, oneOf, credentials, properties] -> api_key + """ + reserved_keywords = ("anyOf", "oneOf", "allOf", "not", "properties", "items", "type", "prefixItems") + for part in reversed(path.split("/")[:-1]): + if part.isdigit() or part in reserved_keywords: + continue + return part, part.lower() in secret_property_names + return None, False + + @staticmethod + def _property_can_store_secret(prop: dict) -> bool: + """ + Some fields can not hold a secret by design, others can. + Null type as well as boolean can not hold a secret value. + A string, a number or an integer type can always store secrets. + Objects and arrays can hold a secret in case they are generic, + meaning their inner structure is not described in details with properties/items. + A field with a constant value can not hold a secret as well. + """ + unsecure_types = {"string", "integer", "number"} + type_ = prop["type"] + is_property_generic_object = type_ == "object" and not any( + [prop.get("properties", {}), prop.get("anyOf", []), prop.get("oneOf", []), prop.get("allOf", [])] + ) + is_property_generic_array = type_ == "array" and not any([prop.get("items", []), prop.get("prefixItems", [])]) + is_property_constant_value = bool(prop.get("const")) + can_store_secret = any( + [ + isinstance(type_, str) and type_ in unsecure_types, + is_property_generic_object, + is_property_generic_array, + isinstance(type_, list) and (set(type_) & unsecure_types), + ] + ) + if not can_store_secret: + return False + # if a property can store a secret, additional check should be done if it's a constant value + return not is_property_constant_value + + def test_secret_is_properly_marked(self, connector_spec_dict: dict, detailed_logger, secret_property_names): + """ + Each field has a type, therefore we can make a flat list of fields from the returned specification. + Iterate over the list, check if a field name is a secret name, can potentially hold a secret value + and make sure it is marked as `airbyte_secret`. 
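Together, `_is_spec_property_name_secret` and `_property_can_store_secret` drive the new `test_secret_is_properly_marked` check: every `**/type` path whose property name looks secret-like, and whose type can actually hold a value, must carry `airbyte_secret: true`. A trimmed-down, hedged illustration of that walk; the secret-name list and the type check below are simplified stand-ins for the real helpers, and the spec snippet is made up:

```python
import dpath.util

SECRET_NAMES = {"api_token", "password"}  # small subset of the secret_property_names fixture
RESERVED = {"anyOf", "oneOf", "allOf", "not", "properties", "items", "type", "prefixItems"}

spec_properties = {
    "api_token": {"type": "string"},                         # secret-like name, not marked -> exposed
    "start_date": {"type": "string"},                        # not a secret-like name -> ignored
    "password": {"type": "string", "airbyte_secret": True},  # secret-like name, already marked -> fine
}

exposed = []
for type_path, _ in dpath.util.search(spec_properties, "**/type", yielded=True):
    segments = type_path.split("/")[:-1]
    # Like _is_spec_property_name_secret: take the last path segment that is
    # neither a list index nor a JSON Schema keyword.
    name = next((s for s in reversed(segments) if not s.isdigit() and s not in RESERVED), None)
    if name is None or name.lower() not in SECRET_NAMES:
        continue
    prop = dpath.util.get(spec_properties, "/".join(segments))
    # Simplified stand-in for _property_can_store_secret.
    can_hold_value = prop.get("type") in ("string", "integer", "number")
    if can_hold_value and not prop.get("airbyte_secret", False):
        exposed.append(name)

assert exposed == ["api_token"]
```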
+ """ + secrets_exposed = [] + non_secrets_hidden = [] + spec_properties = connector_spec_dict["connectionSpecification"]["properties"] + for type_path, value in dpath.util.search(spec_properties, "**/type", yielded=True): + _, is_property_name_secret = self._is_spec_property_name_secret(type_path, secret_property_names) + if not is_property_name_secret: + continue + absolute_path = f"/{type_path}" + property_path, _ = absolute_path.rsplit(sep="/", maxsplit=1) + property_definition = dpath.util.get(spec_properties, property_path) + marked_as_secret = property_definition.get("airbyte_secret", False) + possibly_a_secret = self._property_can_store_secret(property_definition) + if marked_as_secret and not possibly_a_secret: + non_secrets_hidden.append(property_path) + if not marked_as_secret and possibly_a_secret: + secrets_exposed.append(property_path) + + if non_secrets_hidden: + properties = "\n".join(non_secrets_hidden) + detailed_logger.warning( + f"""Some properties are marked with `airbyte_secret` although they probably should not be. + Please double check them. If they're okay, please fix this test. + {properties}""" + ) + if secrets_exposed: + properties = "\n".join(secrets_exposed) + pytest.fail( + f"""The following properties should be marked with `airbyte_secret!` + {properties}""" + ) + def test_defined_refs_exist_in_json_spec_file(self, connector_spec_dict: dict): """Checking for the presence of unresolved `$ref`s values within each json spec file""" check_result = list(find_all_values_for_key_in_schema(connector_spec_dict, "$ref")) diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py index 15e2f7c38ae4f..219bfc70e55f5 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/tests/test_incremental.py @@ -11,24 +11,40 @@ import pytest from airbyte_cdk.models import AirbyteMessage, AirbyteStateMessage, AirbyteStateType, ConfiguredAirbyteCatalog, Type from source_acceptance_test import BaseTest -from source_acceptance_test.config import IncrementalConfig +from source_acceptance_test.config import Config, EmptyStreamConfiguration, IncrementalConfig from source_acceptance_test.utils import ConnectorRunner, JsonSchemaHelper, SecretDict, filter_output, incremental_only_catalog -@pytest.fixture(name="future_state_path") -def future_state_path_fixture(inputs, base_path) -> Path: +@pytest.fixture(name="future_state_configuration") +def future_state_configuration_fixture(inputs, base_path, test_strictness_level) -> Tuple[Path, List[EmptyStreamConfiguration]]: """Fixture with connector's future state path (relative to base_path)""" - if getattr(inputs, "future_state_path"): - return Path(base_path) / getattr(inputs, "future_state_path") - pytest.skip("`future_state_path` not specified, skipping") + if inputs.future_state and inputs.future_state.future_state_path: + return Path(base_path) / inputs.future_state.future_state_path, inputs.future_state.missing_streams + elif test_strictness_level is Config.TestStrictnessLevel.high: + pytest.fail("High test strictness level error: a future state configuration must be provided in high test strictness level.") + else: + pytest.skip("`future_state` not specified, skipping.") @pytest.fixture(name="future_state") -def future_state_fixture(future_state_path) -> 
Path: +def future_state_fixture(future_state_configuration, test_strictness_level, configured_catalog) -> List[MutableMapping]: """""" + future_state_path, missing_streams = future_state_configuration with open(str(future_state_path), "r") as file: contents = file.read() - return json.loads(contents) + states = json.loads(contents) + if test_strictness_level is Config.TestStrictnessLevel.high: + if not all([missing_stream.bypass_reason is not None for missing_stream in missing_streams]): + pytest.fail("High test strictness level error: all missing_streams must have a bypass reason specified.") + all_stream_names = set([stream.stream.name for stream in configured_catalog.streams]) + streams_in_states = set([state["stream"]["stream_descriptor"]["name"] for state in states]) + declared_missing_streams_names = set([missing_stream.name for missing_stream in missing_streams]) + undeclared_missing_streams_names = all_stream_names - declared_missing_streams_names - streams_in_states + if undeclared_missing_streams_names: + pytest.fail( + f"High test strictness level error: {', '.join(undeclared_missing_streams_names)} streams are missing in your future_state file, please declare a state for those streams or fill-in a valid bypass_reason." + ) + return states @pytest.fixture(name="cursor_paths") diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/config_migration.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/config_migration.py deleted file mode 100644 index b0a4f4f4ee153..0000000000000 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/config_migration.py +++ /dev/null @@ -1,37 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -import argparse -from pathlib import Path - -import yaml -from source_acceptance_test.config import Config -from yaml import load - -try: - from yaml import CLoader as Loader -except ImportError: - from yaml import Loader - -parser = argparse.ArgumentParser(description="Migrate legacy acceptance-test-config.yml to the latest configuration format.") -parser.add_argument("config_path", type=str, help="Path to the acceptance-test-config.yml to migrate.") - - -def migrate_legacy_configuration(config_path: Path): - - with open(config_path, "r") as file: - to_migrate = load(file, Loader=Loader) - - if Config.is_legacy(to_migrate): - migrated_config = Config.migrate_legacy_to_current_config(to_migrate) - with open(config_path, "w") as output_file: - yaml.dump(migrated_config, output_file) - print(f"Your configuration was successfully migrated to the latest configuration format: {config_path}") - else: - print("Your configuration is not in a legacy format.") - - -if __name__ == "__main__": - args = parser.parse_args() - migrate_legacy_configuration(Path(args.config_path)) diff --git a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/json_schema_helper.py b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/json_schema_helper.py index 03c924e76869e..7f0c7badb7114 100644 --- a/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/json_schema_helper.py +++ b/airbyte-integrations/bases/source-acceptance-test/source_acceptance_test/utils/json_schema_helper.py @@ -4,7 +4,7 @@ from functools import reduce -from typing import Any, List, Mapping, Optional, Set +from typing import Any, Dict, List, Mapping, Optional, Set, Text, Union import pendulum from jsonref import JsonRef 
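Two related changes land here: `migrate_legacy_to_current_config` now nests a legacy `future_state_path` under the new `future_state` object (which also accepts `missing_streams`), and the `future_state` fixture enforces, under `high` test strictness, that every catalog stream either has an abnormal state or a declared bypass reason. A small Python sketch of both steps, with made-up stream names and paths:

```python
# 1. Legacy -> current shape of an incremental test entry (same move as the migration hook above).
incremental_test = {"config_path": "secrets/config.json", "future_state_path": "integration_tests/abnormal_state.json"}
if "future_state_path" in incremental_test:
    incremental_test["future_state"] = {"future_state_path": incremental_test.pop("future_state_path")}
assert incremental_test["future_state"] == {"future_state_path": "integration_tests/abnormal_state.json"}

# 2. High-strictness validation: every stream needs an abnormal state or a declared bypass reason.
all_stream_names = {"stream_a", "stream_b", "stream_c"}   # streams in the configured catalog
streams_in_states = {"stream_a"}                          # streams covered by the future-state file
declared_missing = {"stream_b"}                           # future_state.missing_streams (each with a bypass_reason)

undeclared_missing = all_stream_names - declared_missing - streams_in_states
assert undeclared_missing == {"stream_c"}                 # a non-empty set here fails the test in high strictness
```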
@@ -122,21 +122,23 @@ def get_node(self, path: List[str]) -> Any: return node def find_nodes(self, keys: List[str]) -> List[List[str]]: - """Get all nodes of schema that has specifies properties + """Find all paths that lead to nodes with the specified keys. - :param keys: + :param keys: list of keys :return: list of json object paths """ variant_paths = [] - def traverse_schema(_schema, path=None): + def traverse_schema(_schema: Union[Dict[Text, Any], List], path=None): path = path or [] if path and path[-1] in keys: variant_paths.append(path) - for item in _schema: - next_obj = _schema[item] if isinstance(_schema, dict) else item - if isinstance(next_obj, (list, dict)): - traverse_schema(next_obj, [*path, item]) + if isinstance(_schema, dict): + for item in _schema: + traverse_schema(_schema[item], [*path, item]) + elif isinstance(_schema, list): + for i, item in enumerate(_schema): + traverse_schema(_schema[i], [*path, i]) traverse_schema(self._schema) return variant_paths diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/README.md b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/README.md new file mode 100644 index 0000000000000..5d229f7508aa9 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/README.md @@ -0,0 +1,64 @@ +# Tooling for `test_strictness_level` migration + +This directory contains scripts that can help us manage the migration of connectors's `acceptance-test-config.yml` to `high` test strictness level. +Before running these scripts you need to set up a local virtual environment in the **current directory**: +```bash +python -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +``` +## Requirements +* [GitHub CLI](https://cli.github.com/) (`brew install gh`) + +## Create migration issue for GA connectors (`create_issues.py`) +This script will create one issue per GA connectors to migrate to `high` test strictness level. + +### What it does: +1. Find all GA connectors in `../../../../../airbyte-config/init/src/main/resources/seed/source_definitions.yaml` +2. Generate an issue content (title, body, labels, project), using `./templates/issue.md.j2` +3. Find an already existing issue with the same title. +4. Create the issue and return its url if it does not exist. + +Issues get created with the following labels: +* `area/connectors` +* `team/connectors-python` +* `type/enhancement` +* `test-strictness-level` + +Issues are added to the following project: `SAT-high-test-strictness-level` + +### How to run: +**Dry run**: +`python create_issues.py` + +**Real execution**: +`python create_issues.py --dry False` + +## Create migration PRs for GA connectors (`create_prs.py`) +This script will create one PR per GA connectors to migrate to `high` test strictness level. + +### What it does: +1. Iterate on all GA connectors in `../../../../../airbyte-config/init/src/main/resources/seed/source_definitions.yaml` +2. Create a branch for each GA connector +3. Locally migrate `acceptance_test_config.yml` to the latest format +4. Commit and push the changes on this branch +5. Open a PR for this branch +6. 
Run a SAT on this branch by posting a `/test` comment on the PR + +An example of the PR it creates can be found [here](https://github.com/airbytehq/airbyte/pull/19136) + +PR get created with the following labels: +* `area/connectors` +* `team/connectors-python` +* `type/enhancement` +* `test-strictness-level` + +PR are added to the following project: `SAT-high-test-strictness-level` + +### How to run: +**Dry run**: +`python create_prs.py` + +**Real execution**: +`python create_prs.py --dry False` + diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/__init__.py b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/config_migration.py b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/config_migration.py new file mode 100644 index 0000000000000..55f68267ead00 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/config_migration.py @@ -0,0 +1,58 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import argparse +import logging +from pathlib import Path + +import yaml +from source_acceptance_test.config import Config +from yaml import load + +try: + from yaml import CLoader as Loader +except ImportError: + from yaml import Loader + +parser = argparse.ArgumentParser(description="Migrate legacy acceptance-test-config.yml to the latest configuration format.") +parser.add_argument("config_path", type=str, help="Path to the acceptance-test-config.yml to migrate.") + + +def get_new_config_format(config_path: Path): + + with open(config_path, "r") as file: + to_migrate = load(file, Loader=Loader) + + if Config.is_legacy(to_migrate): + return Config.migrate_legacy_to_current_config(to_migrate) + else: + logging.warn("The configuration is not in a legacy format.") + return to_migrate + + +def set_high_test_strictness_level(config): + config["test_strictness_level"] = "high" + for basic_read_test in config["acceptance_tests"].get("basic_read", {"tests": []})["tests"]: + basic_read_test.pop("configured_catalog_path", None) + return config + + +def write_new_config(new_config, output_path): + with open(output_path, "w") as output_file: + yaml.dump(new_config, output_file) + logging.info("Saved the configuration in its new format") + + +def migrate_configuration(config_path): + new_config = get_new_config_format(config_path) + new_config = set_high_test_strictness_level(new_config) + write_new_config(new_config, config_path) + logging.info(f"The configuration was successfully migrated to the latest configuration format: {config_path}") + return config_path + + +if __name__ == "__main__": + args = parser.parse_args() + config_path = Path(args.config_path) + migrate_configuration(config_path) diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/create_issues.py b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/create_issues.py new file mode 100644 index 0000000000000..25cd305ba4730 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/create_issues.py @@ 
-0,0 +1,82 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import argparse +import json +import logging +import os +import subprocess +import tempfile + +from definitions import GA_DEFINITIONS +from jinja2 import Environment, FileSystemLoader + +TEMPLATES_FOLDER = "./templates/" +COMMON_ISSUE_LABELS = ["area/connectors", "team/connectors-python", "type/enhancement", "test-strictness-level"] +GITHUB_PROJECT_NAME = "SAT-high-test-strictness-level" + +logging.basicConfig(level=logging.DEBUG) +environment = Environment(loader=FileSystemLoader(TEMPLATES_FOLDER)) + +parser = argparse.ArgumentParser(description="Create issues for migration of GA connectors to high test strictness level in SAT") +parser.add_argument("-d", "--dry", default=True) + + +def get_issue_content(source_definition): + issue_title = f"Source {source_definition['name']}: enable `high` test strictness level in SAT" + + template = environment.get_template("issue.md.j2") + issue_body = template.render(connector_name=source_definition["name"], release_stage=source_definition["releaseStage"]) + file_definition, issue_body_path = tempfile.mkstemp() + + with os.fdopen(file_definition, "w") as tmp: + tmp.write(issue_body) + + return {"title": issue_title, "body_file": issue_body_path, "labels": COMMON_ISSUE_LABELS} + + +def create_issue(source_definition, dry_run=True): + issue_content = get_issue_content(source_definition) + list_command_arguments = ["gh", "issue", "list", "--state", "open", "--search", f"'{issue_content['title']}'", "--json", "url"] + + create_command_arguments = [ + "gh", + "issue", + "create", + "--title", + issue_content["title"], + "--body-file", + issue_content["body_file"], + "--project", + GITHUB_PROJECT_NAME, + ] + for label in issue_content["labels"]: + create_command_arguments += ["--label", label] + + list_existing_issue_process = subprocess.Popen(list_command_arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = list_existing_issue_process.communicate() + existing_issues = json.loads(stdout.decode()) + already_created = len(existing_issues) > 0 + if already_created: + logging.warning(f"An issue was already created for this definition: {existing_issues[0]}") + if not already_created: + if not dry_run: + process = subprocess.Popen(create_command_arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + if stderr: + logging.error(stderr.decode()) + else: + created_issue_url = stdout.decode() + logging.info(f"Created issue for {source_definition['name']}: {created_issue_url}") + else: + logging.info(f"[DRY RUN]: {' '.join(create_command_arguments)}") + os.remove(issue_content["body_file"]) + + +if __name__ == "__main__": + args = parser.parse_args() + dry_run = False if args.dry == "False" or args.dry == "false" else True + for definition in GA_DEFINITIONS: + create_issue(definition, dry_run=dry_run) diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/create_prs.py b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/create_prs.py new file mode 100644 index 0000000000000..86004efb55d3d --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/create_prs.py @@ -0,0 +1,145 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
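For reference, `set_high_test_strictness_level` in `config_migration.py` above makes exactly two edits to an already-migrated config: it forces `test_strictness_level: high` and drops `configured_catalog_path` from every `basic_read` test, since high strictness runs `basic_read` on the discovered catalog (see the 0.2.16 changelog entry above). On a hypothetical config dict the effect is:

```python
config = {
    "test_strictness_level": "low",
    "acceptance_tests": {
        "basic_read": {
            "tests": [{"config_path": "secrets/config.json", "configured_catalog_path": "integration_tests/catalog.json"}]
        }
    },
}

# Same transformation as set_high_test_strictness_level above.
config["test_strictness_level"] = "high"
for basic_read_test in config["acceptance_tests"].get("basic_read", {"tests": []})["tests"]:
    basic_read_test.pop("configured_catalog_path", None)

assert config["test_strictness_level"] == "high"
assert config["acceptance_tests"]["basic_read"]["tests"] == [{"config_path": "secrets/config.json"}]
```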
+# + +import argparse +import json +import logging +import os +import subprocess +import tempfile +from pathlib import Path + +from config_migration import migrate_configuration +from create_issues import COMMON_ISSUE_LABELS as COMMON_PR_LABELS +from create_issues import GITHUB_PROJECT_NAME +from definitions import GA_DEFINITIONS +from git import Repo +from jinja2 import Environment, FileSystemLoader + +CONNECTORS_DIRECTORY = "../../../../connectors" +REPO_ROOT = "../../../../../" +AIRBYTE_REPO = Repo(REPO_ROOT) +environment = Environment(loader=FileSystemLoader("./templates/")) +PR_TEMPLATE = environment.get_template("pr.md.j2") + +parser = argparse.ArgumentParser(description="Create PRs for migration of GA connectors to high test strictness level in SAT") +parser.add_argument("-d", "--dry", default=True) + + +logging.basicConfig(level=logging.DEBUG) + + +def migrate_acceptance_test_config(connector_name): + acceptance_test_config_path = Path(CONNECTORS_DIRECTORY) / connector_name / "acceptance-test-config.yml" + return migrate_configuration(acceptance_test_config_path) + + +def checkout_new_branch(connector_name): + AIRBYTE_REPO.heads.master.checkout() + new_branch_name = f"{connector_name}/sat/migrate-to-high-test-strictness-level" + new_branch = AIRBYTE_REPO.create_head(new_branch_name) + new_branch.checkout() + return new_branch + + +def commit_push_migrated_config(config_path, connector_name, new_branch, dry_run): + process = subprocess.Popen(["pre-commit", "run", "--files", config_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + process.communicate() + relative_config_path = f"airbyte-integrations/connectors/{connector_name}/acceptance-test-config.yml" + AIRBYTE_REPO.git.add(relative_config_path) + AIRBYTE_REPO.git.commit(m=f"Migrated config for {connector_name}") + logging.info(f"Committed migrated config on {new_branch}") + if not dry_run: + AIRBYTE_REPO.git.push("--set-upstream", "origin", new_branch) + logging.info(f"Pushed branch {new_branch} to origin") + + +def get_pr_content(definition): + pr_title = f"Source {definition['name']}: enable `high` test strictness level in SAT" + + pr_body = PR_TEMPLATE.render(connector_name=definition["name"], release_stage=definition["releaseStage"]) + file_definition, pr_body_path = tempfile.mkstemp() + + with os.fdopen(file_definition, "w") as tmp: + tmp.write(pr_body) + + return {"title": pr_title, "body_file": pr_body_path, "labels": COMMON_PR_LABELS} + + +def open_pr(definition, new_branch, dry_run): + pr_content = get_pr_content(definition) + list_command_arguments = ["gh", "pr", "list", "--state", "open", "--head", new_branch.name, "--json", "url"] + create_command_arguments = [ + "gh", + "pr", + "create", + "--draft", + "--title", + pr_content["title"], + "--body-file", + pr_content["body_file"], + "--project", + GITHUB_PROJECT_NAME, + ] + for label in pr_content["labels"]: + create_command_arguments += ["--label", label] + list_existing_pr_process = subprocess.Popen(list_command_arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = list_existing_pr_process.communicate() + existing_prs = json.loads(stdout.decode()) + already_created = len(existing_prs) > 0 + if already_created: + logging.warning(f"A PR was already created for this definition: {existing_prs[0]}") + if not already_created: + if not dry_run: + process = subprocess.Popen(create_command_arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + if stderr: + logging.error(stderr.decode()) + else: + 
created_pr_url = stdout.decode() + logging.info(f"Created PR for {definition['name']}: {created_pr_url}") + else: + logging.info(f"[DRY RUN]: {' '.join(create_command_arguments)}") + os.remove(pr_content["body_file"]) + + +def add_test_comment(definition, new_branch, dry_run): + connector_name = definition["dockerRepository"].replace("airbyte/", "") + comment = f"/test connector=connectors/{connector_name}" + comment_command_arguments = ["gh", "pr", "comment", new_branch.name, "--body", comment] + if not dry_run: + process = subprocess.Popen(comment_command_arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + _, stderr = process.communicate() + if stderr: + logging.error(stderr.decode()) + else: + logging.info("Added test comment") + else: + logging.info(f"[DRY RUN]: {' '.join(comment_command_arguments)}") + + +def migrate_config_on_new_branch(definition, dry_run): + AIRBYTE_REPO.heads.master.checkout() + connector_name = definition["dockerRepository"].replace("airbyte/", "") + new_branch = checkout_new_branch(connector_name) + config_path = migrate_acceptance_test_config(connector_name) + commit_push_migrated_config(config_path, connector_name, new_branch, dry_run) + return new_branch + + +def migrate_definition_and_open_pr(definition, dry_run): + original_branch = AIRBYTE_REPO.active_branch + new_branch = migrate_config_on_new_branch(definition, dry_run) + open_pr(definition, new_branch, dry_run) + add_test_comment(definition, new_branch, dry_run) + original_branch.checkout() + AIRBYTE_REPO.git.branch(D=new_branch) + logging.info(f"Deleted branch {new_branch}") + + +if __name__ == "__main__": + args = parser.parse_args() + dry_run = False if args.dry == "False" or args.dry == "false" else True + for definition in GA_DEFINITIONS[:1]: + migrate_definition_and_open_pr(definition, dry_run=dry_run) diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/definitions.py b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/definitions.py new file mode 100644 index 0000000000000..1a7f091f3cea1 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/definitions.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import yaml + +SOURCE_DEFINITIONS_FILE_PATH = "../../../../../airbyte-config/init/src/main/resources/seed/source_definitions.yaml" + + +def read_source_definitions(): + with open(SOURCE_DEFINITIONS_FILE_PATH, "r") as source_definitions_file: + return yaml.safe_load(source_definitions_file) + + +def find_by_release_stage(source_definitions, release_stage): + return [definition for definition in source_definitions if definition.get("releaseStage") == release_stage] + + +ALL_DEFINITIONS = read_source_definitions() +GA_DEFINITIONS = find_by_release_stage(ALL_DEFINITIONS, "generally_available") diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/requirements.txt b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/requirements.txt new file mode 100644 index 0000000000000..b30299006be8a --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/requirements.txt @@ -0,0 +1,64 @@ +airbyte-cdk==0.7.0 +appdirs==1.4.4 +attrs==22.1.0 +backoff==2.2.1 +cattrs==22.2.0 +certifi==2022.9.24 +charset-normalizer==2.1.1 +coverage==6.5.0 +dataclasses-jsonschema==2.15.1 +deepdiff==5.8.1 +Deprecated==1.2.13 +docker==5.0.3 +dpath==2.0.6 +exceptiongroup==1.0.1 +fancycompleter==0.9.1 +gitdb==4.0.9 +GitPython==3.1.29 +hypothesis==6.54.6 +hypothesis-jsonschema==0.20.1 +icdiff==1.9.1 +idna==3.4 +inflection==0.5.1 +iniconfig==1.1.1 +Jinja2==3.1.2 +jsonref==0.2 +jsonschema==3.2.0 +MarkupSafe==2.1.1 +ordered-set==4.1.0 +packaging==21.3 +pdbpp==0.10.3 +pendulum==2.1.2 +pluggy==1.0.0 +pprintpp==0.4.0 +py==1.11.0 +pyaml==21.10.1 +pydantic==1.9.2 +Pygments==2.13.0 +pyparsing==3.0.9 +pyrepl==0.9.0 +pyrsistent==0.19.2 +pytest==6.2.5 +pytest-cov==3.0.0 +pytest-mock==3.6.1 +pytest-sugar==0.9.6 +pytest-timeout==1.4.2 +python-dateutil==2.8.2 +pytzdata==2020.1 +PyYAML==5.4.1 +requests==2.28.1 +requests-cache==0.9.7 +requests-mock==1.9.3 +six==1.16.0 +smmap==5.0.0 +sortedcontainers==2.4.0 +-e git+ssh://git@github.com/airbytehq/airbyte.git@70679775b55c5bb1be7384114155924772885be0#egg=source_acceptance_test&subdirectory=airbyte-integrations/bases/source-acceptance-test +termcolor==2.1.0 +toml==0.10.2 +tomli==2.0.1 +typing_extensions==4.4.0 +url-normalize==1.4.3 +urllib3==1.26.12 +websocket-client==1.4.2 +wmctrl==0.4 +wrapt==1.14.1 diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/templates/issue.md.j2 b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/templates/issue.md.j2 new file mode 100644 index 0000000000000..8f27348d70dd2 --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/templates/issue.md.j2 @@ -0,0 +1,15 @@ +## What +A `test_strictness_level` field was introduced to Source Acceptance Tests (SAT). +{{ connector_name }} is a {{ release_stage }} connector, we want it to have a `high` test strictness level. + +**This will help**: +- maximize the SAT coverage on this connector. +- document its potential weaknesses in term of test coverage. + +## How +1. Migrate the existing `acceptance-test-config.yml` file to the latest configuration format. (See instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/bases/source-acceptance-test/README.md#L61)) +2. Enable `high` test strictness level in `acceptance-test-config.yml`. 
(See instructions [here](https://github.com/airbytehq/airbyte/blob/master/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md#L240)) +3. Commit changes on `acceptance-test-config.yml` and open a PR. +4. Run SAT with the `/test` command on the branch. +5. If tests are failing please fix the failing test or use `bypass_reason` fields to explain why a specific test can't be run. + diff --git a/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/templates/pr.md.j2 b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/templates/pr.md.j2 new file mode 100644 index 0000000000000..7b8c52abe710e --- /dev/null +++ b/airbyte-integrations/bases/source-acceptance-test/tools/strictness_level_migration/templates/pr.md.j2 @@ -0,0 +1,22 @@ +## What +A `test_strictness_level` field was introduced to Source Acceptance Tests (SAT). +{{ connector_name }} is a {{ release_stage }} connector, we want it to have a `high` test strictness level. + +**This will help**: +- maximize the SAT coverage on this connector. +- document its potential weaknesses in term of test coverage. + +## How +1. Migrate the existing `acceptance-test-config.yml` file to the latest configuration format. (See instructions [here](https://github.com/airbytehq/airbyte/blob/master/airbyte-integrations/bases/source-acceptance-test/README.md#L61)) +2. Enable `high` test strictness level in `acceptance-test-config.yml`. (See instructions [here](https://github.com/airbytehq/airbyte/blob/master/docs/connector-development/testing-connectors/source-acceptance-tests-reference.md#L240)) + +⚠️ ⚠️ ⚠️ +**If tests are failing please fix the failing test by changing the `acceptance-test-config.yml` file or use `bypass_reason` fields to explain why a specific test can't be run.** + +Please open a new PR if the new enabled tests help discover a new bug. +Once this bug fix is merged please rebase this branch and run `/test` again. + +You can find more details about the rules enforced by `high` test strictness level [here](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference/). + +## Review process +Please ask the `connector-operations` teams for review. \ No newline at end of file diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py index fe061812b4090..367a74c5ecb3a 100644 --- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_incremental.py @@ -2,8 +2,10 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# +import json from contextlib import nullcontext as does_not_raise from datetime import datetime +from pathlib import Path from typing import Any, Optional from unittest.mock import MagicMock, patch @@ -24,9 +26,14 @@ SyncMode, Type, ) -from source_acceptance_test.config import IncrementalConfig +from source_acceptance_test.config import Config, EmptyStreamConfiguration, IncrementalConfig +from source_acceptance_test.tests import test_incremental from source_acceptance_test.tests.test_incremental import TestIncremental as _TestIncremental -from source_acceptance_test.tests.test_incremental import compare_cursor_with_threshold +from source_acceptance_test.tests.test_incremental import ( + compare_cursor_with_threshold, + future_state_configuration_fixture, + future_state_fixture, +) def build_messages_from_record_data(stream: str, records: list[dict]) -> list[AirbyteMessage]: @@ -681,3 +688,166 @@ def test_state_with_abnormally_large_values(mocker, read_output, expectation): future_state=mocker.MagicMock(), docker_runner=docker_runner_mock, ) + + +@pytest.mark.parametrize( + "test_strictness_level, inputs, expect_fail, expect_skip", + [ + pytest.param( + Config.TestStrictnessLevel.high, + MagicMock(future_state=MagicMock(future_state_path="my_future_state_path", missing_streams=["foo", "bar"])), + False, + False, + id="high test strictness level, future_state_path and missing streams are defined: run the test.", + ), + pytest.param( + Config.TestStrictnessLevel.low, + MagicMock(future_state=MagicMock(future_state_path="my_future_state_path", missing_streams=["foo", "bar"])), + False, + False, + id="low test strictness level, future_state_path and missing_streams are defined: run the test.", + ), + pytest.param( + Config.TestStrictnessLevel.high, + MagicMock(future_state=MagicMock(future_state_path=None)), + True, + False, + id="high test strictness level, future_state_path and missing streams are defined: fail the test.", + ), + pytest.param( + Config.TestStrictnessLevel.low, + MagicMock(future_state=MagicMock(future_state_path=None)), + False, + True, + id="low test strictness level, future_state_path not defined: skip the test.", + ), + ], +) +def test_future_state_configuration_fixture(mocker, test_strictness_level, inputs, expect_fail, expect_skip): + mocker.patch.object(test_incremental.pytest, "fail") + mocker.patch.object(test_incremental.pytest, "skip") + output = future_state_configuration_fixture.__wrapped__(inputs, "base_path", test_strictness_level) + if not expect_fail and not expect_skip: + assert output == (Path("base_path/my_future_state_path"), ["foo", "bar"]) + if expect_fail: + test_incremental.pytest.fail.assert_called_once() + test_incremental.pytest.skip.assert_not_called() + if expect_skip: + test_incremental.pytest.skip.assert_called_once() + test_incremental.pytest.fail.assert_not_called() + + +TEST_AIRBYTE_STREAM_A = AirbyteStream(name="test_stream_a", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) +TEST_AIRBYTE_STREAM_B = AirbyteStream(name="test_stream_b", json_schema={"k": "v"}, supported_sync_modes=[SyncMode.full_refresh]) + +TEST_CONFIGURED_AIRBYTE_STREAM_A = ConfiguredAirbyteStream( + stream=TEST_AIRBYTE_STREAM_A, + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, +) + +TEST_CONFIGURED_AIRBYTE_STREAM_B = ConfiguredAirbyteStream( + stream=TEST_AIRBYTE_STREAM_B, + sync_mode=SyncMode.full_refresh, + destination_sync_mode=DestinationSyncMode.overwrite, +) + + +TEST_CONFIGURED_CATALOG = 
ConfiguredAirbyteCatalog(streams=[TEST_CONFIGURED_AIRBYTE_STREAM_A, TEST_CONFIGURED_AIRBYTE_STREAM_B]) + + +@pytest.mark.parametrize( + "test_strictness_level, configured_catalog, states, missing_streams, expect_fail", + [ + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + [ + { + "type": "STREAM", + "stream": { + "stream_state": {"airbytehq/integration-test": {"updated_at": "2121-06-30T10:22:10Z"}}, + "stream_descriptor": {"name": "test_stream_a"}, + }, + } + ], + [EmptyStreamConfiguration(name="test_stream_b", bypass_reason="no good reason")], + False, + id="High test strictness level, all missing streams are declared with bypass reason: does not fail.", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + [ + { + "type": "STREAM", + "stream": { + "stream_state": {"airbytehq/integration-test": {"updated_at": "2121-06-30T10:22:10Z"}}, + "stream_descriptor": {"name": "test_stream_a"}, + }, + } + ], + [EmptyStreamConfiguration(name="test_stream_b")], + True, + id="High test strictness level, missing streams are declared without bypass reason: fail.", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + [ + { + "type": "STREAM", + "stream": { + "stream_state": {"airbytehq/integration-test": {"updated_at": "2121-06-30T10:22:10Z"}}, + "stream_descriptor": {"name": "test_stream_a"}, + }, + } + ], + [EmptyStreamConfiguration(name="test_stream_b")], + False, + id="Low test strictness level, missing streams are declared without bypass reason: does fail.", + ), + pytest.param( + Config.TestStrictnessLevel.high, + TEST_CONFIGURED_CATALOG, + [ + { + "type": "STREAM", + "stream": { + "stream_state": {"airbytehq/integration-test": {"updated_at": "2121-06-30T10:22:10Z"}}, + "stream_descriptor": {"name": "test_stream_a"}, + }, + } + ], + [], + True, + id="High test strictness level, missing streams are not declared: fail.", + ), + pytest.param( + Config.TestStrictnessLevel.low, + TEST_CONFIGURED_CATALOG, + [ + { + "type": "STREAM", + "stream": { + "stream_state": {"airbytehq/integration-test": {"updated_at": "2121-06-30T10:22:10Z"}}, + "stream_descriptor": {"name": "test_stream_a"}, + }, + } + ], + [], + False, + id="Low test strictness level, missing streams are not declared: does not fail.", + ), + ], +) +def test_future_state_fixture(tmp_path, mocker, test_strictness_level, configured_catalog, states, missing_streams, expect_fail): + mocker.patch.object(test_incremental.pytest, "fail") + future_state_path = tmp_path / "abnormal_states.json" + with open(future_state_path, "w") as f: + json.dump(states, f) + future_state_configuration = (future_state_path, missing_streams) + output = future_state_fixture.__wrapped__(future_state_configuration, test_strictness_level, configured_catalog) + assert output == states + if expect_fail: + test_incremental.pytest.fail.assert_called_once() diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_json_schema_helper.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_json_schema_helper.py index e4e789e174c8b..82117f8260f5d 100644 --- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_json_schema_helper.py +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_json_schema_helper.py @@ -3,7 +3,7 @@ # from enum import Enum -from typing import Union +from typing import Any, List, Text, Union import pendulum import pytest @@ -208,3 +208,52 @@ def test_get_object_strucutre(object, pathes): ) def 
test_get_expected_schema_structure(schema, pathes): assert get_expected_schema_structure(schema) == pathes + + +@pytest.mark.parametrize( + "keys, num_paths, last_value", + [ + (["description"], 1, "Tests that keys can be found inside lists of dicts"), + (["option1"], 2, {"a_key": "a_value"}), + (["option2"], 1, ["value1", "value2"]), + (["nonexistent_key"], 0, None), + (["option1", "option2"], 3, ["value1", "value2"]) + ], +) +def test_find_and_get_nodes(keys: List[Text], num_paths: int, last_value: Any): + schema = { + "title": "Key_inside_oneOf", + "description": "Tests that keys can be found inside lists of dicts", + "type": "object", + "properties": { + "credentials": { + "type": "object", + "oneOf": [ + { + "type": "object", + "properties": { + "common": {"type": "string", "const": "option1", "default": "option1"}, + "option1": {"type": "string"}, + }, + }, + { + "type": "object", + "properties": { + "common": {"type": "string", "const": "option2", "default": "option2"}, + "option1": {"a_key": "a_value"}, + "option2": ["value1", "value2"], + }, + }, + ], + } + }, + } + schema_helper = JsonSchemaHelper(schema) + variant_paths = schema_helper.find_nodes(keys=keys) + assert len(variant_paths) == num_paths + + if variant_paths: + values_at_nodes = [] + for path in variant_paths: + values_at_nodes.append(schema_helper.get_node(path)) + assert last_value in values_at_nodes diff --git a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_spec.py b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_spec.py index 3a09ce07f887f..327876fec4c76 100644 --- a/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_spec.py +++ b/airbyte-integrations/bases/source-acceptance-test/unit_tests/test_spec.py @@ -6,6 +6,7 @@ import pytest from airbyte_cdk.models import ConnectorSpecification +from source_acceptance_test import conftest from source_acceptance_test.tests.test_core import TestSpec as _TestSpec from .conftest import does_not_raise @@ -351,6 +352,49 @@ def test_oneof_usage(connector_spec, should_fail): t.test_oneof_usage(actual_connector_spec=ConnectorSpecification(connectionSpecification=connector_spec)) +@parametrize_test_case( + { + "test_id": "successful", + "connector_spec": { + "type": "object", + "properties": { + "property_with_options": { + "title": "Property with options", + "description": "A property in the form of an enumerated list", + "type": "string", + "default": "Option 1", + "enum": ["Option 1", "Option 2", "Option 3"], + } + }, + }, + "should_fail": False, + }, + { + "test_id": "duplicate_values", + "connector_spec": { + "type": "object", + "properties": { + "property_with_options": { + "title": "Property with options", + "description": "A property in the form of an enumerated list", + "type": "string", + "default": "Option 1", + "enum": ["Option 1", "Option 2", "Option 3", "Option 2"], + } + }, + }, + "should_fail": True, + }, +) +def test_enum_usage(connector_spec, should_fail): + t = _TestSpec() + if should_fail is True: + with pytest.raises(AssertionError): + t.test_enum_usage(actual_connector_spec=ConnectorSpecification(connectionSpecification=connector_spec)) + else: + t.test_enum_usage(actual_connector_spec=ConnectorSpecification(connectionSpecification=connector_spec)) + + @pytest.mark.parametrize( "connector_spec, expected_error", [ @@ -605,3 +649,141 @@ def test_additional_properties_is_true(connector_spec, expectation): t = _TestSpec() with expectation: t.test_additional_properties_is_true(connector_spec) + + 
+@pytest.mark.parametrize( + "connector_spec, should_fail, is_warning_logged", + ( + ( + { + "connectionSpecification": {"type": "object", "properties": {"api_token": {"type": "string", "airbyte_secret": True}}} + }, + False, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"api_token": {"type": "null"}}} + }, + False, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"refresh_token": {"type": "boolean", "airbyte_secret": True}}} + }, + False, + True + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"jwt": {"type": "object"}}} + }, + True, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"refresh_token": {"type": ["null", "string"]}}} + }, + True, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"credentials": {"type": "array"}}} + }, + True, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"credentials": {"type": "array", "items": {"type": "string"}}}} + }, + True, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"auth": {"oneOf": [{"api_token": {"type": "string"}}]}}} + }, + True, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"credentials": {"oneOf": [{"type": "object", "properties": {"api_key": {"type": "string"}}}]}}} + }, + True, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"start_date": {"type": ["null", "string"]}}} + }, + False, + False + ), + ( + { + "connectionSpecification": {"type": "object", "properties": {"credentials": {"oneOf": [{"type": "string", "const": "OAuth2.0"}]}}} + }, + False, + False + ) + ), +) +def test_airbyte_secret(mocker, connector_spec, should_fail, is_warning_logged): + mocker.patch.object(conftest.pytest, "fail") + t = _TestSpec() + logger = mocker.Mock() + t.test_secret_is_properly_marked(connector_spec, logger, ("api_key", "api_token", "refresh_token", "jwt", "credentials")) + if should_fail: + conftest.pytest.fail.assert_called_once() + else: + conftest.pytest.fail.assert_not_called() + if is_warning_logged: + _, args, _ = logger.warning.mock_calls[0] + msg, *_ = args + assert "Some properties are marked with `airbyte_secret` although they probably should not be" in msg + else: + logger.warning.assert_not_called() + + +@pytest.mark.parametrize( + "path, expected_name, expected_result", + ( + ("properties/api_key/type", "api_key", True), + ("properties/start_date/type", "start_date", False), + ("properties/credentials/oneOf/1/properties/api_token/type", "api_token", True), + ("properties/type", None, False), # root element + ("properties/accounts/items/2/properties/jwt/type", "jwt", True) + ) +) +def test_is_spec_property_name_secret(path, expected_name, expected_result): + t = _TestSpec() + assert t._is_spec_property_name_secret(path, ("api_key", "api_token", "refresh_token", "jwt", "credentials")) == (expected_name, expected_result) + + +@pytest.mark.parametrize( + "property_def, can_store_secret", + ( + ({"type": "boolean"}, False), + ({"type": "null"}, False), + ({"type": "string"}, True), + ({"type": "integer"}, True), + ({"type": "number"}, True), + ({"type": ["null", "string"]}, True), + ({"type": ["null", "boolean"]}, False), + ({"type": "object"}, True), + # the object itself cannot hold a secret but the inner items can and will be processed separately + ({"type": "object", "properties": {"api_key": {}}}, False), + ({"type": "array"}, 
True), + # same as object + ({"type": "array", "items": {"type": "string"}}, False), + ({"type": "string", "const": "OAuth2.0"}, False) + ) +) +def test_property_can_store_secret(property_def, can_store_secret): + t = _TestSpec() + assert t._property_can_store_secret(property_def) is can_store_secret diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index 4ded507367282..6f77fee24a0c9 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -22,11 +22,13 @@ | BigCommerce | [![source-bigcommerce](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-bigcommerce%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-bigcommerce) | | BigQuery | [![source-bigquery](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-bigquery%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-bigquery/) | | Bing Ads | [![source-bing-ads](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-bing-ads%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-bing-ads) | +| Breezometer | [![source-breezometer](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-breezometer%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-breezometer) | | Chargebee | [![source-chargebee](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-chargebee%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-chargebee/) | | Chargify | [![source-chargify](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-chargify%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-chargify/) | | Chartmogul | [![source-chartmogul](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-chartmogul%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-chartmogul/) | | Cart.com | [![source-cart](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-cart%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-cart/) | | Close.com | [![source-close-com](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-close-com%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-close-com/) | +| Convex | [![source-convex](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-convex%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-convex/) | | Delighted | [![source-delighted](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-delighted%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-delighted) | | Dixa | [![source-dixa](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-dixa%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-dixa) | | Dockerhub | 
[![source-dockerhub](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-dockerhub%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-dockerhub) | @@ -34,6 +36,7 @@ | End-to-End Testing | [![source-e2e-test](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-e2e-test%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-e2e-test) | | Exchange Rates API | [![source-exchange-rates](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-exchange-rates%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-exchange-rates) | | Facebook Marketing | [![source-facebook-marketing](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-facebook-marketing%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-facebook-marketing) | +| Fastbill | [![source-fastbill](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-fastbill%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-fastbill) | | Fauna | [![source-fauna](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-fauna%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-fauna) | | Files | [![source-file](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-file%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-file) | | Flexport | [![source-file](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-flexport%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-flexport) | @@ -42,6 +45,7 @@ | Freshservice | [![source-service](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-freshservice%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-freshservice) | | GitHub | [![source-github](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-github%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-github) | | GitLab | [![source-gitlab](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-gitlab%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-gitlab) | +| Gong | [![source-gong](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-gong%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-gong) | | Google Ads | [![source-google-ads](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-google-ads%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-google-ads) | | Google Analytics v4 | [![source-google-analytics-v4](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-google-analytics-v4%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-google-analytics-v4) | | Google Search Console | 
[![source-google-search-console](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-google-search-console%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-google-search-console) | @@ -83,6 +87,7 @@ | OpenWeather | [![source-openweather](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-openweather%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-openweather) | | Oracle DB | [![source-oracle](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-oracle%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-oracle) | | Orbit | [![source-orbit](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-orbit%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-orbit) | +| Partnerstack | [![source-partnerstack](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-partnerstack%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-partnerstack) | | Paypal Transaction | [![paypal-transaction](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-paypal-transaction%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-paypal-transaction) | | Paystack | [![source-paystack](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-paystack%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-paystack) | | PersistIq | [![source-persistiq](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-persistiq%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-persistiq/) | @@ -93,6 +98,7 @@ | Posthog | [![source-posthog](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-posthog%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-posthog) | | PrestaShop | [![source-prestashop](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-prestashop%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-prestashop) | | Primetric | [![source-primetric](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-primetric%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-primetric) | +| PyPI | [![source-public-apis](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fpypi%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-pypi) | | Public APIs | [![source-public-apis](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-public-apis%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-public-apis) | | CockroachDb | [![source-cockroachdb](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-cockroachdb%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-cockroachdb) | | Confluence | 
[![source-confluence](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-confluence%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-confluence) | @@ -102,6 +108,8 @@ | Recharge | [![source-recharge](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-recharge%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-recharge) | | Recurly | [![source-recurly](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-recurly%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-recurly) | | Redshift | [![source-redshift](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-redshift%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-redshift) | +| Reply.io | [![source-reply-io](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-reply-io%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-reply-io) | +| RSS | [![source-rss](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-rss%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-rss) | | S3 | [![source-s3](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-s3%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-s3) | | Salesforce | [![source-salesforce](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-salesforce%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-salesforce) | | Salesloft | [![source-salesloft](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-salesloft%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-salesloft) | @@ -117,16 +125,22 @@ | Strava | [![source-stava](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-strava%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-strava) | | Stripe | [![source-stripe](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-stripe%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-stripe) | | Tempo | [![source-tempo](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-tempo%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-tempo) | +| The Guardian API | [![source-the-guardian-api](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-the-guardian-api%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-the-guardian-api) | | TikTok Marketing | [![source-tiktok-marketing](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-tiktok-marketing%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-tiktok-marketing) | | Trello | [![source-trello](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-trello%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-trello) | | Twilio | 
[![source-twilio](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-twilio%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-twilio) | | Typeform | [![source-typeform](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-typeform%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-typeform) | | US Census | [![source-us-census](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-us-census%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-us-census) | +| Vitally | [![source-vitally](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-vitally%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-vitally) | +| Visma e-conomics| [![source-visma-economic](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-visma-economic%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-us-census) | | Waiteraid | [![source-waiteraid]()](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-waiteraid%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-waiteraid) | | Whisky Hunter | [![source-whisky-hunter](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-whisky-hunter%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-whisky-hunter) | +| Workramp | [![source-workramp]()](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-workramp%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-workramp) | | Wrike | [![source-wrike](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-wrike%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-wrike) | | YouTube Analytics | [![source-youtube-analytics](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-youtube-analytics%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-youtube-analytics) | +| Weatherstack | [![source-weatherstack](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-weatherstack%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-weatherstack) | | Xkcd | [![source-xkcd](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-xkcd%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-xkcd) | +| Zapier Supported Storage | [![source-zapier-supported-storage](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-zapier-supported-storage%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-zapier-supported-storage) | | Zendesk Chat | [![source-zendesk-chat](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-zendesk-chat%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-zendesk-chat) | | Zendesk Support | 
[![source-zendesk-support](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-zendesk-support%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-zendesk-support) | | Zendesk Talk | [![source-zendesk-talk](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fsource-zendesk-talk%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/source-zendesk-talk) | @@ -167,4 +181,4 @@ | S3 | [![destination-s3](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-s3%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-s3) | | Scylla | [![destination-scylla](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-s3%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-scylla) | | SFTP-JSON | [![destination-sftp-json](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-sftp-json%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-sftp-json) | -| Snowflake | [![destination-snowflake](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-snowflake%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-snowflake) |\ +| Snowflake | [![destination-snowflake](https://img.shields.io/endpoint?url=https%3A%2F%2Fdnsgjos7lj2fu.cloudfront.net%2Ftests%2Fsummary%2Fdestination-snowflake%2Fbadge.json)](https://dnsgjos7lj2fu.cloudfront.net/tests/summary/destination-snowflake) |\ \ No newline at end of file diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/spec.yaml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/spec.yaml.hbs deleted file mode 100644 index 1c65d8c8502bc..0000000000000 --- a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/spec.yaml.hbs +++ /dev/null @@ -1,13 +0,0 @@ -documentationUrl: https://docsurl.com -connectionSpecification: - $schema: http://json-schema.org/draft-07/schema# - title: {{capitalCase name}} Spec - type: object - required: - - api_key - additionalProperties: true - properties: - # 'TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.': - api_key: - type: string - description: API Key diff --git a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/{{snakeCase name}}.yaml.hbs b/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/{{snakeCase name}}.yaml.hbs index 2f75e22bbbd1d..c9a620037322e 100644 --- a/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/{{snakeCase name}}.yaml.hbs +++ b/airbyte-integrations/connector-templates/source-configuration-based/source_{{snakeCase name}}/{{snakeCase name}}.yaml.hbs @@ -33,3 +33,17 @@ streams: check: stream_names: - "customers" + +spec: + documentation_url: https://docsurl.com + connection_specification: + title: {{capitalCase name}} Spec + type: object + required: + - api_key + additionalProperties: true + properties: + # 'TODO: This schema defines the configuration required for the source. 
This usually involves metadata such as database and/or authentication information.': + api_key: + type: string + description: API Key diff --git a/airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-docker.sh b/airbyte-integrations/connector-templates/source_acceptance_test_files/acceptance-test-docker.sh old mode 100644 new mode 100755 diff --git a/airbyte-integrations/connectors/destination-azure-blob-storage/src/main/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageConsumer.java b/airbyte-integrations/connectors/destination-azure-blob-storage/src/main/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageConsumer.java index 3305d960cc8cd..f93b3fa4079a0 100644 --- a/airbyte-integrations/connectors/destination-azure-blob-storage/src/main/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageConsumer.java +++ b/airbyte-integrations/connectors/destination-azure-blob-storage/src/main/java/io/airbyte/integrations/destination/azure_blob_storage/AzureBlobStorageConsumer.java @@ -9,7 +9,6 @@ import com.azure.storage.blob.specialized.SpecializedBlobClientBuilder; import com.azure.storage.common.StorageSharedKeyCredential; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.azure_blob_storage.writer.AzureBlobStorageWriter; import io.airbyte.integrations.destination.azure_blob_storage.writer.AzureBlobStorageWriterFactory; @@ -17,6 +16,7 @@ import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; @@ -85,7 +85,7 @@ protected void startTracked() throws Exception { final AirbyteStream stream = configuredStream.getStream(); final AirbyteStreamNameNamespacePair streamNamePair = AirbyteStreamNameNamespacePair - .fromAirbyteSteam(stream); + .fromAirbyteStream(stream); streamNameAndNamespaceToWriters.put(streamNamePair, writer); } } diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile index d50b200be66eb..0ad3bb307bcfa 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.2.5 +LABEL io.airbyte.version=1.2.7 LABEL io.airbyte.name=airbyte/destination-bigquery-denormalized diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestination.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestination.java index 7f0b45eb9e0c3..37f181f578e38 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestination.java +++ 
b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestination.java @@ -9,7 +9,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.Table; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.destination.bigquery.formatter.BigQueryRecordFormatter; @@ -22,6 +21,7 @@ import io.airbyte.integrations.destination.bigquery.uploader.config.UploaderConfig; import io.airbyte.integrations.destination.s3.avro.JsonToAvroSchemaConverter; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.io.IOException; import java.util.Map; import java.util.function.BiFunction; @@ -106,7 +106,7 @@ protected void putStreamIntoUploaderMap(AirbyteStream stream, AbstractBigQueryUploader uploader = BigQueryUploaderFactory.getUploader(uploaderConfig); uploaderMap.put( - AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream), + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream), uploader); } diff --git a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java index 5b36c7bc1b319..964bff57cb9fa 100644 --- a/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java +++ b/airbyte-integrations/connectors/destination-bigquery-denormalized/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryDenormalizedDestinationTest.java @@ -28,7 +28,6 @@ import com.google.cloud.bigquery.StandardSQLTypeName; import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.TableDefinition; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.bigquery.formatter.BigQueryRecordFormatter; import io.airbyte.integrations.destination.bigquery.formatter.DefaultBigQueryDenormalizedRecordFormatter; import io.airbyte.integrations.destination.bigquery.formatter.GcsBigQueryDenormalizedRecordFormatter; @@ -39,6 +38,7 @@ import io.airbyte.integrations.destination.bigquery.uploader.UploaderType; import io.airbyte.integrations.destination.bigquery.uploader.config.UploaderConfig; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.io.IOException; import java.util.HashMap; diff --git a/airbyte-integrations/connectors/destination-bigquery/Dockerfile b/airbyte-integrations/connectors/destination-bigquery/Dockerfile index d558e2a10c538..fccbfd8aae01c 100644 --- a/airbyte-integrations/connectors/destination-bigquery/Dockerfile +++ b/airbyte-integrations/connectors/destination-bigquery/Dockerfile @@ -17,5 +17,5 @@ ENV ENABLE_SENTRY true COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.2.5 +LABEL io.airbyte.version=1.2.7 LABEL io.airbyte.name=airbyte/destination-bigquery diff --git 
a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryAvroSerializedBuffer.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryAvroSerializedBuffer.java index 817b40c2768df..7841ffe2fa238 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryAvroSerializedBuffer.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryAvroSerializedBuffer.java @@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.functional.CheckedBiFunction; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.bigquery.formatter.BigQueryRecordFormatter; import io.airbyte.integrations.destination.record_buffer.BufferStorage; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; @@ -14,6 +13,7 @@ import io.airbyte.integrations.destination.s3.avro.S3AvroFormatConfig; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.io.IOException; import java.util.concurrent.Callable; diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java index d2bdc252185c3..0e20d754eb6e2 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryDestination.java @@ -9,16 +9,17 @@ import com.google.auth.oauth2.GoogleCredentials; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; import com.google.cloud.bigquery.Job; import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; import com.google.common.base.Charsets; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.functional.CheckedBiFunction; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.BaseConnector; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.destination.StandardNameTransformer; @@ -42,6 +43,7 @@ import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.io.ByteArrayInputStream; @@ -86,28 +88,30 @@ public AirbyteConnectionStatus check(final JsonNode config) { final BigQuery bigquery = getBigQuery(config); final UploadingMethod 
uploadingMethod = BigQueryUtils.getLoadingMethod(config); - BigQueryUtils.createDataset(bigquery, datasetId, datasetLocation); + BigQueryUtils.checkHasCreateAndDeleteDatasetRole(bigquery, datasetId, datasetLocation); + + final Dataset dataset = BigQueryUtils.getOrCreateDataset(bigquery, datasetId, datasetLocation); + if (!dataset.getLocation().equals(datasetLocation)) { + throw new ConfigErrorException("Actual dataset location doesn't match to location from config"); + } final QueryJobConfiguration queryConfig = QueryJobConfiguration .newBuilder(String.format("SELECT * FROM `%s.INFORMATION_SCHEMA.TABLES` LIMIT 1;", datasetId)) .setUseLegacySql(false) .build(); if (UploadingMethod.GCS.equals(uploadingMethod)) { - final AirbyteConnectionStatus airbyteConnectionStatus = checkGcsPermission(config); - if (Status.FAILED == airbyteConnectionStatus.getStatus()) { - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(airbyteConnectionStatus.getMessage()); - } + checkGcsPermission(config); } final ImmutablePair result = BigQueryUtils.executeQuery(bigquery, queryConfig); if (result.getLeft() != null) { return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); } else { - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(result.getRight()); + throw new ConfigErrorException(result.getRight()); } } catch (final Exception e) { LOGGER.error("Check failed.", e); - return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage() != null ? e.getMessage() : e.toString()); + throw new ConfigErrorException(e.getMessage() != null ? e.getMessage() : e.toString()); } } @@ -149,11 +153,7 @@ public AirbyteConnectionStatus checkGcsPermission(final JsonNode config) { message.append(" Please make sure the service account can access the bucket path, and the HMAC keys are correct."); LOGGER.error(message.toString(), e); - - return new AirbyteConnectionStatus() - .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage("Could access the GCS bucket with the provided configuration.\n" + e - .getMessage()); + throw new ConfigErrorException("Could not access the GCS bucket with the provided configuration.\n", e); } } @@ -237,7 +237,7 @@ protected void putStreamIntoUploaderMap(final AirbyteStream stream, final Map> uploaderMap) throws IOException { uploaderMap.put( - AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream), + AirbyteStreamNameNamespacePair.fromAirbyteStream(stream), BigQueryUploaderFactory.getUploader(uploaderConfig)); } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java index b2bd77a69efe5..480ad21fac4a7 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryGcsOperations.java @@ -84,7 +84,7 @@ public String getStagingFullPath(final String datasetId, final String stream) { public void createSchemaIfNotExists(final String datasetId, final String datasetLocation) { if (!existingSchemas.contains(datasetId)) { LOGGER.info("Creating dataset {}", datasetId); - BigQueryUtils.createDataset(bigQuery, datasetId, datasetLocation); + BigQueryUtils.getOrCreateDataset(bigQuery, 
datasetId, datasetLocation); existingSchemas.add(datasetId); } } diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java index 92fd666eb6452..989dc11a8606d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumer.java @@ -6,11 +6,11 @@ import io.airbyte.commons.string.Strings; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.bigquery.uploader.AbstractBigQueryUploader; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java index 3c61de7e35e10..49eff33187a0d 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryStagingConsumerFactory.java @@ -13,13 +13,13 @@ import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.bigquery.formatter.BigQueryRecordFormatter; import io.airbyte.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; import io.airbyte.integrations.destination.record_buffer.SerializedBufferingStrategy; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.Map; import java.util.function.Consumer; diff --git a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java index 85af12ee3e948..2baaf6b4a9708 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/main/java/io/airbyte/integrations/destination/bigquery/BigQueryUtils.java @@ -64,6 +64,7 @@ public class BigQueryUtils { DateTimeFormatter.ofPattern("[yyyy][yy]['-']['/']['.'][' '][MMM][MM][M]['-']['/']['.'][' 
'][dd][d]" + "[[' ']['T']HH:mm[':'ss[.][SSSSSS][SSSSS][SSSS][SSS][' '][z][zzz][Z][O][x][XXX][XX][X]]]"); private static final String USER_AGENT_FORMAT = "%s (GPN: Airbyte)"; + private static final String CHECK_TEST_DATASET_SUFFIX = "_airbyte_check_stage_tmp"; public static ImmutablePair executeQuery(final BigQuery bigquery, final QueryJobConfiguration queryConfig) { final JobId jobId = JobId.of(UUID.randomUUID().toString()); @@ -101,18 +102,33 @@ public static void createSchemaAndTableIfNeeded(final BigQuery bigquery, final String datasetLocation, final Schema schema) { if (!existingSchemas.contains(schemaName)) { - createDataset(bigquery, schemaName, datasetLocation); + getOrCreateDataset(bigquery, schemaName, datasetLocation); existingSchemas.add(schemaName); } BigQueryUtils.createPartitionedTable(bigquery, tmpTableId, schema); } - public static void createDataset(final BigQuery bigquery, final String datasetId, final String datasetLocation) { - final Dataset dataset = bigquery.getDataset(datasetId); + public static Dataset getOrCreateDataset(final BigQuery bigquery, final String datasetId, final String datasetLocation) { + Dataset dataset = bigquery.getDataset(datasetId); if (dataset == null || !dataset.exists()) { final DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetId).setLocation(datasetLocation).build(); - bigquery.create(datasetInfo); + dataset = bigquery.create(datasetInfo); } + return dataset; + } + + public static void checkHasCreateAndDeleteDatasetRole(final BigQuery bigquery, final String datasetId, final String datasetLocation) { + final String tmpTestDatasetId = datasetId + CHECK_TEST_DATASET_SUFFIX; + final Dataset dataset = bigquery.getDataset(tmpTestDatasetId); + + // remove possible tmp datasets from previous execution + if (dataset != null && dataset.exists()) { + bigquery.delete(tmpTestDatasetId); + } + + final DatasetInfo datasetInfo = DatasetInfo.newBuilder(tmpTestDatasetId).setLocation(datasetLocation).build(); + bigquery.create(datasetInfo); + bigquery.delete(tmpTestDatasetId); } // https://cloud.google.com/bigquery/docs/creating-partitioned-tables#java diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java index dab6a5fabfd69..f81de9d4fad36 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test-integration/java/io/airbyte/integrations/destination/bigquery/BigQueryDestinationTest.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.destination.bigquery; +import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -26,6 +27,7 @@ import com.google.cloud.bigquery.TableInfo; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; @@ -72,6 +74,8 @@ class BigQueryDestinationTest { protected static final Path CREDENTIALS_PATH = 
Path.of("secrets/credentials.json"); + protected static final Path CREDENTIALS_WITH_MISSED_CREATE_DATASET_ROLE_PATH = + Path.of("secrets/credentials-with-missed-dataset-creation-role.json"); private static final Logger LOGGER = LoggerFactory.getLogger(BigQueryDestinationTest.class); private static final String DATASET_NAME_PREFIX = "bq_dest_integration_test"; @@ -227,12 +231,49 @@ void testCheckSuccess(final DatasetIdResetter resetDatasetId) { void testCheckFailure(final DatasetIdResetter resetDatasetId) { ((ObjectNode) config).put(BigQueryConsts.CONFIG_PROJECT_ID, "fake"); resetDatasetId.accept(config); - final AirbyteConnectionStatus actual = new BigQueryDestination().check(config); - final String actualMessage = actual.getMessage(); - LOGGER.info("Checking expected failure message:" + actualMessage); - assertTrue(actualMessage.contains("Access Denied:")); - final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(""); - assertEquals(expected, actual.withMessage("")); + + // Assert that check throws exception. Later it will be handled by IntegrationRunner + final ConfigErrorException ex = assertThrows(ConfigErrorException.class, () -> { + new BigQueryDestination().check(config); + }); + + assertThat(ex.getMessage()).contains("Access Denied"); + } + + @ParameterizedTest + @MethodSource("datasetIdResetterProvider") + void testCheckFailureInsufficientPermissionForCreateDataset(final DatasetIdResetter resetDatasetId) throws IOException { + + if (!Files.exists(CREDENTIALS_WITH_MISSED_CREATE_DATASET_ROLE_PATH)) { + throw new IllegalStateException(""" + Json config not found. Must provide path to a big query credentials file, + please add file with creds to + ../destination-bigquery/secrets/credentialsWithMissedDatasetCreationRole.json."""); + } + final String fullConfigAsString = Files.readString( + CREDENTIALS_WITH_MISSED_CREATE_DATASET_ROLE_PATH); + final JsonNode credentialsJson = Jsons.deserialize(fullConfigAsString).get(BigQueryConsts.BIGQUERY_BASIC_CONFIG); + final String projectId = credentialsJson.get(BigQueryConsts.CONFIG_PROJECT_ID).asText(); + final String datasetId = Strings.addRandomSuffix(DATASET_NAME_PREFIX, "_", 8); + + final JsonNode insufficientRoleConfig; + + insufficientRoleConfig = Jsons.jsonNode(ImmutableMap.builder() + .put(BigQueryConsts.CONFIG_PROJECT_ID, projectId) + .put(BigQueryConsts.CONFIG_CREDS, credentialsJson.toString()) + .put(BigQueryConsts.CONFIG_DATASET_ID, datasetId) + .put(BigQueryConsts.CONFIG_DATASET_LOCATION, DATASET_LOCATION) + .put(BIG_QUERY_CLIENT_CHUNK_SIZE, 10) + .build()); + + resetDatasetId.accept(insufficientRoleConfig); + + // Assert that check throws exception. 
Later it will be handled by IntegrationRunner + final ConfigErrorException ex = assertThrows(ConfigErrorException.class, () -> { + new BigQueryDestination().check(insufficientRoleConfig); + }); + + assertThat(ex.getMessage()).contains("User does not have bigquery.datasets.create permission"); } @ParameterizedTest diff --git a/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumerTest.java b/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumerTest.java index 50fb51e4cf755..819e3d179e68e 100644 --- a/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumerTest.java +++ b/airbyte-integrations/connectors/destination-bigquery/src/test/java/io/airbyte/integrations/destination/bigquery/BigQueryRecordConsumerTest.java @@ -4,11 +4,11 @@ package io.airbyte.integrations.destination.bigquery; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.bigquery.uploader.AbstractBigQueryUploader; import io.airbyte.integrations.standardtest.destination.PerStreamStateMessageTest; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.Map; import java.util.function.Consumer; import org.junit.jupiter.api.extension.ExtendWith; diff --git a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java index 023110d00cab1..c2d138f2539c3 100644 --- a/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java +++ b/airbyte-integrations/connectors/destination-cassandra/src/main/java/io/airbyte/integrations/destination/cassandra/CassandraMessageConsumer.java @@ -5,9 +5,9 @@ package io.airbyte.integrations.destination.cassandra; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.Map; import java.util.function.Consumer; diff --git a/airbyte-integrations/connectors/destination-doris/.dockerignore b/airbyte-integrations/connectors/destination-doris/.dockerignore new file mode 100644 index 0000000000000..65c7d0ad3e73c --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/.dockerignore @@ -0,0 +1,3 @@ +* +!Dockerfile +!build diff --git a/airbyte-integrations/connectors/destination-doris/Dockerfile b/airbyte-integrations/connectors/destination-doris/Dockerfile new file mode 100644 index 0000000000000..9b1d9b40fb508 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/Dockerfile @@ -0,0 +1,18 @@ +FROM airbyte/integration-base-java:dev AS build + +WORKDIR /airbyte +ENV APPLICATION destination-doris + +COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar + +RUN tar xf ${APPLICATION}.tar --strip-components=1 && rm -rf 
${APPLICATION}.tar + +FROM airbyte/integration-base-java:dev + +WORKDIR /airbyte +ENV APPLICATION destination-doris + +COPY --from=build /airbyte /airbyte + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-doris diff --git a/airbyte-integrations/connectors/destination-doris/README.md b/airbyte-integrations/connectors/destination-doris/README.md new file mode 100644 index 0000000000000..2bb9f6c170b5d --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/README.md @@ -0,0 +1,68 @@ +# Destination Doris + +This is the repository for the Doris destination connector in Java. +For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/doris). + +## Local development + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-doris:build +``` + +#### Create credentials +**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. +Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. + +**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. + +### Locally running the connector docker image + +#### Build +Build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-doris:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-doris:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-doris:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-doris:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-doris:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +We use `JUnit` for Java tests. + +### Unit and Integration Tests +Place unit tests under `src/test/io/airbyte/integrations/destinations/doris`. + +#### Acceptance Tests +Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in +`src/test-integration/java/io/airbyte/integrations/destinations/dorisDestinationAcceptanceTest.java`. + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-doris:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-doris:integrationTest +``` + +## Dependency Management + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. 
Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/destination-doris/bootstrap.md b/airbyte-integrations/connectors/destination-doris/bootstrap.md new file mode 100644 index 0000000000000..30f9d07820e3c --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/bootstrap.md @@ -0,0 +1,29 @@ +# Doris destination + + +Doris destination adopts MySQL protocol(JDBC) and Doris Stream Load to exchange data. + +1. JDBC is used to manipulate the data table structure and execute the create table statement before data import +2. Stream Load is a synchronous import method based on HTTP/HTTPS, For Doris destination, first pre-write csv file, and then write to doris with Stream Load transaction operation. + +## Introduction to Apache Doris + +Apache Doris is a high-performance, real-time analytical database based on MPP architecture, known for its extreme speed and ease of use. It only requires a sub-second response time to return query results under massive data and can support not only high-concurrent point query scenarios but also high-throughput complex analysis scenarios. Based on this, Apache Doris can better meet the scenarios of report analysis, ad-hoc query, unified data warehouse, Data Lake Query Acceleration, etc. Users can build user behavior analysis, AB test platform, log retrieval analysis, user portrait analysis, order analysis, and other applications on top of this. +[https://doris.apache.org/docs/summary/basic-summary](https://doris.apache.org/docs/summary/basic-summary) + + +## Technical Overview +The overall architecture of Apache Doris is shown in the following figure. The Doris architecture is very simple, with only two types of processes. + +#### Frontend(FE): +##### It is mainly responsible for user request access, query parsing and planning, management of metadata, and node management-related work. +#### Backend(BE): +##### It is mainly responsible for data storage and query plan execution. + +Both types of processes are horizontally scalable, and a single cluster can support up to hundreds of machines and tens of petabytes of storage capacity. And these two types of processes guarantee high availability of services and high reliability of data through consistency protocols. This highly integrated architecture design greatly reduces the operation and maintenance cost of a distributed system. + +Apache Doris adopts MySQL protocol, highly compatible with MySQL dialect, and supports standard SQL. Users can access Doris through various client tools and support seamless connection with BI tools. + +[Stream load](https://doris.apache.org/docs/data-operate/import/import-way/stream-load-manual/) is a synchronous way of importing. Users import local files or data streams into Doris by sending HTTP protocol requests. Stream load synchronously executes the import and returns the import result. Users can directly determine whether the import is successful by the return body of the request. Stream load is mainly suitable for importing local files or data from data streams through procedures. + +Each import job of Doris, whether it is batch import using Stream Load or single import using INSERT statement, is a complete transaction operation. The import transaction can ensure that the data in a batch takes effect atomically, and there will be no partial data writing. 
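The bootstrap notes above describe Stream Load as an HTTP(S) based, transactional import. As an illustration only, the sketch below issues a single Stream Load request with the same Apache HttpClient library the connector relies on; the host, credentials, database, table, label, and CSV path are placeholder values, and the redirect handling mirrors what the connector's HttpUtil does because the FE redirects the PUT to a BE node.

```
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultRedirectStrategy;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class StreamLoadSketch {

  public static void main(String[] args) throws Exception {
    // Placeholder values; the connector derives the real ones from the user config.
    String feHostPort = "doris-fe.example.com:8030";
    String database = "airbyte";
    String table = "users";
    String user = "root";
    String password = "";
    File csv = new File("/tmp/users.csv"); // tab-separated: _airbyte_ab_id, _airbyte_emitted_at, _airbyte_data

    String url = String.format("http://%s/api/%s/%s/_stream_load", feHostPort, database, table);
    HttpPut put = new HttpPut(url);
    put.setHeader("Expect", "100-continue");
    put.setHeader("Authorization", "Basic "
        + Base64.getEncoder().encodeToString((user + ":" + password).getBytes(StandardCharsets.UTF_8)));
    put.setHeader("label", "airbyte_doris_users_" + System.currentTimeMillis()); // labels must be unique
    put.setHeader("two_phase_commit", "true");
    put.setHeader("column_separator", "\t");
    put.setHeader("columns", "_airbyte_ab_id,_airbyte_emitted_at,_airbyte_data");
    put.setEntity(new FileEntity(csv, ContentType.TEXT_PLAIN));

    // The FE answers with a redirect to a BE, so redirects must be allowed for PUT as well.
    try (CloseableHttpClient client = HttpClients.custom()
        .setRedirectStrategy(new DefaultRedirectStrategy() {

          @Override
          protected boolean isRedirectable(String method) {
            return true;
          }

        })
        .build();
        CloseableHttpResponse response = client.execute(put)) {
      System.out.println(EntityUtils.toString(response.getEntity())); // JSON body with Status, TxnId, etc.
    }
  }

}
```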
\ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-doris/build.gradle b/airbyte-integrations/connectors/destination-doris/build.gradle new file mode 100644 index 0000000000000..82e4c702251f3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/build.gradle @@ -0,0 +1,21 @@ +plugins { + id 'application' + id 'airbyte-docker' + id 'airbyte-integration-test-java' +} + +application { + mainClass = 'io.airbyte.integrations.destination.doris.DorisDestination' +} + +dependencies { + implementation 'org.apache.commons:commons-csv:1.4' + implementation group: 'mysql', name: 'mysql-connector-java', version: '8.0.16' + implementation project(':airbyte-config:config-models') + implementation project(':airbyte-protocol:protocol-models') + implementation project(':airbyte-integrations:bases:base-java') + implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) + + integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') + integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-doris') +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java new file mode 100644 index 0000000000000..b6fd413170736 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConnectionOptions.java @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris; + +import com.fasterxml.jackson.databind.JsonNode; + +public class DorisConnectionOptions { + + private String db; + private static String DB_KEY = "database"; + private String table; + private static final String TABLE_KEY = "table"; + + private String user; + private static final String USER_KEY = "username"; + + private String pwd; + private static final String PWD_KEY = "password"; + + private String feHost; + private static final String FE_HOST_KEY = "host"; + + private Integer feHttpPort; + private static final String FE_HTTP_PORT_KEY = "httpport"; + + private Integer feQueryPort; + private static final String FE_QUERY_PORT_KEY = "queryport"; + + public static DorisConnectionOptions getDorisConnection(final JsonNode config, String table) { + return new DorisConnectionOptions( + config.get(DB_KEY).asText(), + table, + config.get(USER_KEY).asText(), + config.get(PWD_KEY) == null ? 
"" : config.get(PWD_KEY).asText(), + config.get(FE_HOST_KEY).asText(), + config.get(FE_HTTP_PORT_KEY).asInt(8030), + config.get(FE_QUERY_PORT_KEY).asInt(9030)); + + } + + public DorisConnectionOptions(String db, String table, String user, String pwd, String feHost, Integer feHttpPort, Integer feQueryPort) { + this.db = db; + this.table = table; + this.user = user; + this.pwd = pwd; + this.feHost = feHost; + this.feHttpPort = feHttpPort; + this.feQueryPort = feQueryPort; + } + + public String getDb() { + return db; + } + + public String getTable() { + return table; + } + + public String getUser() { + return user; + } + + public String getPwd() { + return pwd; + } + + public String getFeHost() { + return feHost; + } + + public Integer getFeHttpPort() { + return feHttpPort; + } + + public String getHttpHostPort() { + return feHost + ":" + feHttpPort; + } + + public String getQueryHostPort() { + return feHost + ":" + feHttpPort; + } + + public Integer getFeQueryPort() { + return feQueryPort; + } + + @Override + public String toString() { + return "DorisConnectionOptions{" + + "db='" + db + '\'' + + ", table='" + table + '\'' + + ", user='" + user + '\'' + + ", pwd='" + pwd + '\'' + + ", feHost='" + feHost + '\'' + + ", feHttpPort=" + feHttpPort + + ", feQueryPort=" + feQueryPort + + '}'; + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java new file mode 100644 index 0000000000000..e5447673a8b4f --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisConsumer.java @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.doris; + +import com.fasterxml.jackson.core.io.JsonStringEncoder; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.CommitOnStateAirbyteMessageConsumer; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.io.IOException; +import java.nio.file.Files; +import java.util.Map; +import java.util.UUID; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DorisConsumer extends CommitOnStateAirbyteMessageConsumer { + + private static final Logger LOGGER = LoggerFactory.getLogger(DorisConsumer.class); + + private final ConfiguredAirbyteCatalog catalog; + private final Map writeConfigs; + + private JsonStringEncoder jsonEncoder; + + public DorisConsumer( + final Map writeConfigs, + final ConfiguredAirbyteCatalog catalog, + final Consumer outputRecordCollector) { + super(outputRecordCollector); + jsonEncoder = JsonStringEncoder.getInstance(); + this.catalog = catalog; + this.writeConfigs = writeConfigs; + LOGGER.info("initializing DorisConsumer."); + } + + @Override + public void commit() throws Exception { + for (final DorisWriteConfig writeConfig : writeConfigs.values()) { + writeConfig.getWriter().flush(); + } + } + + @Override + protected void startTracked() throws Exception {} + + @Override + protected void acceptTracked(AirbyteMessage msg) throws Exception { + if (msg.getType() != AirbyteMessage.Type.RECORD) { + return; + } + final AirbyteRecordMessage recordMessage = msg.getRecord(); + if (!writeConfigs.containsKey(recordMessage.getStream())) { + throw new IllegalArgumentException( + String.format("Message contained record from a stream that was not in the catalog. 
\ncatalog: %s , \nmessage: %s", + Jsons.serialize(catalog), Jsons.serialize(recordMessage))); + } + + writeConfigs.get(recordMessage.getStream()).getWriter().printRecord( + UUID.randomUUID(), + // new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(recordMessage.getEmittedAt())), + recordMessage.getEmittedAt(), + new String(jsonEncoder.quoteAsString(Jsons.serialize(recordMessage.getData())))); + + } + + @Override + protected void close(boolean hasFailed) throws Exception { + LOGGER.info("finalizing DorisConsumer"); + for (final Map.Entry entries : writeConfigs.entrySet()) { + try { + entries.getValue().getWriter().flush(); + entries.getValue().getWriter().close(); + } catch (final Exception e) { + hasFailed = true; + LOGGER.error("failed to close writer for: {}", entries.getKey()); + } + } + + try { + for (final DorisWriteConfig value : writeConfigs.values()) { + value.getDorisStreamLoad().firstCommit(); + } + } catch (final Exception e) { + hasFailed = true; + final String message = "Failed to pre-commit doris in destination: "; + LOGGER.error(message + e.getMessage()); + for (final DorisWriteConfig value : writeConfigs.values()) { + if (value.getDorisStreamLoad().getTxnID() > 0) + value.getDorisStreamLoad().abortTransaction(); + } + } + + // + try { + if (!hasFailed) { + for (final DorisWriteConfig writeConfig : writeConfigs.values()) { + if (writeConfig.getDorisStreamLoad().getTxnID() > 0) + writeConfig.getDorisStreamLoad().commitTransaction(); + LOGGER.info(String.format("stream load commit (TxnID: %s ) successed ", writeConfig.getDorisStreamLoad().getTxnID())); + } + } else { + final String message = "Failed to commit doris in destination"; + LOGGER.error(message); + for (final DorisWriteConfig writeConfig : writeConfigs.values()) { + if (writeConfig.getDorisStreamLoad().getTxnID() > 0) + writeConfig.getDorisStreamLoad().abortTransaction(); + } + throw new IOException(message); + } + } finally { + for (final DorisWriteConfig writeConfig : writeConfigs.values()) { + Files.deleteIfExists(writeConfig.getDorisStreamLoad().getPath()); + writeConfig.getDorisStreamLoad().close(); + } + } + + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java new file mode 100644 index 0000000000000..5144b46460148 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisDestination.java @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
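The close() method above is the heart of the consumer: it flushes and closes the per-stream CSV writers, pre-commits every stream's load, then either commits all transactions or aborts them all, and always deletes the temporary files. A condensed, simplified restatement of that flow (error handling and the txn-id guards omitted), using the DorisWriteConfig and DorisStreamLoad classes added elsewhere in this changeset:

```
import io.airbyte.integrations.destination.doris.DorisWriteConfig;
import java.nio.file.Files;
import java.util.Collection;

public class ClosePhaseSketch {

  // Condensed restatement of DorisConsumer#close: flush everything, pre-commit each stream's
  // load, then commit all transactions together or abort them all.
  static void finish(final Collection<DorisWriteConfig> configs, final boolean hasFailed) throws Exception {
    try {
      for (final DorisWriteConfig c : configs) {
        c.getWriter().flush();
        c.getWriter().close();
      }
      if (!hasFailed) {
        for (final DorisWriteConfig c : configs) {
          c.getDorisStreamLoad().firstCommit();       // phase 1: stream load with two_phase_commit=true
        }
        for (final DorisWriteConfig c : configs) {
          c.getDorisStreamLoad().commitTransaction(); // phase 2: make every pre-committed txn visible
        }
      } else {
        for (final DorisWriteConfig c : configs) {
          c.getDorisStreamLoad().abortTransaction();  // roll back pre-committed txns on failure
        }
      }
    } finally {
      for (final DorisWriteConfig c : configs) {
        Files.deleteIfExists(c.getDorisStreamLoad().getPath()); // temp CSVs are removed either way
        c.getDorisStreamLoad().close();
      }
    }
  }

}
```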
+ */ + +package io.airbyte.integrations.destination.doris; + +import static io.airbyte.integrations.destination.doris.DorisStreamLoad.CSV_COLUMN_SEPARATOR; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Preconditions; +import io.airbyte.integrations.BaseConnector; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.destination.StandardNameTransformer; +import io.airbyte.protocol.models.*; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.sql.*; +import java.util.*; +import java.util.function.Consumer; +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; +import org.apache.commons.io.FileUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DorisDestination extends BaseConnector implements Destination { + + private static final Logger LOGGER = LoggerFactory.getLogger(DorisDestination.class); + private static final StandardNameTransformer namingResolver = new StandardNameTransformer(); + private static Connection conn = null; + private static HttpUtil http = new HttpUtil(); + static final String DESTINATION_TEMP_PATH_FIELD = "destination_temp_path"; + private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; + private static final String DB_URL_PATTERN = "jdbc:mysql://%s:%d/%s?rewriteBatchedStatements=true&useUnicode=true&characterEncoding=utf8"; + + public static void main(String[] args) throws Exception { + new IntegrationRunner(new DorisDestination()).run(args); + } + + @Override + public AirbyteConnectionStatus check(JsonNode config) { + try { + Preconditions.checkNotNull(config); + FileUtils.forceMkdir(getTempPathDir(config).toFile()); + checkDorisAndConnect(config); + } catch (final Exception e) { + return new AirbyteConnectionStatus().withStatus(Status.FAILED).withMessage(e.getMessage()); + } + return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); + } + + @Override + public AirbyteMessageConsumer getConsumer(JsonNode config, + ConfiguredAirbyteCatalog configuredCatalog, + Consumer outputRecordCollector) + throws IOException, SQLException { + final Map writeConfigs = new HashMap<>(); + + try { + final Path destinationDir = getTempPathDir(config); + FileUtils.forceMkdir(destinationDir.toFile()); + for (ConfiguredAirbyteStream stream : configuredCatalog.getStreams()) { + + final DestinationSyncMode syncMode = stream.getDestinationSyncMode(); + if (syncMode == null) { + throw new IllegalStateException("Undefined destination sync mode"); + } + + final String streamName = stream.getStream().getName(); + final String tableName = namingResolver.getIdentifier(streamName); + final String tmpTableName = namingResolver.getTmpTableName(streamName); + final Path tmpPath = destinationDir.resolve(tmpTableName + ".csv"); + if (conn == null) + checkDorisAndConnect(config); + Statement stmt = conn.createStatement(); + stmt.execute(createTableQuery(tableName)); + if (syncMode == DestinationSyncMode.OVERWRITE) { + stmt.execute(truncateTable(tableName)); + } + CSVFormat csvFormat = CSVFormat.DEFAULT + .withSkipHeaderRecord() + .withDelimiter(CSV_COLUMN_SEPARATOR) + .withQuote(null) + .withHeader( + 
JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT, + JavaBaseConstants.COLUMN_NAME_DATA); + final FileWriter fileWriter = new FileWriter(tmpPath.toFile(), Charset.defaultCharset(), false); + final CSVPrinter printer = new CSVPrinter(fileWriter, csvFormat); + DorisStreamLoad dorisStreamLoad = new DorisStreamLoad( + tmpPath, + DorisConnectionOptions.getDorisConnection(config, tableName), + new DorisLabelInfo("airbyte_doris", tableName, true), + http.getClient(), + JavaBaseConstants.COLUMN_NAME_AB_ID, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT, + JavaBaseConstants.COLUMN_NAME_DATA); + writeConfigs.put(streamName, new DorisWriteConfig(dorisStreamLoad, printer, csvFormat)); + } + } catch (SQLException | ClassNotFoundException e) { + LOGGER.error("Exception while creating Doris destination table: ", e); + throw new SQLException(e); + } catch (IOException e) { + LOGGER.error("Exception while handling temporary csv files : ", e); + throw new IOException(e); + } finally { + if (conn != null) + conn.close(); + } + return new DorisConsumer(writeConfigs, configuredCatalog, outputRecordCollector); + } + + protected void checkDorisAndConnect(JsonNode config) throws ClassNotFoundException, SQLException { + DorisConnectionOptions dorisConnection = DorisConnectionOptions.getDorisConnection(config, ""); + String dbUrl = String.format(DB_URL_PATTERN, dorisConnection.getFeHost(), dorisConnection.getFeQueryPort(), dorisConnection.getDb()); + Class.forName(JDBC_DRIVER); + conn = DriverManager.getConnection(dbUrl, dorisConnection.getUser(), dorisConnection.getPwd()); + } + + protected String createTableQuery(String tableName) { + String s = "CREATE TABLE IF NOT EXISTS `" + tableName + "` ( \n" + + "`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "` varchar(40),\n" + + "`" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "` BIGINT,\n" + + "`" + JavaBaseConstants.COLUMN_NAME_DATA + "` String)\n" + + "DUPLICATE KEY(`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "`,`" + JavaBaseConstants.COLUMN_NAME_EMITTED_AT + "`) \n" + + "DISTRIBUTED BY HASH(`" + JavaBaseConstants.COLUMN_NAME_AB_ID + "`) BUCKETS 16 \n" + + "PROPERTIES ( \n" + + "\"replication_allocation\" = \"tag.location.default: 1\" \n" + + ");"; + LOGGER.info("create doris table SQL : \n " + s); + return s; + } + + protected String truncateTable(String tableName) { + String s = "TRUNCATE TABLE `" + tableName + "`;"; + LOGGER.info("truncate doris table SQL : \n " + s); + return s; + } + + protected Path getTempPathDir(final JsonNode config) { + Path path = Paths.get(DESTINATION_TEMP_PATH_FIELD); + Preconditions.checkNotNull(path); + if (!path.startsWith("/code/local")) { + path = Path.of("/local", path.toString()); + } + final Path normalizePath = path.normalize(); + if (!normalizePath.startsWith("/local")) { + throw new IllegalArgumentException("Stream Load destination temp file should be inside the /local directory"); + } + return path; + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java new file mode 100644 index 0000000000000..52c8b7bb6dfbc --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisLabelInfo.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
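For a stream named users (a hypothetical example), the createTableQuery() and truncateTable() helpers above emit DDL roughly like the comments in this sketch; the three columns are the standard Airbyte raw columns.

```
package io.airbyte.integrations.destination.doris;

public class CreateTableQueryExample {

  public static void main(String[] args) {
    // Prints roughly:
    // CREATE TABLE IF NOT EXISTS `users` (
    //   `_airbyte_ab_id` varchar(40),
    //   `_airbyte_emitted_at` BIGINT,
    //   `_airbyte_data` String)
    // DUPLICATE KEY(`_airbyte_ab_id`,`_airbyte_emitted_at`)
    // DISTRIBUTED BY HASH(`_airbyte_ab_id`) BUCKETS 16
    // PROPERTIES ("replication_allocation" = "tag.location.default: 1");
    System.out.println(new DorisDestination().createTableQuery("users"));

    // TRUNCATE TABLE `users`;  (only issued for the overwrite sync mode)
    System.out.println(new DorisDestination().truncateTable("users"));
  }

}
```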
+ */ + +package io.airbyte.integrations.destination.doris; + +import java.util.UUID; + +public class DorisLabelInfo { + + private String prefix; + + private String table; + + private boolean enable2PC; + + public DorisLabelInfo(String labelPrefix, String table, boolean enable2PC) { + this.prefix = labelPrefix; + this.table = table; + this.enable2PC = enable2PC; + } + + public String label() { + return prefix + "_" + table + "_" + UUID.randomUUID() + System.currentTimeMillis(); + } + + public String label(long chkId) { + return prefix + "_" + chkId; + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java new file mode 100644 index 0000000000000..219d4f99f262b --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisStreamLoad.java @@ -0,0 +1,235 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import io.airbyte.integrations.destination.doris.exception.DorisRuntimeException; +import io.airbyte.integrations.destination.doris.exception.StreamLoadException; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.file.Path; +import java.util.*; +import java.util.concurrent.Future; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.InputStreamEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DorisStreamLoad { + + private static final Logger LOGGER = LoggerFactory.getLogger(DorisStreamLoad.class); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + public static final Pattern LABEL_EXIST_PATTERN = + Pattern.compile("errCode = 2, detailMessage = Label \\[(.*)\\] " + + "has already been used, relate to txn \\[(\\d+)\\]"); + public static final Pattern COMMITTED_PATTERN = + Pattern.compile("errCode = 2, detailMessage = transaction \\[(\\d+)\\] " + + "is already \\b(COMMITTED|committed|VISIBLE|visible)\\b, not pre-committed."); + private final DorisLabelInfo dorisLabelInfo; + private static final String LOAD_FIRST_URL_PATTERN = "http://%s/api/%s/%s/_stream_load"; + private static final String LOAD_SECOND_URL_PATTERN = "http://%s/api/%s/_stream_load_2pc"; + private static final String LINE_DELIMITER_DEFAULT = "\n"; + public static final Character CSV_COLUMN_SEPARATOR = '\t'; + + private final String hostPort; + private final String loadUrlStr; + private final String secondUrlStr; + private final String user; + private final String passwd; + private final boolean enable2PC; + private final Properties streamLoadProp; + private final Integer maxRetry; + private Long txnID = 0L; + private final Path path; + private Future pendingLoadFuture; + private final CloseableHttpClient httpClient; + public static final String SUCCESS = "Success"; + public static final String PUBLISH_TIMEOUT = "Publish Timeout"; + private static final List DORIS_SUCCESS_STATUS = new 
ArrayList<>(Arrays.asList(SUCCESS, PUBLISH_TIMEOUT)); + public static final String FAIL = "Fail"; + + public DorisStreamLoad( + Path path, + DorisConnectionOptions dorisOptions, + DorisLabelInfo dorisLabelInfo, + CloseableHttpClient httpClient, + String... head) { + this.hostPort = dorisOptions.getHttpHostPort(); + String db = dorisOptions.getDb(); + this.user = dorisOptions.getUser(); + this.passwd = dorisOptions.getPwd(); + this.dorisLabelInfo = dorisLabelInfo; + this.loadUrlStr = String.format(LOAD_FIRST_URL_PATTERN, hostPort, db, dorisOptions.getTable()); + this.secondUrlStr = String.format(LOAD_SECOND_URL_PATTERN, hostPort, db); + this.enable2PC = true; + + StringBuilder stringBuilder = new StringBuilder(); + for (String s : head) { + if (!stringBuilder.isEmpty()) + stringBuilder.append(","); + stringBuilder.append(s); + } + this.streamLoadProp = new Properties(); + streamLoadProp.setProperty("column_separator", CSV_COLUMN_SEPARATOR.toString()); + streamLoadProp.setProperty("columns", stringBuilder.toString()); + this.maxRetry = 3; + this.path = path; + this.httpClient = httpClient; + } + + public Long getTxnID() { + return txnID; + } + + public void firstCommit() throws Exception { + Path pathChecked = Preconditions.checkNotNull(path, "stream load temp CSV file is empty."); + String label = dorisLabelInfo.label(); + LOGGER.info("preCommit label {}. .", label); + StreamLoadRespContent respContent = null; + try { + + InputStreamEntity entity = new InputStreamEntity(new FileInputStream(pathChecked.toFile())); + StreamLoadHttpPutBuilder builder = StreamLoadHttpPutBuilder.builder(); + builder.setUrl(loadUrlStr) + .baseAuth(user, passwd) + .addCommonHeader() + .enable2PC(enable2PC) + .setLabel(label) + .setEntity(entity) + .addProperties(streamLoadProp); + HttpPut build = builder.build(); + respContent = handlePreCommitResponse(httpClient.execute(build)); + Preconditions.checkState("true".equals(respContent.getTwoPhaseCommit())); + if (!DORIS_SUCCESS_STATUS.contains(respContent.getStatus())) { + String errMsg = String.format("stream load error: %s, see more in %s", respContent.getMessage(), respContent.getErrorURL()); + throw new DorisRuntimeException(errMsg); + } else { + String commitType = enable2PC ? 
"preCommit" : "commit"; + LOGGER.info("{} for label {} finished: {}", commitType, label, respContent.toString()); + } + } catch (Exception e) { + LOGGER.warn("failed to stream load data", e); + throw e; + } + this.txnID = respContent.getTxnId(); + } + + // commit + public void commitTransaction() throws IOException { + int statusCode = -1; + String reasonPhrase = null; + int retry = 0; + CloseableHttpResponse response = null; + StreamLoadHttpPutBuilder putBuilder = StreamLoadHttpPutBuilder.builder(); + putBuilder.setUrl(secondUrlStr) + .baseAuth(user, passwd) + .addCommonHeader() + .addTxnId(txnID) + .setEmptyEntity() + .commit(); + while (retry++ < maxRetry) { + + try { + response = httpClient.execute(putBuilder.build()); + } catch (IOException e) { + LOGGER.warn("try commit failed with {} times", retry + 1); + continue; + } + statusCode = response.getStatusLine().getStatusCode(); + reasonPhrase = response.getStatusLine().getReasonPhrase(); + if (statusCode != 200) { + LOGGER.warn("commit transaction failed with {}, reason {}", hostPort, reasonPhrase); + } else { + LOGGER.info("commit transaction successes , response: {}", response.getStatusLine().toString()); + break; + } + } + + if (statusCode != 200) { + throw new DorisRuntimeException("stream load error: " + reasonPhrase); + } + + ObjectMapper mapper = new ObjectMapper(); + if (response.getEntity() != null) { + String loadResult = EntityUtils.toString(response.getEntity()); + Map res = mapper.readValue(loadResult, new TypeReference>() {}); + Matcher matcher = COMMITTED_PATTERN.matcher(res.get("msg")); + if (res.get("status").equals(FAIL) && !matcher.matches()) { + throw new DorisRuntimeException("Commit failed " + loadResult); + } else { + LOGGER.info("load result {}", loadResult); + } + } + } + + // abort + public void abortTransaction() throws Exception { + StreamLoadHttpPutBuilder builder = StreamLoadHttpPutBuilder.builder(); + builder.setUrl(secondUrlStr) + .baseAuth(user, passwd) + .addCommonHeader() + .addTxnId(txnID) + .setEmptyEntity() + .abort(); + CloseableHttpResponse response = httpClient.execute(builder.build()); + + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode != 200 || response.getEntity() == null) { + LOGGER.warn("abort transaction response: " + response.getStatusLine().toString()); + throw new DorisRuntimeException("Failed abort transaction:" + txnID + ", with url " + secondUrlStr); + } else { + LOGGER.info("abort transaction response: " + response.getStatusLine().toString()); + } + + ObjectMapper mapper = new ObjectMapper(); + String loadResult = EntityUtils.toString(response.getEntity()); + Map res = mapper.readValue(loadResult, new TypeReference>() {}); + if (FAIL.equals(res.get("status"))) { + LOGGER.warn("Fail to abort transaction. 
error: {}", res.get("msg")); + } + } + + private StreamLoadRespContent stopLoad() throws IOException { + LOGGER.info("stream load stopped."); + Preconditions.checkState(pendingLoadFuture != null); + try { + return handlePreCommitResponse(pendingLoadFuture.get()); + } catch (Exception e) { + throw new DorisRuntimeException(e); + } + } + + public StreamLoadRespContent handlePreCommitResponse(CloseableHttpResponse response) throws Exception { + final int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode == 200 && response.getEntity() != null) { + String loadResult = EntityUtils.toString(response.getEntity()); + LOGGER.info("load Result {}", loadResult); + return OBJECT_MAPPER.readValue(loadResult, StreamLoadRespContent.class); + } + throw new StreamLoadException("stream load response error: " + response.getStatusLine().toString()); + } + + public Path getPath() { + return path; + } + + public void close() throws IOException { + if (null != httpClient) { + try { + httpClient.close(); + } catch (IOException e) { + throw new IOException("Closing httpClient failed.", e); + } + } + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java new file mode 100644 index 0000000000000..961317b29f019 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/DorisWriteConfig.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVPrinter; + +public class DorisWriteConfig { + + private final DorisStreamLoad dorisStreamLoad; + private final CSVPrinter writer; + private final CSVFormat format; + + public DorisWriteConfig(DorisStreamLoad dorisStreamLoad, CSVPrinter writer, CSVFormat format) { + this.dorisStreamLoad = dorisStreamLoad; + this.writer = writer; + this.format = format; + } + + public DorisStreamLoad getDorisStreamLoad() { + return dorisStreamLoad; + } + + public CSVFormat getFormat() { + return format; + } + + public CSVPrinter getWriter() { + return writer; + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java new file mode 100644 index 0000000000000..ec5ca0aad2eec --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/HttpUtil.java @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.doris; + +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.DefaultRedirectStrategy; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; + +public class HttpUtil { + + private final HttpClientBuilder httpClientBuilder = + HttpClients + .custom() + .setRedirectStrategy(new DefaultRedirectStrategy() { + + @Override + protected boolean isRedirectable(String method) { + return true; + } + + }); + + public CloseableHttpClient getClient() { + return httpClientBuilder.build(); + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java new file mode 100644 index 0000000000000..ee10d8bc04e14 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadHttpPutBuilder.java @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris; + +import com.google.common.base.Preconditions; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +import org.apache.commons.codec.binary.Base64; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.StringEntity; + +public class StreamLoadHttpPutBuilder { + + String url; + + Map prop; + + HttpEntity httpEntity; + + private StreamLoadHttpPutBuilder() { + this.prop = new HashMap<>(); + } + + public static StreamLoadHttpPutBuilder builder() { + return new StreamLoadHttpPutBuilder(); + } + + public StreamLoadHttpPutBuilder setUrl(String url) { + this.url = url; + return this; + } + + // 用户最好设置Expect Header字段内容100-continue,这样可以在某些出错场景下避免不必要的数据传输 + public StreamLoadHttpPutBuilder addCommonHeader() { + prop.put(HttpHeaders.EXPECT, "100-continue"); + return this; + } + + public StreamLoadHttpPutBuilder enable2PC(Boolean bool) { + prop.put("two_phase_commit", bool.toString()); + return this; + } + + public StreamLoadHttpPutBuilder baseAuth(String user, String password) { + byte[] encoded = Base64.encodeBase64(user.concat(":").concat(password).getBytes(StandardCharsets.UTF_8)); + prop.put(HttpHeaders.AUTHORIZATION, "Basic " + new String(encoded, StandardCharsets.UTF_8)); + return this; + } + + public StreamLoadHttpPutBuilder addTxnId(long txnID) { + prop.put("txn_id", String.valueOf(txnID)); + return this; + } + + public StreamLoadHttpPutBuilder commit() { + prop.put("txn_operation", "commit"); + return this; + } + + public StreamLoadHttpPutBuilder abort() { + prop.put("txn_operation", "abort"); + return this; + } + + public StreamLoadHttpPutBuilder setEntity(HttpEntity httpEntity) { + this.httpEntity = httpEntity; + return this; + } + + public StreamLoadHttpPutBuilder setEmptyEntity() { + try { + this.httpEntity = new StringEntity(""); + } catch (Exception e) { + throw new IllegalArgumentException(e); + } + return this; + } + + public StreamLoadHttpPutBuilder addProperties(Properties properties) { + properties.forEach((key, value) -> prop.put(String.valueOf(key), String.valueOf(value))); + return this; + } + + public StreamLoadHttpPutBuilder setLabel(String label) { + prop.put("label", 
label); + return this; + } + + public HttpPut build() { + Preconditions.checkNotNull(url); + Preconditions.checkNotNull(httpEntity); + HttpPut put = new HttpPut(url); + prop.forEach(put::setHeader); + put.setEntity(httpEntity); + return put; + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java new file mode 100644 index 0000000000000..4ef3c46844bdf --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/StreamLoadRespContent.java @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +@JsonIgnoreProperties(ignoreUnknown = true) +public class StreamLoadRespContent { + + @JsonProperty(value = "TxnId") + private long TxnId; + + @JsonProperty(value = "Label") + private String Label; + + @JsonProperty(value = "Status") + private String Status; + + @JsonProperty(value = "TwoPhaseCommit") + private String TwoPhaseCommit; + + @JsonProperty(value = "ExistingJobStatus") + private String ExistingJobStatus; + + @JsonProperty(value = "Message") + private String Message; + + @JsonProperty(value = "NumberTotalRows") + private long NumberTotalRows; + + @JsonProperty(value = "NumberLoadedRows") + private long NumberLoadedRows; + + @JsonProperty(value = "NumberFilteredRows") + private int NumberFilteredRows; + + @JsonProperty(value = "NumberUnselectedRows") + private int NumberUnselectedRows; + + @JsonProperty(value = "LoadBytes") + private long LoadBytes; + + @JsonProperty(value = "LoadTimeMs") + private int LoadTimeMs; + + @JsonProperty(value = "BeginTxnTimeMs") + private int BeginTxnTimeMs; + + @JsonProperty(value = "StreamLoadPutTimeMs") + private int StreamLoadPutTimeMs; + + @JsonProperty(value = "ReadDataTimeMs") + private int ReadDataTimeMs; + + @JsonProperty(value = "WriteDataTimeMs") + private int WriteDataTimeMs; + + @JsonProperty(value = "CommitAndPublishTimeMs") + private int CommitAndPublishTimeMs; + + @JsonProperty(value = "ErrorURL") + private String ErrorURL; + + public long getTxnId() { + return TxnId; + } + + public String getStatus() { + return Status; + } + + public String getTwoPhaseCommit() { + return TwoPhaseCommit; + } + + public String getMessage() { + return Message; + } + + public String getExistingJobStatus() { + return ExistingJobStatus; + } + + @Override + public String toString() { + ObjectMapper mapper = new ObjectMapper(); + try { + return mapper.writeValueAsString(this); + } catch (JsonProcessingException e) { + return ""; + } + } + + public String getErrorURL() { + return ErrorURL; + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java new file mode 100644 index 0000000000000..ff2ce53673759 --- /dev/null +++ 
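handlePreCommitResponse() in DorisStreamLoad deserializes the FE's JSON reply into the StreamLoadRespContent class above. The sketch below uses a made-up, abridged response body to show the fields the connector actually inspects before recording the transaction id.

```
import com.fasterxml.jackson.databind.ObjectMapper;
import io.airbyte.integrations.destination.doris.StreamLoadRespContent;

public class RespContentExample {

  public static void main(String[] args) throws Exception {
    // Hypothetical, abridged pre-commit response body from the Doris FE.
    String body = "{"
        + "\"TxnId\": 12345,"
        + "\"Label\": \"airbyte_doris_users_example\","
        + "\"Status\": \"Success\","
        + "\"TwoPhaseCommit\": \"true\","
        + "\"Message\": \"OK\","
        + "\"NumberTotalRows\": 2,"
        + "\"NumberLoadedRows\": 2"
        + "}";

    StreamLoadRespContent resp = new ObjectMapper().readValue(body, StreamLoadRespContent.class);
    // DorisStreamLoad#firstCommit checks these fields before keeping the transaction id.
    System.out.println(resp.getStatus());          // "Success" (or "Publish Timeout", both treated as success)
    System.out.println(resp.getTwoPhaseCommit());  // must be "true" when 2PC is enabled
    System.out.println(resp.getTxnId());           // handed to commitTransaction()/abortTransaction()
  }

}
```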
b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisException.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris.exception; + +public class DorisException extends Exception { + + public DorisException() { + super(); + } + + public DorisException(String message) { + super(message); + } + + public DorisException(String message, Throwable cause) { + super(message, cause); + } + + public DorisException(Throwable cause) { + super(cause); + } + + protected DorisException(String message, + Throwable cause, + boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java new file mode 100644 index 0000000000000..e9d4feb40c050 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/DorisRuntimeException.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris.exception; + +/** + * Doris runtime exception. + */ +public class DorisRuntimeException extends RuntimeException { + + public DorisRuntimeException() { + super(); + } + + public DorisRuntimeException(String message) { + super(message); + } + + public DorisRuntimeException(String message, Throwable cause) { + super(message, cause); + } + + public DorisRuntimeException(Throwable cause) { + super(cause); + } + + protected DorisRuntimeException(String message, + Throwable cause, + boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java new file mode 100644 index 0000000000000..8a3c406f3acb7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/IllegalArgumentException.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.doris.exception; + +public class IllegalArgumentException extends DorisException { + + public IllegalArgumentException(String msg, Throwable cause) { + super(msg, cause); + } + + public IllegalArgumentException(String arg, String value) { + super("argument '" + arg + "' is illegal, value is '" + value + "'."); + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java new file mode 100644 index 0000000000000..7edba2d9959b3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/java/io/airbyte/integrations/destination/doris/exception/StreamLoadException.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris.exception; + +public class StreamLoadException extends Exception { + + public StreamLoadException() { + super(); + } + + public StreamLoadException(String message) { + super(message); + } + + public StreamLoadException(String message, Throwable cause) { + super(message, cause); + } + + public StreamLoadException(Throwable cause) { + super(cause); + } + + protected StreamLoadException(String message, + Throwable cause, + boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json new file mode 100644 index 0000000000000..42cddd0a4780e --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/main/resources/spec.json @@ -0,0 +1,60 @@ +{ + "documentationUrl": "https://docs.airbyte.io/integrations/destinations/doris", + "supportsIncremental": false, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": ["append", "overwrite"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Doris Destination Spec", + "type": "object", + "required": ["host", "httpport", "queryport", "username", "database"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database", + "type": "string", + "order": 0 + }, + "httpport": { + "title": "HttpPort", + "description": "Http Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 8030, + "examples": ["8030"], + "order": 1 + }, + "queryport": { + "title": "QueryPort", + "description": "Query(SQL) Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 9030, + "examples": ["9030"], + "order": 2 + }, + "database": { + "title": "DataBase Name", + "description": "Name of the database.", + "type": "string", + "order": 3 + }, + "username": { + "title": "UserName", + "description": "Username to use to access the database.", + "type": "string", + "order": 4 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 5 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java new file mode 100644 index 0000000000000..9b03356c3f53a --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/test-integration/java/io/airbyte/integrations/destination/doris/DorisDestinationAcceptanceTest.java @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.doris; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.destination.StandardNameTransformer; +import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.sql.*; +import java.util.ArrayList; +import java.util.List; +import org.apache.commons.lang3.StringEscapeUtils; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DorisDestinationAcceptanceTest extends DestinationAcceptanceTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(DorisDestinationAcceptanceTest.class); + + private JsonNode configJson; + + private static final Path RELATIVE_PATH = Path.of("integration_test/test"); + + private static final String JDBC_DRIVER = "com.mysql.cj.jdbc.Driver"; + private static final String DB_URL_PATTERN = "jdbc:mysql://%s:%d?rewriteBatchedStatements=true&useSSL=true&useUnicode=true&characterEncoding=utf8"; + private static final int PORT = 8211; + private static Connection conn = null; + + private static final StandardNameTransformer namingResolver = new StandardNameTransformer(); + + @Override + protected String getImageName() { + return "airbyte/destination-doris:dev"; + } + + @BeforeAll + public static void getConnect() { + JsonNode config = Jsons.deserialize(IOs.readFile(Paths.get("../../../secrets/config.json"))); + String dbUrl = String.format(DB_URL_PATTERN, config.get("host").asText(), PORT); + try { + Class.forName(JDBC_DRIVER); + conn = + DriverManager.getConnection(dbUrl, config.get("username").asText(), config.get("password") == null ? "" : config.get("password").asText()); + } catch (Exception e) { + e.printStackTrace(); + } + + } + + @AfterAll + public static void closeConnect() throws SQLException { + if (conn != null) { + conn.close(); + } + } + + @Override + protected JsonNode getConfig() { + // TODO: Generate the configuration JSON file to be used for running the destination during the test + // configJson can either be static and read from secrets/config.json directly + // or created in the setup method + configJson = Jsons.deserialize(IOs.readFile(Paths.get("../../../secrets/config.json"))); + return configJson; + } + + @Override + protected JsonNode getFailCheckConfig() { + // TODO return an invalid config which, when used to run the connector's check connection operation, + // should result in a failed connection check + return null; + } + + @Override + protected List retrieveRecords(TestDestinationEnv testEnv, + String streamName, + String namespace, + JsonNode streamSchema) + throws IOException, SQLException { + // TODO Implement this method to retrieve records which written to the destination by the connector. 
+ // Records returned from this method will be compared against records provided to the connector + // to verify they were written correctly + + final String tableName = namingResolver.getIdentifier(streamName); + + String query = String.format( + "SELECT * FROM %s.%s ORDER BY %s ASC;", configJson.get("database").asText(), tableName, + JavaBaseConstants.COLUMN_NAME_EMITTED_AT); + PreparedStatement stmt = conn.prepareStatement(query); + ResultSet resultSet = stmt.executeQuery(); + + List res = new ArrayList<>(); + while (resultSet.next()) { + String sss = resultSet.getString(JavaBaseConstants.COLUMN_NAME_DATA); + res.add(Jsons.deserialize(StringEscapeUtils.unescapeJava(sss))); + } + stmt.close(); + return res; + } + + @Override + protected void setup(TestDestinationEnv testEnv) { + // TODO Implement this method to run any setup actions needed before every test case + } + + @Override + protected void tearDown(TestDestinationEnv testEnv) { + // TODO Implement this method to run any cleanup actions needed after every test case + } + + public void testLineBreakCharacters() { + // overrides test with a no-op until we handle full UTF-8 in the destination + } + + public void testSecondSync() throws Exception { + // PubSub cannot overwrite messages, its always append only + } + +} diff --git a/airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java b/airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java new file mode 100644 index 0000000000000..6e504f925109e --- /dev/null +++ b/airbyte-integrations/connectors/destination-doris/src/test/java/io/airbyte/integrations/destination/doris/DorisDestinationTest.java @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
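getFailCheckConfig() above is left as a TODO and currently returns null. One possible sketch (not the author's implementation) reuses the valid config but swaps in credentials and a port that cannot work, so that check() reports a failed status; the overridden values are hypothetical.

```
// Inside DorisDestinationAcceptanceTest; a sketch only, assuming getConfig() returns a valid config.
@Override
protected JsonNode getFailCheckConfig() {
  final ObjectNode invalid = (ObjectNode) Jsons.deserialize(Jsons.serialize(getConfig()));
  invalid.put("queryport", 1);               // hypothetical dead port, so the JDBC connection fails
  invalid.put("password", "wrong-password"); // hypothetical bad credential
  return invalid;
}
```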
+ */ + +package io.airbyte.integrations.destination.doris; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import io.airbyte.commons.io.IOs; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.destination.StandardNameTransformer; +import io.airbyte.protocol.models.*; +import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; +import java.util.Collections; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.commons.io.FileUtils; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class DorisDestinationTest { + + private static final Instant NOW = Instant.now(); + private static final Path TEST_ROOT = Path.of("/tmp/airbyte_tests"); + private static final String USERS_STREAM_NAME = "users"; + private static final String TASKS_STREAM_NAME = "tasks"; + private static final String USERS_FILE = new StandardNameTransformer().getRawTableName(USERS_STREAM_NAME) + ".csv"; + private static final String TASKS_FILE = new StandardNameTransformer().getRawTableName(TASKS_STREAM_NAME) + ".csv";; + private static final AirbyteMessage MESSAGE_USERS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) + .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) + .withData(Jsons.jsonNode(ImmutableMap.builder().put("name", "john").put("id", "10").build())) + .withEmittedAt(NOW.toEpochMilli())); + private static final AirbyteMessage MESSAGE_USERS2 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) + .withRecord(new AirbyteRecordMessage().withStream(USERS_STREAM_NAME) + .withData(Jsons.jsonNode(ImmutableMap.builder().put("name", "susan").put("id", "30").build())) + .withEmittedAt(NOW.toEpochMilli())); + private static final AirbyteMessage MESSAGE_TASKS1 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) + .withRecord(new AirbyteRecordMessage().withStream(TASKS_STREAM_NAME) + .withData(Jsons.jsonNode(ImmutableMap.builder().put("goal", "game").build())) + .withEmittedAt(NOW.toEpochMilli())); + private static final AirbyteMessage MESSAGE_TASKS2 = new AirbyteMessage().withType(AirbyteMessage.Type.RECORD) + .withRecord(new AirbyteRecordMessage().withStream(TASKS_STREAM_NAME) + .withData(Jsons.jsonNode(ImmutableMap.builder().put("goal", "code").build())) + .withEmittedAt(NOW.toEpochMilli())); + private static final AirbyteMessage MESSAGE_STATE = new AirbyteMessage().withType(AirbyteMessage.Type.STATE) + .withState(new AirbyteStateMessage().withData(Jsons.jsonNode(ImmutableMap.builder().put("checkpoint", "now!").build()))); + + private static final ConfiguredAirbyteCatalog CATALOG = new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList( + CatalogHelpers.createConfiguredAirbyteStream(USERS_STREAM_NAME, null, Field.of("name", JsonSchemaType.STRING), + Field.of("id", JsonSchemaType.STRING)), + CatalogHelpers.createConfiguredAirbyteStream(TASKS_STREAM_NAME, null, Field.of("goal", JsonSchemaType.STRING)))); + + private Path destinationPath; + private JsonNode config; + + 
@BeforeEach + void setup() throws IOException { + destinationPath = Files.createTempDirectory(Files.createDirectories(TEST_ROOT), "test"); + config = Jsons.deserialize(IOs.readFile(Paths.get("../../../secrets/config.json"))); + } + + private DorisDestination getDestination() { + final DorisDestination result = spy(DorisDestination.class); + doReturn(destinationPath).when(result).getTempPathDir(any()); + return result; + } + + @Test + void testSpec() throws Exception { + final ConnectorSpecification actual = getDestination().spec(); + final String resourceString = MoreResources.readResource("spec.json"); + final ConnectorSpecification expected = Jsons.deserialize(resourceString, ConnectorSpecification.class); + + assertEquals(expected, actual); + } + + @Test + void testCheckSuccess() { + final AirbyteConnectionStatus actual = getDestination().check(config); + final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED); + assertEquals(expected, actual); + } + + @Test + void testCheckFailure() throws IOException { + final Path looksLikeADirectoryButIsAFile = destinationPath.resolve("file"); + FileUtils.touch(looksLikeADirectoryButIsAFile.toFile()); + final DorisDestination destination = spy(DorisDestination.class); + doReturn(looksLikeADirectoryButIsAFile).when(destination).getTempPathDir(any()); + // final JsonNode config = + // Jsons.jsonNode(ImmutableMap.of(DorisDestination.DESTINATION_TEMP_PATH_FIELD, + // looksLikeADirectoryButIsAFile.toString())); + final AirbyteConnectionStatus actual = destination.check(config); + final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED); + + // the message includes the random file path, so just verify it exists and then remove it when we do + // rest of the comparison. + assertNotNull(actual.getMessage()); + actual.setMessage(null); + assertEquals(expected, actual); + } + + @Test + void testCheckInvalidDestinationFolder() { + // final Path relativePath = Path.of("../tmp/conf.d/"); + // final JsonNode config = + // Jsons.jsonNode(ImmutableMap.of(DorisDestination.DESTINATION_TEMP_PATH_FIELD, + // relativePath.toString())); + final AirbyteConnectionStatus actual = new DorisDestination().check(config); + final AirbyteConnectionStatus expected = new AirbyteConnectionStatus().withStatus(Status.FAILED); + // the message includes the random file path, so just verify it exists and then remove it when we do + // rest of the comparison. + assertNotNull(actual.getMessage()); + actual.setMessage(null); + assertEquals(expected, actual); + } + + @Test + void testWriteSuccess() throws Exception { + DorisDestination destination = getDestination(); + destination.check(config); + final AirbyteMessageConsumer consumer = destination.getConsumer(config, CATALOG, Destination::defaultOutputRecordCollector); + consumer.accept(MESSAGE_USERS1); + consumer.accept(MESSAGE_TASKS1); + consumer.accept(MESSAGE_USERS2); + consumer.accept(MESSAGE_TASKS2); + consumer.accept(MESSAGE_STATE); + consumer.close(); + + } + + @SuppressWarnings("ResultOfMethodCallIgnored") + @Test + void testWriteFailure() throws Exception { + // hack to force an exception to be thrown from within the consumer. 
+ final AirbyteMessage spiedMessage = spy(MESSAGE_USERS1); + doThrow(new RuntimeException()).when(spiedMessage).getRecord(); + DorisDestination destination = getDestination(); + destination.check(config); + final AirbyteMessageConsumer consumer = spy(destination.getConsumer(config, CATALOG, Destination::defaultOutputRecordCollector)); + + assertThrows(RuntimeException.class, () -> consumer.accept(spiedMessage)); + consumer.accept(MESSAGE_USERS2); + assertThrows(IOException.class, consumer::close); + + // verify tmp files are cleaned up and no files are output at all + final Set actualFilenames = Files.list(destinationPath).map(Path::getFileName).map(Path::toString).collect(Collectors.toSet()); + assertEquals(Collections.emptySet(), actualFilenames); + } + +} diff --git a/airbyte-integrations/connectors/destination-dynamodb/src/main/java/io/airbyte/integrations/destination/dynamodb/DynamodbConsumer.java b/airbyte-integrations/connectors/destination-dynamodb/src/main/java/io/airbyte/integrations/destination/dynamodb/DynamodbConsumer.java index babd12b13af30..b4317776c834b 100644 --- a/airbyte-integrations/connectors/destination-dynamodb/src/main/java/io/airbyte/integrations/destination/dynamodb/DynamodbConsumer.java +++ b/airbyte-integrations/connectors/destination-dynamodb/src/main/java/io/airbyte/integrations/destination/dynamodb/DynamodbConsumer.java @@ -12,9 +12,9 @@ import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.*; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.*; import java.util.function.Consumer; import org.slf4j.Logger; @@ -70,7 +70,7 @@ protected void startTracked() throws Exception { final AirbyteStream stream = configuredStream.getStream(); final AirbyteStreamNameNamespacePair streamNamePair = AirbyteStreamNameNamespacePair - .fromAirbyteSteam(stream); + .fromAirbyteStream(stream); streamNameAndNamespaceToWriters.put(streamNamePair, writer); } } diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/BaseLogger.java b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/BaseLogger.java index abc2cd57fbe28..ae99565a89a53 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/BaseLogger.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/BaseLogger.java @@ -4,8 +4,8 @@ package io.airbyte.integrations.destination.e2e_test.logging; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.time.Instant; import java.time.OffsetDateTime; import java.time.ZoneId; diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/EveryNthLogger.java b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/EveryNthLogger.java index bd021cc61ea87..127e53bee3cbb 100644 --- 
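The DynamoDB and e2e-test hunks here are part of a cross-cutting move of AirbyteStreamNameNamespacePair from io.airbyte.integrations.base to io.airbyte.protocol.models, together with renaming the misspelled fromAirbyteSteam factory to fromAirbyteStream. Updated call sites look like this small sketch:

```
import io.airbyte.protocol.models.AirbyteStream;
import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair;

public class PairUsageSketch {

  // After this change, the pair is imported from protocol.models and created via the
  // corrected factory name.
  static AirbyteStreamNameNamespacePair keyFor(final AirbyteStream stream) {
    return AirbyteStreamNameNamespacePair.fromAirbyteStream(stream);
  }

}
```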
a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/EveryNthLogger.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/EveryNthLogger.java @@ -4,8 +4,8 @@ package io.airbyte.integrations.destination.e2e_test.logging; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/FirstNLogger.java b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/FirstNLogger.java index ef8fb8806a77c..2ba624427608a 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/FirstNLogger.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/FirstNLogger.java @@ -4,8 +4,8 @@ package io.airbyte.integrations.destination.e2e_test.logging; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java index 0b62220acdcd7..d1252f3b3f6e8 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/LoggingConsumer.java @@ -6,11 +6,11 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.util.HashMap; @@ -41,7 +41,7 @@ public LoggingConsumer(final TestingLoggerFactory loggerFactory, public void start() { for (final ConfiguredAirbyteStream configuredStream : configuredCatalog.getStreams()) { final AirbyteStream stream = configuredStream.getStream(); - final AirbyteStreamNameNamespacePair streamNamePair = AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream); + final AirbyteStreamNameNamespacePair streamNamePair = AirbyteStreamNameNamespacePair.fromAirbyteStream(stream); final TestingLogger logger = loggerFactory.create(streamNamePair); loggers.put(streamNamePair, logger); } diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/RandomSamplingLogger.java 
b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/RandomSamplingLogger.java index 4fa159d6f32a7..035f911623f4b 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/RandomSamplingLogger.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/RandomSamplingLogger.java @@ -4,8 +4,8 @@ package io.airbyte.integrations.destination.e2e_test.logging; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.Random; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/TestingLoggerFactory.java b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/TestingLoggerFactory.java index f0a6969aa88f2..5c68bc347417c 100644 --- a/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/TestingLoggerFactory.java +++ b/airbyte-integrations/connectors/destination-e2e-test/src/main/java/io/airbyte/integrations/destination/e2e_test/logging/TestingLoggerFactory.java @@ -5,8 +5,8 @@ package io.airbyte.integrations.destination.e2e_test.logging; import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.e2e_test.logging.TestingLogger.LoggingType; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; public class TestingLoggerFactory { diff --git a/airbyte-integrations/connectors/destination-heap-analytics/.dockerignore b/airbyte-integrations/connectors/destination-heap-analytics/.dockerignore deleted file mode 100644 index 0db1b78f4b2cc..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/.dockerignore +++ /dev/null @@ -1,5 +0,0 @@ -* -!Dockerfile -!main.py -!destination_heap_analytics -!setup.py diff --git a/airbyte-integrations/connectors/destination-heap-analytics/Dockerfile b/airbyte-integrations/connectors/destination-heap-analytics/Dockerfile deleted file mode 100644 index 8e0bf46858495..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9.11-alpine3.15 as base - -# build and load all requirements -FROM base as builder -WORKDIR /airbyte/integration_code - -# upgrade pip to the latest version -RUN apk --no-cache upgrade \ - && pip install --upgrade pip \ - && apk --no-cache add tzdata build-base - - -COPY setup.py ./ -# install necessary packages to a temporary folder -RUN pip install --prefix=/install . - -# build a clean environment -FROM base -WORKDIR /airbyte/integration_code - -# copy all loaded and built libraries to a pure basic image -COPY --from=builder /install /usr/local -# add default timezone settings -COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime -RUN echo "Etc/UTC" > /etc/timezone - -# bash is installed for more convenient debugging. 
-RUN apk --no-cache add bash - -# copy payload code only -COPY main.py ./ -COPY destination_heap_analytics ./destination_heap_analytics - -ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" -ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] - -LABEL io.airbyte.version=0.1.0 -LABEL io.airbyte.name=airbyte/destination-heap-analytics diff --git a/airbyte-integrations/connectors/destination-heap-analytics/README.md b/airbyte-integrations/connectors/destination-heap-analytics/README.md deleted file mode 100644 index 8d380c441fe47..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/README.md +++ /dev/null @@ -1,180 +0,0 @@ -# Heap Analytics Destination - -This is the repository for the Heap Analytics destination connector, written in Python. -For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/destinations/heap-analytics). - -## Local development - -### Prerequisites -**To iterate on this connector, make sure to complete this prerequisites section.** - -#### Minimum Python version required `= 3.7.0` - -#### Build & Activate Virtual Environment and install dependencies - -From this connector directory, create a virtualenv: -``` -python -m venv .venv -``` - -This will generate a virtual environment for this module in `.venv/`. Make sure this venv is active in your -development environment of choice. To activate it from the terminal, run: -``` -source .venv/bin/activate -pip install -r requirements.txt -``` -If you are in an IDE, follow your IDE's instructions to activate the virtualenv. - -Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is -used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. -If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything -should work as you expect. - -#### Building via Gradle -From the Airbyte repository root, run: -``` -./gradlew :airbyte-integrations:connectors:destination-heap-analytics:build -``` - -#### Create credentials -**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/destinations/heap-analytics) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `destination_heap_analytics/spec.json` file. -Note that the `secrets` directory is gitignored by default, so there is no danger of accidentally checking in sensitive information. -See `integration_tests/sample_config.json` for a sample config file. - -**If you are an Airbyte core member**, copy the app id in Lastpass under the secret name `destination heap-analytics app id` and replace the app_id under the `sample_files/config-*.json` - -### Locally running the connector - -#### Server-Side API - Track - -Use [this API](https://developers.heap.io/reference/track-1) to send custom events to Heap server-side. 
- -```bash -python main.py spec -python main.py check --config sample_files/config-events.json -cat sample_files/messages.jsonl | python main.py write --config sample_files/config-events.json --catalog sample_files/configured_catalog.json -``` - -#### Server-Side API - Add User Properties - -[This API](https://developers.heap.io/reference/add-user-properties) allows you to attach custom properties to any identified users from your servers, such as Sign Up Date (in ISO8601 format), Total # Transactions Completed, or Total Dollars Spent. - -```bash -python main.py spec -python main.py check --config sample_files/config-aup.json -cat sample_files/messages.jsonl | python main.py write --config sample_files/config-aup.json --catalog sample_files/configured_catalog.json -``` - -#### Server-Side API - Add Account Properties - -[This API](https://developers.heap.io/reference/add-account-properties) allows you to attach custom account properties to users. An account ID or use of our Salesforce integration is required for this to work. - -```bash -python main.py spec -python main.py check --config sample_files/config-aap.json -cat sample_files/messages.jsonl | python main.py write --config sample_files/config-aap.json --catalog sample_files/configured_catalog.json -``` - -### Locally running the connector docker image - -#### Build - -First, make sure you build the latest Docker image: - -```bash -docker build . -t airbyte/destination-heap-analytics:dev -``` - -You can also build the connector image via Gradle: - -```bash -./gradlew :airbyte-integrations:connectors:destination-heap-analytics:airbyteDocker -``` - -When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in -the Dockerfile. - -#### Run - -Then run any of the connector commands as follows: -Spec command - -```bash -docker run --rm airbyte/destination-heap-analytics:dev spec -``` - -Check command - -```bash -docker run --rm -v $(pwd)/sample_files:/sample_files airbyte/destination-heap-analytics:dev check --config /sample_files/config-events.json -docker run --rm -v $(pwd)/sample_files:/sample_files airbyte/destination-heap-analytics:dev check --config /sample_files/config-aap.json -docker run --rm -v $(pwd)/sample_files:/sample_files airbyte/destination-heap-analytics:dev check --config /sample_files/config-aup.json -``` - -Write command -```bash -# messages.jsonl is a file containing line-separated JSON representing AirbyteMessages -cat sample_files/messages.jsonl | docker run --rm -v $(pwd)/sample_files:/sample_files airbyte/destination-heap-analytics:dev write --config /sample_files/config-events.json --catalog /sample_files/configured_catalog.json -cat sample_files/messages.jsonl | docker run --rm -v $(pwd)/sample_files:/sample_files airbyte/destination-heap-analytics:dev write --config /sample_files/config-aup.json --catalog /sample_files/configured_catalog.json -cat sample_files/messages.jsonl | docker run --rm -v $(pwd)/sample_files:/sample_files airbyte/destination-heap-analytics:dev write --config /sample_files/config-aap.json --catalog /sample_files/configured_catalog.json -``` - -## Testing - -Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. 
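For illustration only: pytest collects files named `test_*.py` and functions named `test_*`. A minimal hypothetical unit test for this connector could look like the sketch below (the file name is an assumption; only the `parse_property_json` helper comes from `destination_heap_analytics/utils.py`).

```python
# unit_tests/test_utils.py -- hypothetical file name; pytest discovers test_*.py files
# and only runs functions whose names start with test_
from destination_heap_analytics.utils import parse_property_json


def test_wildcard_returns_all_columns():
    data = {"name": "Jordan Yost", "blocked": False}
    # a property_columns list of ["*"] copies every column into the properties payload
    assert parse_property_json(data=data, property_columns=["*"]) == data
```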
-First install test dependencies into your virtual environment: - -``` -pip install .[tests] -``` - -### Unit Tests -To run unit tests locally, from the connector directory run: - -``` -python -m pytest unit_tests -``` - -### Integration Tests - -There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all destination connectors) and custom integration tests (which are specific to this connector). - -#### Custom Integration tests - -Place custom tests inside `integration_tests/` folder, then, from the connector root, run - -```bash -python -m pytest integration_tests -``` - -### Using gradle to run tests - -All commands should be run from airbyte project root. -To run unit tests: - -```bash -./gradlew :airbyte-integrations:connectors:destination-heap-analytics:unitTest -``` - -To run acceptance and custom integration tests: -```bash -./gradlew :airbyte-integrations:connectors:destination-heap-analytics:integrationTest -``` - -## Dependency Management - -All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. -We split dependencies between two groups, dependencies that are: - -* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. -* required for the testing need to go to `TEST_REQUIREMENTS` list - -### Publishing a new version of the connector - -You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? -1. Make sure your changes are passing unit and integration tests. -2. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). -3. Create a Pull Request. -4. Pat yourself on the back for being an awesome contributor. -5. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/destination-heap-analytics/bootstramp.md b/airbyte-integrations/connectors/destination-heap-analytics/bootstramp.md deleted file mode 100644 index 64abc30333b48..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/bootstramp.md +++ /dev/null @@ -1,101 +0,0 @@ -# Heap Analytics Destination - -[Heap](https://heap.io) is a product analytics tool that helps you collect and analyze your customers' behavior data in your web apps or mobile apps. Every single click, swipe, tap, pageview and form fill is tracked. It's also called [Auto Capture](https://heap.io/platform/autocapture). - -Other than that, developers can write code to "manually" track an event -- using a JavaScript SDK or an HTTP request. Today, there is a third way: you can import a large set of data via the open source E(t)L platform -- Airbyte. - -## Support any type of data source - -Airbyte loads data to Heap through the [server-side API](https://developers.heap.io/reference/server-side-apis-overview). As long as the data is transformed correctly and the output includes all required properties, the data will be loaded successfully. The API is always on! - -All types of data sources are supported, but you have to specify where the required properties are extracted from. - -Let's use [track events](https://developers.heap.io/reference/track-1) as an example. -The following sample data is a user fetched from [Auth0's API](https://auth0.com/docs/api/management/v2#!/Users/get_users). 
- -```json -[{ - "blocked": false, - "created_at": "2022-10-21T04:09:54.622Z", - "email": "evalyn_shields@hotmail.com", - "email_verified": false, - "family_name": "Brakus", - "given_name": "Camden", - "identities": { - "user_id": "0a12757f-4b19-4e93-969e-c3a2e98fe82b", - "connection": "Username-Password-Authentication", - "provider": "auth0", - "isSocial": false - }, - "name": "Jordan Yost", - "nickname": "Elroy", - "updated_at": "2022-10-21T04:09:54.622Z", - "user_id": "auth0|0a12757f-4b19-4e93-969e-c3a2e98fe82b" -}] -``` - -According to [the track API](https://developers.heap.io/reference/track-1), the request body should include the following attributes. - -- app_id: The id of your project or app -- identity: An identity, typically corresponding to an existing user. -- event: The name of the server-side event. -- properties: An object with key-value properties you want associated with the event. -- timestamp: (optional) the datetime in ISO8601, e.g. "2017-03-10T22:21:56+00:00". Defaults to the current time if not provided. - -To transform the data, you need to configure the following 4 fields when you create the connector: - -- Identity Column: The attribute name from the source data populated to identity. -- Event Column: The attribute name from the source data populated to event. -- Timestamp Column: The attribute name from the source data populated to timestamp. This field is optional. It will be the current time if not provided. -- Property Columns: The attribute names from the source data populated to object properties. If you want to pick multiple attributes, split the names by a comma (`,`). If you want to pick ALL attributes, simply put an asterisk (`*`). - -So, if you want to load the following data: - -```json -{ - "identity": "evalyn_shields@hotmail.com", - "event": "Username-Password-Authentication", - "timestamp": "2022-10-21T04:09:54.622Z", - "properties": { - "blocked": false, - "created_at": "2022-10-21T04:09:54.622Z", - "name": "Jordan Yost" - } -} -``` - -Here's how you may configure the connector: - -```json -{ - "app_id": "11", - "base_url": "https://heapanalytics.com", - "api": { - "api_type": "track", - "property_columns": "blocked,created_at,name", - "event_column": "identities.connection", - "identity_column": "email", - "timestamp_column": "updated_at" - } -} -``` - -Notice that the event property comes from a property `connection` embedded in an object `identities`; that's why you set `event_column` to `identities.connection`. This is called dot notation -- write the name of the object, followed by a dot (.), followed by the name of the property. - -Similarly, if you want to load a user or an account, there is a different set of required properties. To learn more, please refer to the [ReadMe.md](/docs/integrations/destinations/heap-analytics.md). - -## Limitations - -Though the destination connector supports a generic schema, there are a few limitations. - -### Performance - -Heap offers a bulk API that allows you to load multiple rows of data. However, it is not implemented in the first version, so every row is a separate HTTP POST request to Heap, which is not efficient. Please submit your request and we will enhance it for you. - -### Only one schema is supported in a connector - -Because the configuration of the destination connector includes the details of the transformation, a connector only works for one schema. For example, there are 4 tables in a postgres database -- products, orders, users, logs. If you want to import all tables to Heap, you may create 4 different connectors. 
Each connector includes a transformation setting suitable for the corresponding table schema. - -### Unable to join 2 streams - -If you understand the section above, you may realize there's no way to merge data from 2 streams. Sticking with the postgres example above, the table `products` contains the details (also called metadata) for a given product id. The table `orders` uses the product id as a foreign key to reference the table `products`. In a SQL console, you can use an `inner join` to combine these 2 tables. However, the destination connector is unable to merge them for you. Instead, you may pre-process the data by creating a view in postgres first that joins these 2 tables, and configure Airbyte to load that view. diff --git a/airbyte-integrations/connectors/destination-heap-analytics/build.gradle b/airbyte-integrations/connectors/destination-heap-analytics/build.gradle deleted file mode 100644 index 4eb911066d2ed..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/build.gradle +++ /dev/null @@ -1,8 +0,0 @@ -plugins { - id 'airbyte-python' - id 'airbyte-docker' -} - -airbytePython { - moduleDirectory 'destination_heap_analytics' -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/__init__.py b/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/__init__.py deleted file mode 100644 index 5eab928daf60d..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -from .destination import DestinationHeapAnalytics - -__all__ = ["DestinationHeapAnalytics"] diff --git a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/client.py b/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/client.py deleted file mode 100644 index f8fcfe3de1d38..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/client.py +++ /dev/null @@ -1,51 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - -import logging -from typing import Any, Mapping -from urllib import parse - -import pendulum -import requests -from destination_heap_analytics.utils import datetime_to_string - -HEADERS = {"Content_Type": "application/json"} - -logger = logging.getLogger("airbyte") - - -class HeapClient: - api_type = "" - api_endpoint = "" - check_endpoint = "" - - def __init__(self, base_url: str, app_id: str, api: Mapping[str, str]): - self.api_type = api.get("api_type") - self.app_id = app_id - self.api_endpoint = parse.urljoin(base_url, f"api/{self.api_type}") - self.check_endpoint = parse.urljoin(base_url, "api/track") - - def check(self): - """ - send a payload to the track endpoint - """ - return self._request( - url=self.check_endpoint, - json={ - "identity": "admin@heap.io", - "idempotency_key": "airbyte-preflight-check", - "event": "Airbyte Preflight Check", - "timestamp": datetime_to_string(pendulum.now("UTC")), - }, - ) - - def write(self, json: Mapping[str, Any]): - return self._request(url=self.api_endpoint, json=json) - - def _request(self, url: str, json: Mapping[str, Any] = {}) -> requests.Response: - logger.debug(json) - response = requests.post(url=url, headers=HEADERS, json={"app_id": self.app_id, **(json or {})}) - logger.debug(response.status_code) - response.raise_for_status() - return response diff --git a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/destination.py b/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/destination.py deleted file mode 100644 index 93987c3ee605f..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/destination.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - - -import logging -from typing import Any, Dict, Iterable, Mapping - -from airbyte_cdk import AirbyteLogger -from airbyte_cdk.destinations import Destination -from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteLogMessage, AirbyteMessage, ConfiguredAirbyteCatalog, Level, Status, Type -from destination_heap_analytics.client import HeapClient -from destination_heap_analytics.utils import flatten_json, parse_aap_json, parse_aup_json, parse_event_json -from requests import HTTPError - -logger = logging.getLogger("airbyte") - - -class DestinationHeapAnalytics(Destination): - def parse_and_validate_json(self, data: Dict[str, any], api: Mapping[str, str]): - flatten = flatten_json(data) - api_type = api.get("api_type") - if api_type == "track": - return parse_event_json(data=flatten, **api) - elif api_type == "add_user_properties": - return parse_aup_json(data=flatten, **api) - elif api_type == "add_account_properties": - return parse_aap_json(data=flatten, **api) - else: - return None - - def write( - self, config: Mapping[str, Any], configured_catalog: ConfiguredAirbyteCatalog, input_messages: Iterable[AirbyteMessage] - ) -> Iterable[AirbyteMessage]: - messages_count = 0 - records_count = 0 - loaded_count = 0 - api = config.get("api") - api["property_columns"] = api.get("property_columns").split(",") - client = HeapClient(**config) - for message in input_messages: - messages_count = messages_count + 1 - if message.type == Type.STATE: - yield message - elif message.type == Type.RECORD: - record = message.record - data = record.data - records_count = records_count + 1 - validated = self.parse_and_validate_json(data=data, api=api) - if validated: - try: - client.write(validated) - loaded_count = loaded_count + 1 - except HTTPError as ex: - logger.warn(f"experienced an error at the {records_count}th row, error: {ex}") - else: - logger.warn(f"data is invalid, skip writing the {records_count}th row") - else: - yield message - resultMessage = AirbyteMessage( - type=Type.LOG, - log=AirbyteLogMessage( - level=Level.INFO, message=f"Total Messages: {messages_count}. Total Records: {records_count}. Total loaded: {loaded_count}." 
- ), - ) - yield resultMessage - - def check(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: - try: - client = HeapClient(**config) - logger.info(f"Checking connection for app_id: {client.app_id}, api_endpoint: {client.api_endpoint}") - client.check() - except Exception as e: - return AirbyteConnectionStatus(status=Status.FAILED, message=f"An exception occurred: {repr(e)}") - else: - return AirbyteConnectionStatus(status=Status.SUCCEEDED) diff --git a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/spec.json b/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/spec.json deleted file mode 100644 index 55a2b5fad1448..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/spec.json +++ /dev/null @@ -1,144 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/destinations/heap-analytics", - "supported_destination_sync_modes": ["append", "append_dedup"], - "supportsIncremental": true, - "supportsDBT": false, - "supportsNormalization": false, - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Heap Analytics Destination Spec", - "type": "object", - "required": ["base_url", "app_id", "api"], - "additionalProperties": true, - "properties": { - "app_id": { - "order": 0, - "type": "string", - "title": "App Id", - "description": "The Environment Id of your Main Production project; read the doc to learn more.", - "default": "11" - }, - "base_url": { - "order": 1, - "type": "string", - "title": "Base URL", - "description": "The Base URL for Heap Analytics", - "default": "https://heapanalytics.com", - "examples": ["https://heapanalytics.com"] - }, - "api": { - "order": 2, - "type": "object", - "title": "API Type", - "additionalProperties": true, - "oneOf": [ - { - "order": 0, - "type": "object", - "title": "Track Events", - "required": [ - "api_type", - "property_columns", - "event_column", - "identity_column" - ], - "properties": { - "api_type": { - "order": 0, - "type": "string", - "const": "track" - }, - "property_columns": { - "order": 1, - "type": "string", - "title": "Property Columns", - "default": "*", - "description": "Please list all columns populated to the properties attribute, split by comma(,). It's case sensitive.", - "examples": ["subject,variation"] - }, - "event_column": { - "order": 2, - "type": "string", - "title": "Event Column", - "description": "Please pick the column populated to the event attribute. It's case sensitive.", - "examples": ["order_name"] - }, - "identity_column": { - "order": 3, - "type": "string", - "title": "Identity Column", - "description": "Please pick the column populated to the identity attribute.", - "examples": ["email"] - }, - "timestamp_column": { - "order": 4, - "type": "string", - "title": "Timestamp Column", - "description": "Please pick the column populated to the (optional) timestamp attribute. time_now() will be used if missing.", - "examples": ["updated_at"] - } - } - }, - { - "order": 1, - "type": "object", - "title": "Add User Properties", - "required": ["api_type", "property_columns", "identity_column"], - "properties": { - "api_type": { - "order": 0, - "type": "string", - "const": "add_user_properties" - }, - "property_columns": { - "order": 1, - "type": "string", - "title": "Property Columns", - "default": "*", - "description": "Please list all columns populated to the properties attribute, split by comma(,). 
It's case sensitive.", - "examples": ["age,language,profession"] - }, - "identity_column": { - "order": 3, - "type": "string", - "title": "Identity Column", - "description": "Please pick the column populated to the identity attribute.", - "examples": ["user_id"] - } - } - }, - { - "order": 2, - "type": "object", - "title": "Add Account Properties", - "required": ["api_type", "property_columns", "account_id_column"], - "properties": { - "api_type": { - "order": 0, - "type": "string", - "const": "add_account_properties" - }, - "property_columns": { - "order": 1, - "type": "string", - "title": "Property Columns", - "default": "*", - "description": "Please list all columns populated to the properties attribute, split by comma(,). It's case sensitive.", - "examples": [ - "is_in_good_standing,revenue_potential,account_hq,subscription" - ] - }, - "account_id_column": { - "order": 3, - "type": "string", - "title": "Account ID Column", - "description": "Please pick the column populated to the account_id attribute.", - "examples": ["company_name"] - } - } - } - ] - } - } - } -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/utils.py b/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/utils.py deleted file mode 100644 index 9d3a76165d697..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/destination_heap_analytics/utils.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -import datetime -from typing import Any, Dict, List, Mapping - -import pendulum - - -def datetime_to_string(date: datetime.datetime) -> str: - return date.to_iso8601_string() - - -def flatten_json(obj: Dict[str, Any]) -> Dict[str, Any]: - out = {} - - def flatten(x: Dict[str, Any], prefix=""): - if type(x) is dict: - for a in x: - flatten(x[a], prefix + a + ".") - elif type(x) is list: - i = 0 - for a in x: - flatten(a, prefix + str(i) + ".") - i += 1 - else: - out[prefix[:-1]] = x - - flatten(obj) - return out - - -def parse_property_json(data: Dict[str, any], property_columns: List[str]) -> Mapping[str, Any]: - if len(property_columns) == 1 and property_columns[0] == "*": - return {**(data or {})} - else: - properties = {} - for column in property_columns: - if column in data and data[column] is not None: - properties[column] = data[column] - return properties - - -def parse_event_json( - data: Dict[str, any], property_columns: List[str], event_column: str, identity_column: str, timestamp_column: str = None, **kwargs -) -> Mapping[str, Any]: - timestamp = data.get(timestamp_column) if data.get(timestamp_column) else datetime_to_string(pendulum.now("UTC")) - event = data.get(event_column) - identity = data.get(identity_column) - if timestamp and event and identity: - properties = parse_property_json(data=data, property_columns=property_columns) - return { - "identity": identity, - "event": event, - "timestamp": timestamp, - "properties": properties, - } - else: - return None - - -def parse_aup_json(data: Dict[str, any], property_columns: List[str], identity_column: str, **kwargs) -> Mapping[str, Any]: - identity = data.get(identity_column) - if identity: - properties = parse_property_json(data=data, property_columns=property_columns) - return { - "identity": identity, - "properties": properties, - } - else: - return None - - -def parse_aap_json(data: Dict[str, any], property_columns: List[str], account_id_column: str, **kwargs) -> Mapping[str, Any]: - account_id = 
data.get(account_id_column) - if account_id: - properties = parse_property_json(data=data, property_columns=property_columns) - return { - "account_id": account_id, - "properties": properties, - } - else: - return None diff --git a/airbyte-integrations/connectors/destination-heap-analytics/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-heap-analytics/integration_tests/integration_test.py deleted file mode 100644 index 407058071aa3c..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/integration_tests/integration_test.py +++ /dev/null @@ -1,149 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -import sys -from datetime import datetime -from io import StringIO -from json import load -from typing import Any, Dict, List -from unittest.mock import MagicMock - -from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStateMessage, AirbyteStateType, Level, Status, Type -from airbyte_cdk.models.airbyte_protocol import ConfiguredAirbyteCatalog -from destination_heap_analytics.destination import DestinationHeapAnalytics -from pytest import fixture - - -class CaptureStdOut(list): - """ - Captures the stdout messages into the variable list, that could be validated later. - """ - - def __enter__(self): - self._stdout = sys.stdout - sys.stdout = self._stringio = StringIO() - return self - - def __exit__(self, *args): - self.extend(self._stringio.getvalue().splitlines()) - del self._stringio - sys.stdout = self._stdout - - -@fixture(scope="module") -def config_events() -> Dict[str, str]: - with open( - "sample_files/config-events.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def configured_catalog() -> Dict[str, str]: - with open( - "sample_files/configured_catalog.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def config_aap() -> Dict[str, str]: - with open( - "sample_files/config-aap.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def config_aup() -> Dict[str, str]: - with open( - "sample_files/config-aup.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def invalid_config() -> Dict[str, str]: - with open( - "integration_tests/invalid_config.json", - ) as f: - yield load(f) - - -@fixture -def airbyte_state_message(): - return AirbyteMessage( - type=Type.STATE, - state=AirbyteStateMessage( - type=AirbyteStateType.STREAM, - data={}, - ), - ) - - -@fixture -def airbyte_messages(airbyte_state_message): - return [ - airbyte_state_message, - AirbyteMessage( - type=Type.RECORD, - record=AirbyteRecordMessage( - stream="users", - data={ - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "email_verified": False, - "family_name": "Blanda", - "given_name": "Bradly", - "identities": { - "user_id": "4ce74b28-bc00-4bbf-8a01-712dae975291", - "connection": "Username-Password-Authentication", - "provider": "auth0", - "isSocial": False, - }, - "name": "Hope Rodriguez", - "nickname": "Terrence", - "updated_at": "2022-10-21T04:08:58.994Z", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - emitted_at=int(datetime.now().timestamp()) * 1000, - ), - ), - airbyte_state_message, - ] - - -def test_check_fails(invalid_config): - destination = DestinationHeapAnalytics() - status = destination.check(logger=MagicMock(), config=invalid_config) - assert status.status == Status.FAILED - - -def test_check_succeeds(config_events, config_aap, config_aup): - destination = DestinationHeapAnalytics() - 
for config in [config_events, config_aap, config_aup]: - status = destination.check(logger=MagicMock(), config=config) - assert status.status == Status.SUCCEEDED - - -def test_write( - config_events: Dict[str, Any], - config_aap: Dict[str, Any], - config_aup: Dict[str, Any], - configured_catalog: ConfiguredAirbyteCatalog, - airbyte_messages: List[AirbyteMessage], - airbyte_state_message: AirbyteStateMessage, -): - destination = DestinationHeapAnalytics() - - for config in [config_events, config_aap, config_aup]: - generator = destination.write(config, configured_catalog, airbyte_messages) - result = list(generator) - assert len(result) == 3 - assert result[0] == airbyte_state_message - assert result[1] == airbyte_state_message - assert result[2].type == Type.LOG - assert result[2].log.level == Level.INFO - assert result[2].log.message == "Total Messages: 3. Total Records: 1. Total loaded: 1." diff --git a/airbyte-integrations/connectors/destination-heap-analytics/integration_tests/invalid_config.json b/airbyte-integrations/connectors/destination-heap-analytics/integration_tests/invalid_config.json deleted file mode 100644 index 5ad762a6beb15..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/integration_tests/invalid_config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "app_id": "11", - "base_url": "https://www.heapanalytics.com", - "api": { - "api_type": "track", - "property_columns": "*", - "event_column": "event", - "identity_column": "email" - } -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/main.py b/airbyte-integrations/connectors/destination-heap-analytics/main.py deleted file mode 100644 index cf506f8e77383..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/main.py +++ /dev/null @@ -1,11 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -import sys - -from destination_heap_analytics import DestinationHeapAnalytics - -if __name__ == "__main__": - DestinationHeapAnalytics().run(sys.argv[1:]) diff --git a/airbyte-integrations/connectors/destination-heap-analytics/requirements.txt b/airbyte-integrations/connectors/destination-heap-analytics/requirements.txt deleted file mode 100644 index d6e1198b1ab1f..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . 
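Before the sample config files below, here is a minimal sketch of how the helpers removed above fit together: `flatten_json` turns nested attributes into dot-notation keys, and `parse_event_json` picks the identity, event, timestamp and properties, mirroring the `track` settings in `config-events.json`. The record literal is trimmed sample data and the column list is shortened for brevity; the helpers themselves come from `destination_heap_analytics/utils.py`.

```python
# Sketch only: exercising the deleted transformation helpers by hand.
from destination_heap_analytics.utils import flatten_json, parse_event_json

record = {
    "email": "evalyn_shields@hotmail.com",
    "blocked": False,
    "name": "Jordan Yost",
    "updated_at": "2022-10-21T04:09:54.622Z",
    "identities": {"connection": "Username-Password-Authentication", "provider": "auth0"},
}

# nested keys become dot-notation keys, e.g. "identities.connection"
flattened = flatten_json(record)

event = parse_event_json(
    data=flattened,
    property_columns=["blocked", "name"],  # destination.py splits the comma-separated config value into a list
    event_column="identities.connection",
    identity_column="email",
    timestamp_column="updated_at",
)
# event == {
#     "identity": "evalyn_shields@hotmail.com",
#     "event": "Username-Password-Authentication",
#     "timestamp": "2022-10-21T04:09:54.622Z",
#     "properties": {"blocked": False, "name": "Jordan Yost"},
# }
```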
diff --git a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-aap.json b/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-aap.json deleted file mode 100644 index ae6f0d1ad5bd8..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-aap.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "app_id": "11", - "base_url": "https://heapanalytics.com", - "api": { - "api_type": "add_account_properties", - "property_columns": "family_name,email_verified,blocked", - "account_id_column": "identities.user_id" - } -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-aup.json b/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-aup.json deleted file mode 100644 index 6ad9ad0b0335c..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-aup.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "app_id": "11", - "base_url": "https://heapanalytics.com", - "api": { - "api_type": "add_user_properties", - "property_columns": "identities_connection,identities_provider,created_at,updated_at,name", - "identity_column": "user_id" - } -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-events.json b/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-events.json deleted file mode 100644 index b2e9e87fc233f..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/config-events.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "app_id": "11", - "base_url": "https://heapanalytics.com", - "api": { - "api_type": "track", - "property_columns": "blocked,created_at,updated_at,name", - "event_column": "identities.connection", - "identity_column": "email", - "timestamp_column": "updated_at" - } -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/configured_catalog.json b/airbyte-integrations/connectors/destination-heap-analytics/sample_files/configured_catalog.json deleted file mode 100644 index cac1ca9af2e33..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/configured_catalog.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "users", - "json_schema": {}, - "supported_sync_modes": ["full_refresh", "incremental"] - }, - "sync_mode": "incremental", - "destination_sync_mode": "overwrite", - "cursor_field": ["updated_at"], - "primary_key": [["user_id"]] - } - ] -} diff --git a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/messages.jsonl b/airbyte-integrations/connectors/destination-heap-analytics/sample_files/messages.jsonl deleted file mode 100644 index c89edac727f8a..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/sample_files/messages.jsonl +++ /dev/null @@ -1,50 +0,0 @@ -{"type": "RECORD", "record": {"stream": "users", "data": {"created_at": "2022-10-19T21:44:49.226Z", "email": "abcd@email.com", "email_verified": false, "identities": {"connection": "Username-Password-Authentication", "user_id": "63506fd15615e6a1bdb54ebb", "provider": "auth0", "isSocial": false}, "name": "abcd@email.com", "nickname": "abcd", "picture": "https://s.gravatar.com/avatar/0c50c2e0d77c79a9852e31e715038a03?s=480&r=pg&d=https%3A%2F%2Fcdn.auth0.com%2Favatars%2Fab.png", "updated_at": "2022-10-19T21:44:49.226Z", "user_id": "auth0|63506fd15615e6a1bdb54ebb"}, "emitted_at": 1666743597616}} -{"type": "RECORD", 
"record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:08:53.393Z", "email": "nedra14@hotmail.com", "email_verified": false, "family_name": "Tillman", "given_name": "Jacinto", "identities": {"user_id": "815ff3c3-84fa-4f63-b959-ac2d11efc63c", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Lola Conn", "nickname": "Kenyatta", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:08:53.393Z", "user_id": "auth0|815ff3c3-84fa-4f63-b959-ac2d11efc63c", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597620}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:08:56.729Z", "email": "myrtice.maggio@yahoo.com", "email_verified": false, "family_name": "Thompson", "given_name": "Greg", "identities": {"user_id": "d9b32ba6-f330-4d31-a062-21edc7dcd47b", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Marilyn Goldner", "nickname": "Alysa", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:08:56.729Z", "user_id": "auth0|d9b32ba6-f330-4d31-a062-21edc7dcd47b", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597621}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:08:57.575Z", "email": "jace2@gmail.com", "email_verified": false, "family_name": "Bahringer", "given_name": "Carey", "identities": {"user_id": "69cccde7-2ec8-4206-9c60-37cfbbf76b89", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Dr. 
Jay Donnelly", "nickname": "Hiram", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:08:57.575Z", "user_id": "auth0|69cccde7-2ec8-4206-9c60-37cfbbf76b89", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597621}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:08:58.333Z", "email": "thelma.rohan@yahoo.com", "email_verified": false, "family_name": "Sauer", "given_name": "Estel", "identities": {"user_id": "3b0e855c-3ca7-4ef0-ba04-1ad03e9925f3", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Garry Rolfson", "nickname": "Celestine", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:08:58.333Z", "user_id": "auth0|3b0e855c-3ca7-4ef0-ba04-1ad03e9925f3", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597621}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:08:58.994Z", "email": "beryl_becker95@yahoo.com", "email_verified": false, "family_name": "Blanda", "given_name": "Bradly", "identities": {"user_id": "4ce74b28-bc00-4bbf-8a01-712dae975291", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Hope Rodriguez", "nickname": "Terrence", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:08:58.994Z", "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597621}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:08:59.720Z", "email": "rubye_spinka86@yahoo.com", "email_verified": false, "family_name": "Purdy", "given_name": "Florida", "identities": {"user_id": "98831d6c-3cd6-4594-9245-b103ca89cace", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Felipe Corwin PhD", "nickname": "Lilyan", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:08:59.720Z", "user_id": "auth0|98831d6c-3cd6-4594-9245-b103ca89cace", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597621}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:08.993Z", "email": "daniella.ondricka67@yahoo.com", "email_verified": false, "family_name": "Grimes", "given_name": "Ladarius", "identities": {"user_id": "78fefc1c-9971-4f83-8199-fea13d77dd77", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Pam Carroll", "nickname": "Jabari", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:08.993Z", "user_id": "auth0|78fefc1c-9971-4f83-8199-fea13d77dd77", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597622}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": 
false, "created_at": "2022-10-21T04:09:10.060Z", "email": "neva62@gmail.com", "email_verified": false, "family_name": "Nolan", "given_name": "Garnett", "identities": {"user_id": "bc0fd79d-e3a9-4204-8ab5-9983e40fd126", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Kelvin Goldner", "nickname": "Alexandrea", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:10.060Z", "user_id": "auth0|bc0fd79d-e3a9-4204-8ab5-9983e40fd126", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597622}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:13.399Z", "email": "brycen60@hotmail.com", "email_verified": false, "family_name": "Weimann", "given_name": "Marcella", "identities": {"user_id": "af1fc04e-ff8c-4ed3-9aca-54f0dfd6fd44", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Olivia Rice", "nickname": "Cortney", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:13.399Z", "user_id": "auth0|af1fc04e-ff8c-4ed3-9aca-54f0dfd6fd44", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597622}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:14.323Z", "email": "pierce43@yahoo.com", "email_verified": false, "family_name": "Vandervort", "given_name": "Hilbert", "identities": {"user_id": "702b3afc-d551-4c90-81bd-f792bae32b3b", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Flora Parisian", "nickname": "Aglae", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:14.323Z", "user_id": "auth0|702b3afc-d551-4c90-81bd-f792bae32b3b", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597622}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:15.072Z", "email": "rosemary.kautzer@hotmail.com", "email_verified": false, "family_name": "Robel", "given_name": "Coty", "identities": {"user_id": "24e149ff-04f5-457a-9936-64e8a6cb6d06", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Laurie Metz", "nickname": "Harrison", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:15.072Z", "user_id": "auth0|24e149ff-04f5-457a-9936-64e8a6cb6d06", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597622}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:51.891Z", "email": "otho.ward@hotmail.com", "email_verified": false, "family_name": "Funk", "given_name": "Hazle", "identities": {"user_id": "73da3042-0713-423a-bd3c-a45838269230", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Nora Kerluke", "nickname": "Herminio", "picture": 
"https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:51.891Z", "user_id": "auth0|73da3042-0713-423a-bd3c-a45838269230", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597623}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:54.068Z", "email": "jamel15@yahoo.com", "email_verified": false, "family_name": "Kunze", "given_name": "Maria", "identities": {"user_id": "c7723c91-9d12-41c2-a539-a0908d46092f", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Nichole Von", "nickname": "Mikayla", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:54.068Z", "user_id": "auth0|c7723c91-9d12-41c2-a539-a0908d46092f", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597623}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:54.622Z", "email": "evalyn_shields@hotmail.com", "email_verified": false, "family_name": "Brakus", "given_name": "Camden", "identities": {"user_id": "0a12757f-4b19-4e93-969e-c3a2e98fe82b", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Jordan Yost", "nickname": "Elroy", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:54.622Z", "user_id": "auth0|0a12757f-4b19-4e93-969e-c3a2e98fe82b", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597623}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:55.448Z", "email": "shayna74@gmail.com", "email_verified": false, "family_name": "Klocko", "given_name": "Bulah", "identities": {"user_id": "88abf12b-8a2b-473d-a735-4ca07353378e", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Ms. 
Marsha Kiehn", "nickname": "Garret", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:55.448Z", "user_id": "auth0|88abf12b-8a2b-473d-a735-4ca07353378e", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597624}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:56.062Z", "email": "alexandrea23@yahoo.com", "email_verified": false, "family_name": "Wehner", "given_name": "Carmine", "identities": {"user_id": "681d25e1-92b9-4997-a1ed-058c71089b03", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Erika Konopelski", "nickname": "Sunny", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:56.062Z", "user_id": "auth0|681d25e1-92b9-4997-a1ed-058c71089b03", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597624}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:56.711Z", "email": "zita.hoeger@hotmail.com", "email_verified": false, "family_name": "Simonis", "given_name": "Estel", "identities": {"user_id": "9c9c0239-a4de-42ee-8169-4fd13db69266", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Ginger Kiehn", "nickname": "Prudence", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:56.711Z", "user_id": "auth0|9c9c0239-a4de-42ee-8169-4fd13db69266", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597624}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:57.377Z", "email": "barrett.collins@gmail.com", "email_verified": false, "family_name": "Carter", "given_name": "Mabelle", "identities": {"user_id": "395305e9-08ce-465f-844b-f968a33bdaa3", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Genevieve Dietrich", "nickname": "Xavier", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:57.377Z", "user_id": "auth0|395305e9-08ce-465f-844b-f968a33bdaa3", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597624}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:57.818Z", "email": "marlen42@yahoo.com", "email_verified": false, "family_name": "Mante", "given_name": "Destini", "identities": {"user_id": "455f21be-922d-4a0f-be9b-5c578358ef59", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Jeanne O'Connell II", "nickname": "Cheyanne", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:57.818Z", "user_id": "auth0|455f21be-922d-4a0f-be9b-5c578358ef59", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597624}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": 
false, "created_at": "2022-10-21T04:09:58.402Z", "email": "glennie_runolfsson1@hotmail.com", "email_verified": false, "family_name": "Muller", "given_name": "Gideon", "identities": {"user_id": "7d3cbf2a-cf1b-406b-b6dc-9c0c46365ef4", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Stewart Schumm", "nickname": "Esmeralda", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:58.402Z", "user_id": "auth0|7d3cbf2a-cf1b-406b-b6dc-9c0c46365ef4", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597625}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:58.874Z", "email": "jany93@gmail.com", "email_verified": false, "family_name": "Donnelly", "given_name": "Kennedi", "identities": {"user_id": "9a35cf40-1a2e-4bf2-bfdf-30c7a7db9039", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Darla Schneider", "nickname": "Olen", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:58.874Z", "user_id": "auth0|9a35cf40-1a2e-4bf2-bfdf-30c7a7db9039", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597625}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:09:59.328Z", "email": "marielle.murazik8@hotmail.com", "email_verified": false, "family_name": "Gutkowski", "given_name": "Alysha", "identities": {"user_id": "26d8952b-2e1e-4b79-b2aa-e363f062701a", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Lynn Crooks", "nickname": "Noe", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:09:59.328Z", "user_id": "auth0|26d8952b-2e1e-4b79-b2aa-e363f062701a", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597625}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:00.175Z", "email": "vergie17@hotmail.com", "email_verified": false, "family_name": "Jones", "given_name": "Gail", "identities": {"user_id": "1110bc0d-d59e-409b-b84a-448dc6c7d6bb", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Edith Pagac", "nickname": "Ignacio", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:00.175Z", "user_id": "auth0|1110bc0d-d59e-409b-b84a-448dc6c7d6bb", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597625}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:01.180Z", "email": "lulu_ullrich@hotmail.com", "email_verified": false, "family_name": "Lesch", "given_name": "Dejon", "identities": {"user_id": "24d08805-c399-431d-a54c-416f6416e341", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Emanuel Hilpert", "nickname": "Kraig", "picture": 
"https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:01.180Z", "user_id": "auth0|24d08805-c399-431d-a54c-416f6416e341", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597625}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:01.889Z", "email": "lew.hudson76@hotmail.com", "email_verified": false, "family_name": "Roberts", "given_name": "Jackie", "identities": {"user_id": "42797566-e687-4dfc-b5c5-da5e246fcea7", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Francis Hammes V", "nickname": "Irwin", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:01.889Z", "user_id": "auth0|42797566-e687-4dfc-b5c5-da5e246fcea7", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597626}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:03.177Z", "email": "kelli.abbott86@yahoo.com", "email_verified": false, "family_name": "Crooks", "given_name": "Bessie", "identities": {"user_id": "fc9fcb3d-8b1d-496a-9461-e9c9d549601b", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Miss Jessie Pfannerstill", "nickname": "Alvah", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:03.177Z", "user_id": "auth0|fc9fcb3d-8b1d-496a-9461-e9c9d549601b", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597626}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:04.059Z", "email": "kenna_champlin@yahoo.com", "email_verified": false, "family_name": "Torp", "given_name": "Bill", "identities": {"user_id": "8ef15327-eecb-48da-b167-3ef38f7dfdba", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Angel Koepp", "nickname": "Jaron", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:04.059Z", "user_id": "auth0|8ef15327-eecb-48da-b167-3ef38f7dfdba", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597626}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:04.795Z", "email": "merl.harvey33@yahoo.com", "email_verified": false, "family_name": "Muller", "given_name": "Emelia", "identities": {"user_id": "c5819fd9-24c0-4599-9bd3-63e3fbca74a6", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Mrs. 
Tiffany Carroll", "nickname": "Kamron", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:04.795Z", "user_id": "auth0|c5819fd9-24c0-4599-9bd3-63e3fbca74a6", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597626}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:05.479Z", "email": "berneice48@hotmail.com", "email_verified": false, "family_name": "Heller", "given_name": "Hortense", "identities": {"user_id": "1be8c604-5cca-4c91-9f3d-efca5ca38485", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Betty Powlowski", "nickname": "Sandra", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:05.479Z", "user_id": "auth0|1be8c604-5cca-4c91-9f3d-efca5ca38485", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597626}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:06.068Z", "email": "ethyl_hoppe77@yahoo.com", "email_verified": false, "family_name": "Medhurst", "given_name": "Kaelyn", "identities": {"user_id": "ef66586b-43bb-4b75-840d-0619c9e847bd", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Ismael Jast", "nickname": "Hortense", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:06.068Z", "user_id": "auth0|ef66586b-43bb-4b75-840d-0619c9e847bd", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597626}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:06.716Z", "email": "marilyne88@hotmail.com", "email_verified": false, "family_name": "Rolfson", "given_name": "Frederic", "identities": {"user_id": "70072c5b-d0d4-4603-8b67-ec7f5fe84a50", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Noel Hagenes", "nickname": "Cooper", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:06.716Z", "user_id": "auth0|70072c5b-d0d4-4603-8b67-ec7f5fe84a50", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597627}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:07.327Z", "email": "margie.legros10@yahoo.com", "email_verified": false, "family_name": "Greenfelder", "given_name": "Ricky", "identities": {"user_id": "bef87c6b-ebbd-4963-8039-68768214b0ba", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Paulette Leannon", "nickname": "Destiny", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:07.327Z", "user_id": "auth0|bef87c6b-ebbd-4963-8039-68768214b0ba", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597627}} -{"type": "RECORD", "record": {"stream": "users", "data": 
{"blocked": false, "created_at": "2022-10-21T04:10:07.902Z", "email": "einar_graham@hotmail.com", "email_verified": false, "family_name": "Weissnat", "given_name": "Jessyca", "identities": {"user_id": "f542feea-6a8e-4718-8486-bd42cfbd263e", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Samantha Ortiz", "nickname": "Ryann", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:07.902Z", "user_id": "auth0|f542feea-6a8e-4718-8486-bd42cfbd263e", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597627}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:08.453Z", "email": "katrina23@hotmail.com", "email_verified": false, "family_name": "Hauck", "given_name": "Santos", "identities": {"user_id": "ca975aaa-6840-4f1a-8a6a-5bab7b6f7ddd", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Cecelia Runolfsdottir", "nickname": "Harley", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:08.453Z", "user_id": "auth0|ca975aaa-6840-4f1a-8a6a-5bab7b6f7ddd", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597627}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:09.028Z", "email": "natalia.moore57@gmail.com", "email_verified": false, "family_name": "Walker", "given_name": "Wyman", "identities": {"user_id": "c60e0dc6-844c-444a-9642-7463e6503584", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Lyle Pouros", "nickname": "Michaela", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:09.028Z", "user_id": "auth0|c60e0dc6-844c-444a-9642-7463e6503584", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597628}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:21.677Z", "email": "libbie.grant83@hotmail.com", "email_verified": false, "family_name": "Daniel", "given_name": "Bradford", "identities": {"user_id": "8bf58e36-ca07-4c50-8906-7d329ae4bff8", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Cory Brekke", "nickname": "Jeanne", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:21.677Z", "user_id": "auth0|8bf58e36-ca07-4c50-8906-7d329ae4bff8", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597628}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:23.060Z", "email": "easter11@gmail.com", "email_verified": false, "family_name": "Heaney", "given_name": "Cassidy", "identities": {"user_id": "7b9ab6fe-f1a4-45fe-a0a7-6752f968e4d1", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Dan Hudson", "nickname": "Florine", "picture": 
"https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:23.060Z", "user_id": "auth0|7b9ab6fe-f1a4-45fe-a0a7-6752f968e4d1", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597628}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:23.807Z", "email": "felicity.johnson89@hotmail.com", "email_verified": false, "family_name": "Waters", "given_name": "Isaiah", "identities": {"user_id": "a251175c-a56a-447e-bcff-c3fb50b7f718", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Donnie Klein", "nickname": "Daphney", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:23.807Z", "user_id": "auth0|a251175c-a56a-447e-bcff-c3fb50b7f718", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597628}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:24.491Z", "email": "alexandra18@gmail.com", "email_verified": false, "family_name": "Simonis", "given_name": "Jaeden", "identities": {"user_id": "d5994287-2f57-4b63-8808-2b001fb1ad4a", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Leticia Spencer", "nickname": "Gabrielle", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:24.491Z", "user_id": "auth0|d5994287-2f57-4b63-8808-2b001fb1ad4a", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597628}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:25.112Z", "email": "obie79@gmail.com", "email_verified": false, "family_name": "Skiles", "given_name": "Ernestina", "identities": {"user_id": "8cf3818b-9c63-47d8-97a2-1daee603e864", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Ronald Yundt", "nickname": "Okey", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:25.112Z", "user_id": "auth0|8cf3818b-9c63-47d8-97a2-1daee603e864", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597628}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:25.655Z", "email": "alexandria.brekke@hotmail.com", "email_verified": false, "family_name": "Bergstrom", "given_name": "Royce", "identities": {"user_id": "dec1828b-f438-4140-a80e-9f3c551b3287", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Johnnie Kuphal", "nickname": "Peggie", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:25.655Z", "user_id": "auth0|dec1828b-f438-4140-a80e-9f3c551b3287", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597629}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:26.228Z", 
"email": "deanna.breitenberg19@gmail.com", "email_verified": false, "family_name": "Weber", "given_name": "Santiago", "identities": {"user_id": "267902a9-d6a3-4206-a272-bbbf05c5dbef", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Ismael Bogan III", "nickname": "Armando", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:26.228Z", "user_id": "auth0|267902a9-d6a3-4206-a272-bbbf05c5dbef", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597629}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:26.826Z", "email": "kris.ratke@gmail.com", "email_verified": false, "family_name": "Turcotte", "given_name": "Quentin", "identities": {"user_id": "c54f2299-2c1a-4db7-b904-92235c90465d", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Brendan Stark DVM", "nickname": "Tiara", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:26.826Z", "user_id": "auth0|c54f2299-2c1a-4db7-b904-92235c90465d", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597629}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:28.314Z", "email": "christy38@hotmail.com", "email_verified": false, "family_name": "Paucek", "given_name": "Ara", "identities": {"user_id": "4b6f14da-75c1-4fdc-9b42-a890930051d8", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Cynthia Herman", "nickname": "Andreanne", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:28.314Z", "user_id": "auth0|4b6f14da-75c1-4fdc-9b42-a890930051d8", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597629}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:28.857Z", "email": "kirstin.crist62@gmail.com", "email_verified": false, "family_name": "Jacobi", "given_name": "Asia", "identities": {"user_id": "63349601-2daa-4fea-9a8c-6d5904d9f52d", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Geneva Block", "nickname": "Walter", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:28.857Z", "user_id": "auth0|63349601-2daa-4fea-9a8c-6d5904d9f52d", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597629}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:29.365Z", "email": "trevor.cummings68@yahoo.com", "email_verified": false, "family_name": "Crona", "given_name": "Lucious", "identities": {"user_id": "561580da-457b-4fe9-993d-14315d914f91", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Myra Jones", "nickname": "Chaz", "picture": 
"https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:29.365Z", "user_id": "auth0|561580da-457b-4fe9-993d-14315d914f91", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597629}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:29.809Z", "email": "toy88@gmail.com", "email_verified": false, "family_name": "Leannon", "given_name": "Desiree", "identities": {"user_id": "374793eb-62fa-4818-9fd0-ff1fbc53c522", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Kellie Champlin", "nickname": "Dennis", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:29.810Z", "user_id": "auth0|374793eb-62fa-4818-9fd0-ff1fbc53c522", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597630}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-21T04:10:30.240Z", "email": "jenifer.huel11@yahoo.com", "email_verified": false, "family_name": "Sauer", "given_name": "Jordane", "identities": {"user_id": "64738f61-34af-495a-a22e-1f5085fa9a97", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Catherine Hayes", "nickname": "Trystan", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-21T04:10:30.240Z", "user_id": "auth0|64738f61-34af-495a-a22e-1f5085fa9a97", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597630}} -{"type": "RECORD", "record": {"stream": "users", "data": {"blocked": false, "created_at": "2022-10-23T04:14:20.799Z", "email": "ebony_aufderhar12@yahoo.com", "email_verified": false, "family_name": "Price", "given_name": "Antonietta", "identities": {"user_id": "b1616e22-34af-4e19-812e-6c8c91fe2192", "connection": "Username-Password-Authentication", "provider": "auth0", "isSocial": false}, "name": "Israel Cole", "nickname": "Lue", "picture": "https://secure.gravatar.com/avatar/15626c5e0c749cb912f9d1ad48dba440?s=480&r=pg&d=https%3A%2F%2Fssl.gstatic.com%2Fs2%2Fprofiles%2Fimages%2Fsilhouette80.png", "updated_at": "2022-10-23T04:14:20.799Z", "user_id": "auth0|b1616e22-34af-4e19-812e-6c8c91fe2192", "user_metadata": {}, "app_metadata": {}}, "emitted_at": 1666743597630}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/destination-heap-analytics/setup.py b/airbyte-integrations/connectors/destination-heap-analytics/setup.py deleted file mode 100644 index fd5c3f35477e2..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/setup.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - - -from setuptools import find_packages, setup - -MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.2", -] - -TEST_REQUIREMENTS = [ - "pytest~=6.1", - "pytest-mock~=3.6.1", -] - -setup( - name="destination_heap_analytics", - description="Destination implementation for Heap Analytics.", - author="Airbyte", - author_email="contact@airbyte.io", - packages=find_packages(), - install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json"]}, - extras_require={ - "tests": TEST_REQUIREMENTS, - }, -) diff --git a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_client.py b/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_client.py deleted file mode 100644 index 60ad79895a237..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_client.py +++ /dev/null @@ -1,54 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -from json import load -from typing import Dict - -from destination_heap_analytics.client import HeapClient -from pytest import fixture - - -@fixture(scope="module") -def config_events() -> Dict[str, str]: - with open( - "sample_files/config-events.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def config_aap() -> Dict[str, str]: - with open( - "sample_files/config-aap.json", - ) as f: - yield load(f) - - -@fixture(scope="module") -def config_aup() -> Dict[str, str]: - with open( - "sample_files/config-aup.json", - ) as f: - yield load(f) - - -class TestHeapClient: - def test_constructor(self, config_events, config_aup, config_aap): - client = HeapClient(**config_events) - assert client.app_id == "11" - assert client.api_type == "track" - assert client.check_endpoint == "https://heapanalytics.com/api/track" - assert client.api_endpoint == "https://heapanalytics.com/api/track" - - client = HeapClient(**config_aup) - assert client.app_id == "11" - assert client.api_type == "add_user_properties" - assert client.check_endpoint == "https://heapanalytics.com/api/track" - assert client.api_endpoint == "https://heapanalytics.com/api/add_user_properties" - - client = HeapClient(**config_aap) - assert client.app_id == "11" - assert client.api_type == "add_account_properties" - assert client.check_endpoint == "https://heapanalytics.com/api/track" - assert client.api_endpoint == "https://heapanalytics.com/api/add_account_properties" diff --git a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_parse_json.py b/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_parse_json.py deleted file mode 100644 index 726367afd2945..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_parse_json.py +++ /dev/null @@ -1,224 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - - -import pendulum -from destination_heap_analytics.utils import parse_aap_json, parse_aup_json, parse_event_json, parse_property_json - -user = { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "email_verified": False, - "family_name": "Blanda", - "given_name": "Bradly", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", -} - - -class TestParsePropertyJson: - data = { - "user_id": "4ce74b28-bc00-4bbf-8a01-712dae975291", - "connection": "Username-Password-Authentication", - "provider": "auth0", - "isSocial": False, - } - - def test_parse_all_properties(self): - columns = "*".split(",") - assert parse_property_json(data=self.data, property_columns=columns) == self.data - - def test_parse_selective_properties(self): - columns = "user_id,provider,isSocial".split(",") - assert parse_property_json(data=self.data, property_columns=columns) == { - "user_id": "4ce74b28-bc00-4bbf-8a01-712dae975291", - "provider": "auth0", - "isSocial": False, - } - - def test_parse_missing_properties(self): - columns = "uSeR_iD,identity_provider,isAuthenticated".split(",") - assert parse_property_json(data=self.data, property_columns=columns) == {} - - -class TestParseEventJson: - def test_parse_all_properties(self): - columns = "*".split(",") - assert parse_event_json( - data=user, property_columns=columns, event_column="family_name", identity_column="email", timestamp_column="created_at" - ) == { - "event": "Blanda", - "identity": "beryl_becker95@yahoo.com", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "email_verified": False, - "family_name": "Blanda", - "given_name": "Bradly", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - "timestamp": "2022-10-21T04:08:58.994Z", - } - - def test_parse_selective_properties(self): - columns = "blocked,email,created_at,user_id".split(",") - assert parse_event_json( - data=user, property_columns=columns, event_column="family_name", identity_column="email", timestamp_column="created_at" - ) == { - "event": "Blanda", - "identity": "beryl_becker95@yahoo.com", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - "timestamp": "2022-10-21T04:08:58.994Z", - } - - def test_parse_missing_properties(self): - columns = "uSeR_iD,identity_provider,isAuthenticated".split(",") - assert parse_event_json( - data=user, property_columns=columns, event_column="family_name", identity_column="email", timestamp_column="created_at" - ) == { - "event": "Blanda", - "identity": "beryl_becker95@yahoo.com", - "properties": {}, - "timestamp": "2022-10-21T04:08:58.994Z", - } - - def test_parse_missing_identity(self): - columns = "*".split(",") - assert ( - parse_event_json( - data=user, property_columns=columns, event_column="family_name", identity_column="UsEr_id", timestamp_column="created_at" - ) - is None - ) - - def test_parse_missing_event(self): - columns = "*".split(",") - assert ( - parse_event_json( - data=user, property_columns=columns, event_column="order_name", identity_column="email", timestamp_column="created_at" - ) - is None - ) - - def test_parse_missing_timestamp(self): - known = pendulum.datetime(2023, 5, 21, 12) - pendulum.set_test_now(known) - columns = "*".split(",") - assert parse_event_json( - data=user, property_columns=columns, event_column="family_name", identity_column="email", 
timestamp_column="updated_at" - ) == { - "event": "Blanda", - "identity": "beryl_becker95@yahoo.com", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "email_verified": False, - "family_name": "Blanda", - "given_name": "Bradly", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - "timestamp": "2023-05-21T12:00:00Z", - } - pendulum.set_test_now() - - -class TestParseAupJson: - def test_parse_all_properties(self): - columns = "*".split(",") - assert parse_aup_json(data=user, property_columns=columns, identity_column="user_id",) == { - "identity": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "email_verified": False, - "family_name": "Blanda", - "given_name": "Bradly", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - } - - def test_parse_selective_properties(self): - columns = "blocked,email,created_at,user_id".split(",") - assert parse_aup_json(data=user, property_columns=columns, identity_column="user_id",) == { - "identity": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - } - - def test_parse_missing_properties(self): - columns = "uSeR_iD,identity_provider,isAuthenticated".split(",") - assert parse_aup_json(data=user, property_columns=columns, identity_column="user_id",) == { - "identity": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - "properties": {}, - } - - def test_parse_missing_account_id(self): - columns = "*".split(",") - assert ( - parse_aup_json( - data=user, - property_columns=columns, - identity_column="UsEr_id", - ) - is None - ) - - -class TestParseAapJson: - def test_parse_all_properties(self): - columns = "*".split(",") - assert parse_aap_json(data=user, property_columns=columns, account_id_column="user_id",) == { - "account_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "email_verified": False, - "family_name": "Blanda", - "given_name": "Bradly", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - } - - def test_parse_selective_properties(self): - columns = "blocked,email,created_at,user_id".split(",") - assert parse_aap_json(data=user, property_columns=columns, account_id_column="user_id",) == { - "account_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - "properties": { - "blocked": False, - "created_at": "2022-10-21T04:08:58.994Z", - "email": "beryl_becker95@yahoo.com", - "user_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - }, - } - - def test_parse_missing_properties(self): - columns = "uSeR_iD,identity_provider,isAuthenticated".split(",") - assert parse_aap_json(data=user, property_columns=columns, account_id_column="user_id",) == { - "account_id": "auth0|4ce74b28-bc00-4bbf-8a01-712dae975291", - "properties": {}, - } - - def test_parse_missing_account_id(self): - columns = "*".split(",") - assert ( - parse_aap_json( - data=user, - property_columns=columns, - account_id_column="UsEr_id", - ) - is None - ) diff --git a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_utils.py b/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_utils.py deleted file mode 100644 index 
3b29bcb66ffb2..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/test_utils.py +++ /dev/null @@ -1,106 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - -import pendulum -from destination_heap_analytics.utils import datetime_to_string, flatten_json - - -class TestDatetimeToString: - def test_min_date_time_to_string(self): - assert datetime_to_string(pendulum.DateTime.min) == "0001-01-01T00:00:00Z" - - def test_valid_date_time_to_string(self): - in_utc = pendulum.datetime(2022, 10, 26, 3, 6, 59) - assert datetime_to_string(in_utc) == "2022-10-26T03:06:59Z" - - -class TestFlattenJson: - def test_flatten_none(self): - assert flatten_json({"myUndefined": None}) == {"myUndefined": None} - assert flatten_json({"myNull": None}) == {"myNull": None} - - def test_flatten_number(self): - assert flatten_json({"myNumber": 1}) == {"myNumber": 1} - - def test_flatten_string(self): - assert flatten_json({"myString": "1"}) == {"myString": "1"} - - def test_flatten_boolean(self): - assert flatten_json({"myTrue": True}) == {"myTrue": True} - assert flatten_json({"myFalse": False}) == {"myFalse": False} - - def test_flatten_array_of_nulls(self): - assert flatten_json({"myNulls": [None, 1, None, 3]}) == {"myNulls.0": None, "myNulls.1": 1, "myNulls.2": None, "myNulls.3": 3} - - def test_flatten_array_of_numbers(self): - assert flatten_json({"myNumbers": [1, 2, 3, 4]}) == {"myNumbers.0": 1, "myNumbers.1": 2, "myNumbers.2": 3, "myNumbers.3": 4} - - def test_flatten_array_of_strings(self): - assert flatten_json({"myStrings": ["a", "1", "b", "2"]}) == { - "myStrings.0": "a", - "myStrings.1": "1", - "myStrings.2": "b", - "myStrings.3": "2", - } - - def test_flatten_array_of_booleans(self): - assert flatten_json({"myBools": [True, False, True, False]}) == { - "myBools.0": True, - "myBools.1": False, - "myBools.2": True, - "myBools.3": False, - } - - def test_flatten_a_complex_object(self): - embeded_object = { - "firstName": "John", - "middleName": "", - "lastName": "Green", - "car": { - "make": "Honda", - "model": "Civic", - "year": None, - "revisions": [ - {"miles": 10150, "code": "REV01", "changes": 0, "firstTime": True}, - { - "miles": 20021, - "code": "REV02", - "firstTime": False, - "changes": [ - {"type": "asthetic", "desc": "Left tire cap", "price": 123.45}, - {"type": "mechanic", "desc": "Engine pressure regulator", "engineer": None}, - ], - }, - ], - }, - "visits": [{"date": "2015-01-01", "dealer": "DEAL-001", "useCoupon": True}, {"date": "2015-03-01", "dealer": "DEAL-002"}], - } - assert flatten_json(embeded_object) == ( - { - "car.make": "Honda", - "car.model": "Civic", - "car.revisions.0.changes": 0, - "car.revisions.0.code": "REV01", - "car.revisions.0.miles": 10150, - "car.revisions.0.firstTime": True, - "car.revisions.1.changes.0.desc": "Left tire cap", - "car.revisions.1.changes.0.price": 123.45, - "car.revisions.1.changes.0.type": "asthetic", - "car.revisions.1.changes.1.desc": "Engine pressure regulator", - "car.revisions.1.changes.1.engineer": None, - "car.revisions.1.changes.1.type": "mechanic", - "car.revisions.1.firstTime": False, - "car.revisions.1.code": "REV02", - "car.revisions.1.miles": 20021, - "car.year": None, - "firstName": "John", - "lastName": "Green", - "middleName": "", - "visits.0.date": "2015-01-01", - "visits.0.dealer": "DEAL-001", - "visits.0.useCoupon": True, - "visits.1.date": "2015-03-01", - "visits.1.dealer": "DEAL-002", - } - ) diff --git 
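The deleted test_utils.py above pins down the flattening convention the Heap Analytics destination relied on: nested objects collapse into dot-separated keys, array elements get their index as a suffix, and scalars (including nulls) keep their values. For illustration only, here is a rough Jackson-based re-expression of that convention; the connector's real implementation lived in the (also deleted) Python utils module, and the FlattenSketch class below is purely hypothetical.

// Hedged sketch only: the dot-notation flattening described by the deleted tests,
// re-expressed with Jackson purely for illustration.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

final class FlattenSketch {

  private static final ObjectMapper MAPPER = new ObjectMapper();

  // e.g. {"car": {"make": "Honda"}, "visits": [{"dealer": "DEAL-001"}]}
  //   -> {"car.make": "Honda", "visits.0.dealer": "DEAL-001"}
  static ObjectNode flatten(final JsonNode node) {
    final ObjectNode out = MAPPER.createObjectNode();
    flattenInto(node, "", out);
    return out;
  }

  private static void flattenInto(final JsonNode node, final String prefix, final ObjectNode out) {
    if (node.isObject()) {
      node.fields().forEachRemaining(entry ->
          flattenInto(entry.getValue(), prefix.isEmpty() ? entry.getKey() : prefix + "." + entry.getKey(), out));
    } else if (node.isArray()) {
      for (int i = 0; i < node.size(); i++) {
        flattenInto(node.get(i), prefix.isEmpty() ? String.valueOf(i) : prefix + "." + i, out);
      }
    } else {
      // Scalars and nulls keep their value under the flattened key, matching the deleted tests.
      out.set(prefix, node);
    }
  }
}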
a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/unit_test.py deleted file mode 100644 index dddaea0060fa1..0000000000000 --- a/airbyte-integrations/connectors/destination-heap-analytics/unit_tests/unit_test.py +++ /dev/null @@ -1,7 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. -# - - -def test_example_method(): - assert True diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java index 556cc15def618..f2c6a6d49f87c 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/AbstractJdbcDestination.java @@ -8,6 +8,7 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.exceptions.ConnectionErrorException; +import io.airbyte.commons.json.Jsons; import io.airbyte.commons.map.MoreMaps; import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.DefaultJdbcDatabase; @@ -21,8 +22,10 @@ import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.sql.SQLException; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.UUID; @@ -84,11 +87,38 @@ public AirbyteConnectionStatus check(final JsonNode config) { } } + /** + * This method is deprecated. It verifies table creation, but not insert right to a newly created + * table. Use attemptTableOperations with the attemptInsert argument instead. + */ + @Deprecated public static void attemptSQLCreateAndDropTableOperations(final String outputSchema, final JdbcDatabase database, final NamingConventionTransformer namingResolver, final SqlOperations sqlOps) throws Exception { + attemptTableOperations(outputSchema, database, namingResolver, sqlOps, false); + } + + /** + * Verifies if provided creds has enough permissions. Steps are: 1. Create schema if not exists. 2. + * Create test table. 3. Insert dummy record to newly created table if "attemptInsert" set to true. + * 4. Delete table created on step 2. + * + * @param outputSchema - schema to tests against. + * @param database - database to tests against. + * @param namingResolver - naming resolver. + * @param sqlOps - SqlOperations object + * @param attemptInsert - set true if need to make attempt to insert dummy records to newly created + * table. Set false to skip insert step. 
+ * @throws Exception + */ + public static void attemptTableOperations(final String outputSchema, + final JdbcDatabase database, + final NamingConventionTransformer namingResolver, + final SqlOperations sqlOps, + final boolean attemptInsert) + throws Exception { // verify we have write permissions on the target schema by creating a table with a random name, // then dropping that table try { @@ -100,7 +130,14 @@ public static void attemptSQLCreateAndDropTableOperations(final String outputSch final String outputTableName = namingResolver.getIdentifier("_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "")); sqlOps.createSchemaIfNotExists(database, outputSchema); sqlOps.createTableIfNotExists(database, outputSchema, outputTableName); - sqlOps.dropTableIfExists(database, outputSchema, outputTableName); + // verify if user has permission to make SQL INSERT queries + try { + if (attemptInsert) { + sqlOps.insertRecords(database, List.of(getDummyRecord()), outputSchema, outputTableName); + } + } finally { + sqlOps.dropTableIfExists(database, outputSchema, outputTableName); + } } catch (final SQLException e) { if (Objects.isNull(e.getCause()) || !(e.getCause() instanceof SQLException)) { throw new ConnectionErrorException(e.getSQLState(), e.getErrorCode(), e.getMessage(), e); @@ -113,6 +150,19 @@ public static void attemptSQLCreateAndDropTableOperations(final String outputSch } } + /** + * Generates a dummy AirbyteRecordMessage with random values. + * + * @return AirbyteRecordMessage object with dummy values that may be used to test insert permission. + */ + private static AirbyteRecordMessage getDummyRecord() { + final JsonNode dummyDataToInsert = Jsons.deserialize("{ \"field1\": true }"); + return new AirbyteRecordMessage() + .withStream("stream1") + .withData(dummyDataToInsert) + .withEmittedAt(1602637589000L); + } + protected DataSource getDataSource(final JsonNode config) { final JsonNode jdbcConfig = toJdbcConfig(config); return DataSourceFactory.create( diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java index 54f9348b9d148..c3e1a0febce57 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/JdbcBufferedConsumerFactory.java @@ -12,7 +12,6 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; import io.airbyte.integrations.destination.buffered_stream_consumer.OnCloseFunction; @@ -22,6 +21,7 @@ import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; diff --git 
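The hunk above replaces the old create-and-drop permission probe with attemptTableOperations, which creates the schema and a throwaway test table, optionally writes one dummy record to prove INSERT rights, and always drops the table afterwards. Below is a minimal sketch of how a destination implementation might invoke it; the ExampleJdbcCheck class, the example_schema name, and the way the database, transformer, and SqlOperations are obtained are illustrative assumptions, while the two static methods and their signatures come from this diff.

// Hedged sketch only: wiring the new permission check from a destination's check() path.
// Everything except the two AbstractJdbcDestination calls is a placeholder for illustration.
import io.airbyte.db.jdbc.JdbcDatabase;
import io.airbyte.integrations.destination.NamingConventionTransformer;
import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination;
import io.airbyte.integrations.destination.jdbc.SqlOperations;

final class ExampleJdbcCheck {

  static void verifyWritePermissions(final JdbcDatabase database,
                                     final NamingConventionTransformer namingResolver,
                                     final SqlOperations sqlOps) throws Exception {
    // New behaviour: create schema + test table, insert a dummy record, then drop the table.
    AbstractJdbcDestination.attemptTableOperations("example_schema", database, namingResolver, sqlOps, true);

    // Deprecated equivalent kept for compatibility: create/drop only, no INSERT attempt.
    AbstractJdbcDestination.attemptSQLCreateAndDropTableOperations("example_schema", database, namingResolver, sqlOps);
  }
}

Passing attemptInsert = false reproduces the old behaviour, which is exactly what the deprecated wrapper now delegates to.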
a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java index 6d8783ec12bfd..abd4e4f183e06 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyConsumerFactory.java @@ -9,7 +9,6 @@ import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.ExtendedNameTransformer; import io.airbyte.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; import io.airbyte.integrations.destination.buffered_stream_consumer.CheckAndRemoveRecordWriter; @@ -20,6 +19,7 @@ import io.airbyte.integrations.destination.record_buffer.InMemoryRecordBufferingStrategy; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.ArrayList; import java.util.HashMap; @@ -77,7 +77,7 @@ private static Map createWrite final String stagingFolder = UUID.randomUUID().toString(); for (final var configuredStream : catalog.getStreams()) { final var stream = configuredStream.getStream(); - final var pair = AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream); + final var pair = AirbyteStreamNameNamespacePair.fromAirbyteStream(stream); final var copier = streamCopierFactory.create(defaultSchema, config, stagingFolder, configuredStream, namingResolver, database, sqlOperations); pairToCopier.put(pair, copier); diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyDestination.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyDestination.java index 1ed3752ab0a4b..b0677bf8287ba 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyDestination.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/CopyDestination.java @@ -14,6 +14,7 @@ import io.airbyte.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.protocol.models.AirbyteConnectionStatus; @@ -68,7 +69,7 @@ public AirbyteConnectionStatus check(final JsonNode config) { final JdbcDatabase database = getDatabase(dataSource); final var nameTransformer = getNameTransformer(); final var outputSchema = nameTransformer.convertStreamName(config.get(schemaFieldName).asText()); - AbstractJdbcDestination.attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, getSqlOperations()); + performCreateInsertTestOnDestination(outputSchema, database, 
nameTransformer); return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); } catch (final ConnectionErrorException ex) { @@ -92,4 +93,11 @@ public AirbyteConnectionStatus check(final JsonNode config) { } } + protected void performCreateInsertTestOnDestination(final String outputSchema, + final JdbcDatabase database, + final NamingConventionTransformer nameTransformer) + throws Exception { + AbstractJdbcDestination.attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, getSqlOperations()); + } + } diff --git a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java index 39f1c4dc42c0d..f0eeb15666793 100644 --- a/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java +++ b/airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/staging/StagingConsumerFactory.java @@ -11,7 +11,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; import io.airbyte.integrations.destination.buffered_stream_consumer.OnCloseFunction; @@ -21,6 +20,7 @@ import io.airbyte.integrations.destination.record_buffer.SerializedBufferingStrategy; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; diff --git a/airbyte-integrations/connectors/destination-kafka/src/main/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumer.java b/airbyte-integrations/connectors/destination-kafka/src/main/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumer.java index 005b558ace6d1..d4ceb260a109f 100644 --- a/airbyte-integrations/connectors/destination-kafka/src/main/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumer.java +++ b/airbyte-integrations/connectors/destination-kafka/src/main/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumer.java @@ -7,11 +7,11 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.HashMap; import java.util.Map; @@ -80,7 +80,7 @@ protected void acceptTracked(final AirbyteMessage airbyteMessage) { Map buildTopicMap() { return catalog.getStreams().stream() - .map(stream -> 
AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream.getStream())) + .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream())) .collect(Collectors.toMap(Function.identity(), pair -> nameTransformer.getIdentifier(topicPattern .replaceAll("\\{namespace}", Optional.ofNullable(pair.getNamespace()).orElse("")) diff --git a/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java b/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java index f06345317bff8..ea00d2317964c 100644 --- a/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java +++ b/airbyte-integrations/connectors/destination-kafka/src/test/java/io/airbyte/integrations/destination/kafka/KafkaRecordConsumerTest.java @@ -13,13 +13,13 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.jackson.MoreMappers; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.StandardNameTransformer; import io.airbyte.integrations.standardtest.destination.PerStreamStateMessageTest; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.Field; diff --git a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java index 2bd0360f5a43b..b6718691e7413 100644 --- a/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java +++ b/airbyte-integrations/connectors/destination-kinesis/src/main/java/io/airbyte/integrations/destination/kinesis/KinesisMessageConsumer.java @@ -5,9 +5,9 @@ package io.airbyte.integrations.destination.kinesis; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.time.Instant; import java.util.Map; diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile index 6b4ecd23bc7b7..bb4f3c0cea1f1 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-mongodb-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.9 LABEL io.airbyte.name=airbyte/destination-mongodb-strict-encrypt diff --git 
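A few hunks above, CopyDestination's check() now delegates to a protected performCreateInsertTestOnDestination hook whose default still calls the deprecated create-and-drop check, so copy-based destinations keep their current behaviour until they opt in. A sketch of how a subclass might opt into the stricter insert probe is shown here; the ExampleCopyDestination class is hypothetical, and only the hook signature, getSqlOperations(), and attemptTableOperations come from this diff.

// Hedged sketch only: a hypothetical CopyDestination subclass opting into the insert-permission probe.
import io.airbyte.db.jdbc.JdbcDatabase;
import io.airbyte.integrations.destination.NamingConventionTransformer;
import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination;
import io.airbyte.integrations.destination.jdbc.copy.CopyDestination;

abstract class ExampleCopyDestination extends CopyDestination {

  @Override
  protected void performCreateInsertTestOnDestination(final String outputSchema,
                                                      final JdbcDatabase database,
                                                      final NamingConventionTransformer nameTransformer)
      throws Exception {
    // attemptInsert = true: also write (and clean up) a dummy record instead of only creating and dropping a table.
    AbstractJdbcDestination.attemptTableOperations(outputSchema, database, nameTransformer, getSqlOperations(), true);
  }
}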
a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.destination.mongodb/MongodbDestinationStrictEncrypt.java b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.destination.mongodb/MongodbDestinationStrictEncrypt.java index 2cebbbd508bd6..69df91def8a0d 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.destination.mongodb/MongodbDestinationStrictEncrypt.java +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.destination.mongodb/MongodbDestinationStrictEncrypt.java @@ -4,11 +4,16 @@ package io.airbyte.integrations.destination.mongodb; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.mongodb.MongoUtils; +import io.airbyte.db.mongodb.MongoUtils.MongoInstanceType; import io.airbyte.integrations.base.Destination; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.spec_modification.SpecModifyingDestination; +import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.ConnectorSpecification; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,6 +26,18 @@ public MongodbDestinationStrictEncrypt() { super(MongodbDestination.sshWrappedDestination()); } + @Override + public AirbyteConnectionStatus check(final JsonNode config) throws Exception { + final JsonNode instanceConfig = config.get(MongoUtils.INSTANCE_TYPE); + final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(MongoUtils.INSTANCE).asText()); + // If the MongoDb destination connector is not set up to use a TLS connection, then check should + // fail + if (instance.equals(MongoInstanceType.STANDALONE) && !MongoUtils.tlsEnabledForStandaloneInstance(config, instanceConfig)) { + throw new ConfigErrorException("TLS connection must be used to read from MongoDB."); + } + return super.check(config); + } + @Override public ConnectorSpecification modifySpec(final ConnectorSpecification originalSpec) throws Exception { final ConnectorSpecification spec = Jsons.clone(originalSpec); diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java index 1ce8d1507a4cb..7446fd8503e97 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/destination/mongodb/MongodbDestinationStrictEncryptAcceptanceTest.java @@ -5,13 +5,18 @@ package io.airbyte.integrations.destination.mongodb; import static com.mongodb.client.model.Projections.excludeId; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable; import com.fasterxml.jackson.databind.JsonNode; +import 
com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import com.mongodb.client.MongoCursor; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.mongodb.MongoDatabase; +import io.airbyte.db.mongodb.MongoUtils; import io.airbyte.db.mongodb.MongoUtils.MongoInstanceType; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; import java.io.IOException; @@ -21,6 +26,7 @@ import java.util.List; import org.bson.Document; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class MongodbDestinationStrictEncryptAcceptanceTest extends DestinationAcceptanceTest { @@ -102,9 +108,27 @@ protected List retrieveRecords(final TestDestinationEnv testEnv, return result; } + @Test + void testCheck() throws Exception { + final JsonNode instanceConfig = Jsons.jsonNode(ImmutableMap.builder() + .put("instance", MongoInstanceType.STANDALONE.getType()) + .put("tls", false) + .build()); + + final JsonNode invalidStandaloneConfig = getConfig(); + + ((ObjectNode) invalidStandaloneConfig).put(MongoUtils.INSTANCE_TYPE, instanceConfig); + + final Throwable throwable = catchThrowable(() -> new MongodbDestinationStrictEncrypt().check(invalidStandaloneConfig)); + assertThat(throwable).isInstanceOf(ConfigErrorException.class); + assertThat(((ConfigErrorException) throwable) + .getDisplayMessage() + .contains("TLS connection must be used to read from MongoDB.")); + } + @Override protected void setup(final TestDestinationEnv testEnv) { - var credentials = String.format("%s:%s@", config.get(AUTH_TYPE).get(JdbcUtils.USERNAME_KEY).asText(), + final var credentials = String.format("%s:%s@", config.get(AUTH_TYPE).get(JdbcUtils.USERNAME_KEY).asText(), config.get(AUTH_TYPE).get(JdbcUtils.PASSWORD_KEY).asText()); final String connectionString = String.format("mongodb+srv://%s%s/%s?retryWrites=true&w=majority&tls=true", credentials, diff --git a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test/resources/expected_spec.json index 10f94c577c35c..825559049659f 100644 --- a/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/destination-mongodb-strict-encrypt/src/test/resources/expected_spec.json @@ -135,6 +135,120 @@ } } ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": 
"SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] } } } diff --git a/airbyte-integrations/connectors/destination-mongodb/Dockerfile b/airbyte-integrations/connectors/destination-mongodb/Dockerfile index cff1b88e848c3..e9dca239d3679 100644 --- a/airbyte-integrations/connectors/destination-mongodb/Dockerfile +++ b/airbyte-integrations/connectors/destination-mongodb/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION destination-mongodb COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.9 LABEL io.airbyte.name=airbyte/destination-mongodb diff --git a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbDestination.java b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbDestination.java index 0a46d2700d901..9743e90d0099f 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbDestination.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbDestination.java @@ -18,10 +18,10 @@ import io.airbyte.commons.util.MoreIterators; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.mongodb.MongoDatabase; +import io.airbyte.db.mongodb.MongoUtils; import io.airbyte.db.mongodb.MongoUtils.MongoInstanceType; import io.airbyte.integrations.BaseConnector; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.integrations.base.Destination; import 
io.airbyte.integrations.base.IntegrationRunner; @@ -30,6 +30,7 @@ import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import io.airbyte.protocol.models.DestinationSyncMode; @@ -47,19 +48,6 @@ public class MongodbDestination extends BaseConnector implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(MongodbDestination.class); - private static final String MONGODB_SERVER_URL = "mongodb://%s%s:%s/%s?authSource=admin&ssl=%s"; - private static final String MONGODB_CLUSTER_URL = "mongodb+srv://%s%s/%s?retryWrites=true&w=majority&tls=true"; - private static final String MONGODB_REPLICA_URL = "mongodb://%s%s/%s?authSource=admin&directConnection=false&ssl=true"; - private static final String INSTANCE_TYPE = "instance_type"; - private static final String INSTANCE = "instance"; - private static final String CLUSTER_URL = "cluster_url"; - private static final String SERVER_ADDRESSES = "server_addresses"; - private static final String REPLICA_SET = "replica_set"; - private static final String AUTH_TYPE = "auth_type"; - private static final String AUTHORIZATION = "authorization"; - private static final String LOGIN_AND_PASSWORD = "login/password"; - private static final String AIRBYTE_DATA_HASH = "_airbyte_data_hash"; - private final MongodbNameTransformer namingResolver; public static Destination sshWrappedDestination() { @@ -132,11 +120,11 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config, final Set documentsHash = new HashSet<>(); try (final MongoCursor cursor = collection.find().projection(excludeId()).iterator()) { while (cursor.hasNext()) { - documentsHash.add(cursor.next().get(AIRBYTE_DATA_HASH, String.class)); + documentsHash.add(cursor.next().get(MongoUtils.AIRBYTE_DATA_HASH, String.class)); } } - writeConfigs.put(AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream), + writeConfigs.put(AirbyteStreamNameNamespacePair.fromAirbyteStream(stream), new MongodbWriteConfig(collectionName, tmpCollectionName, configStream.getDestinationSyncMode(), collection, documentsHash)); } return new MongodbRecordConsumer(writeConfigs, database, catalog, outputRecordCollector); @@ -150,18 +138,18 @@ private MongoDatabase getDatabase(final JsonNode config) { @VisibleForTesting String getConnectionString(final JsonNode config) { - final var credentials = config.get(AUTH_TYPE).get(AUTHORIZATION).asText().equals(LOGIN_AND_PASSWORD) - ? String.format("%s:%s@", config.get(AUTH_TYPE).get(JdbcUtils.USERNAME_KEY).asText(), - config.get(AUTH_TYPE).get(JdbcUtils.PASSWORD_KEY).asText()) + final var credentials = config.get(MongoUtils.AUTH_TYPE).get(MongoUtils.AUTHORIZATION).asText().equals(MongoUtils.LOGIN_AND_PASSWORD) + ? 
String.format("%s:%s@", config.get(MongoUtils.AUTH_TYPE).get(JdbcUtils.USERNAME_KEY).asText(), + config.get(MongoUtils.AUTH_TYPE).get(JdbcUtils.PASSWORD_KEY).asText()) : StringUtils.EMPTY; // backward compatibility check // the old mongo db spec only includes host, port, database, and auth_type // the new spec replaces host and port with the instance_type property - if (config.has(INSTANCE_TYPE)) { + if (config.has(MongoUtils.INSTANCE_TYPE)) { return buildConnectionString(config, credentials); } else { - return String.format(MONGODB_SERVER_URL, credentials, config.get(JdbcUtils.HOST_KEY).asText(), + return String.format(MongoUtils.MONGODB_SERVER_URL, credentials, config.get(JdbcUtils.HOST_KEY).asText(), config.get(JdbcUtils.PORT_KEY).asText(), config.get(JdbcUtils.DATABASE_KEY).asText(), false); } } @@ -169,29 +157,33 @@ String getConnectionString(final JsonNode config) { private String buildConnectionString(final JsonNode config, final String credentials) { final StringBuilder connectionStrBuilder = new StringBuilder(); - final JsonNode instanceConfig = config.get(INSTANCE_TYPE); - final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(INSTANCE).asText()); + final JsonNode instanceConfig = config.get(MongoUtils.INSTANCE_TYPE); + final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(MongoUtils.INSTANCE).asText()); switch (instance) { case STANDALONE -> { // if there is no TLS present in spec, TLS should be enabled by default for strict encryption final var tls = !instanceConfig.has(JdbcUtils.TLS_KEY) || instanceConfig.get(JdbcUtils.TLS_KEY).asBoolean(); connectionStrBuilder.append( - String.format(MONGODB_SERVER_URL, credentials, instanceConfig.get(JdbcUtils.HOST_KEY).asText(), + String.format(MongoUtils.MONGODB_SERVER_URL, credentials, instanceConfig.get(JdbcUtils.HOST_KEY).asText(), instanceConfig.get(JdbcUtils.PORT_KEY).asText(), config.get(JdbcUtils.DATABASE_KEY).asText(), tls)); } case REPLICA -> { connectionStrBuilder.append( - String.format(MONGODB_REPLICA_URL, credentials, instanceConfig.get(SERVER_ADDRESSES).asText(), + String.format(MongoUtils.MONGODB_REPLICA_URL, + credentials, + instanceConfig.get(MongoUtils.SERVER_ADDRESSES).asText(), config.get(JdbcUtils.DATABASE_KEY).asText())); - if (instanceConfig.has(REPLICA_SET)) { - connectionStrBuilder.append(String.format("&replicaSet=%s", instanceConfig.get(REPLICA_SET).asText())); + if (instanceConfig.has(MongoUtils.REPLICA_SET)) { + connectionStrBuilder.append(String.format("&replicaSet=%s", instanceConfig.get(MongoUtils.REPLICA_SET).asText())); } } case ATLAS -> { connectionStrBuilder.append( - String.format(MONGODB_CLUSTER_URL, credentials, instanceConfig.get(CLUSTER_URL).asText(), config.get(JdbcUtils.DATABASE_KEY).asText())); + String.format(MongoUtils.MONGODB_CLUSTER_URL, credentials, + instanceConfig.get(MongoUtils.CLUSTER_URL).asText(), + config.get(JdbcUtils.DATABASE_KEY).asText())); } default -> throw new IllegalArgumentException("Unsupported instance type: " + instance); } diff --git a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java index 77749890fa4a9..858e0377f9839 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java +++ 
b/airbyte-integrations/connectors/destination-mongodb/src/main/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumer.java @@ -13,10 +13,10 @@ import io.airbyte.commons.lang.Exceptions; import io.airbyte.db.mongodb.MongoDatabase; import io.airbyte.integrations.base.AirbyteMessageConsumer; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.nio.charset.Charset; import java.util.ArrayList; diff --git a/airbyte-integrations/connectors/destination-mongodb/src/test/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumerTest.java b/airbyte-integrations/connectors/destination-mongodb/src/test/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumerTest.java index 5a334c6476e90..ade0d60fea0ab 100644 --- a/airbyte-integrations/connectors/destination-mongodb/src/test/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumerTest.java +++ b/airbyte-integrations/connectors/destination-mongodb/src/test/java/io/airbyte/integrations/destination/mongodb/MongodbRecordConsumerTest.java @@ -5,10 +5,10 @@ package io.airbyte.integrations.destination.mongodb; import io.airbyte.db.mongodb.MongoDatabase; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.standardtest.destination.PerStreamStateMessageTest; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.Map; import java.util.function.Consumer; diff --git a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java b/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java index abbabf34fd7a3..23cd92ac0d41b 100644 --- a/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java +++ b/airbyte-integrations/connectors/destination-mqtt/src/main/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumer.java @@ -9,10 +9,10 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.HashMap; import java.util.Map; @@ -99,7 +99,7 @@ protected void acceptTracked(final AirbyteMessage airbyteMessage) { Map buildTopicMap() { return catalog.getStreams().stream() - .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream.getStream())) + .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream())) .collect(Collectors.toMap(Function.identity(), pair -> config.getTopicPattern() .replaceAll("\\{namespace}", 
Optional.ofNullable(pair.getNamespace()).orElse("")) .replaceAll("\\{stream}", Optional.ofNullable(pair.getName()).orElse("")), diff --git a/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java b/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java index 9f3c2d93bd1b6..25bb402a3204f 100644 --- a/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java +++ b/airbyte-integrations/connectors/destination-mqtt/src/test/java/io/airbyte/integrations/destination/mqtt/MqttRecordConsumerTest.java @@ -14,11 +14,11 @@ import com.google.common.collect.Sets; import com.hivemq.testcontainer.junit5.HiveMQTestContainerExtension; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; diff --git a/airbyte-integrations/connectors/destination-pubsub/src/main/java/io/airbyte/integrations/destination/pubsub/PubsubConsumer.java b/airbyte-integrations/connectors/destination-pubsub/src/main/java/io/airbyte/integrations/destination/pubsub/PubsubConsumer.java index 38f78ce0a7e39..7a44c899dbf4b 100644 --- a/airbyte-integrations/connectors/destination-pubsub/src/main/java/io/airbyte/integrations/destination/pubsub/PubsubConsumer.java +++ b/airbyte-integrations/connectors/destination-pubsub/src/main/java/io/airbyte/integrations/destination/pubsub/PubsubConsumer.java @@ -16,12 +16,12 @@ import com.google.pubsub.v1.PubsubMessage; import com.google.pubsub.v1.TopicName; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.io.ByteArrayInputStream; @@ -68,7 +68,7 @@ protected void startTracked() throws Exception { .setCredentialsProvider(FixedCredentialsProvider.create(credentials)).build(); for (final ConfiguredAirbyteStream configStream : catalog.getStreams()) { final Map attrs = Maps.newHashMap(); - final var key = AirbyteStreamNameNamespacePair.fromAirbyteSteam(configStream.getStream()); + final var key = AirbyteStreamNameNamespacePair.fromAirbyteStream(configStream.getStream()); attrs.put(PubsubDestination.STREAM, key.getName()); if (!Strings.isNullOrEmpty(key.getNamespace())) { attrs.put(PubsubDestination.NAMESPACE, key.getNamespace()); diff --git a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java 
b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java index 143eeb3c74d8d..8a6c0d409e7bb 100644 --- a/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-pubsub/src/test-integration/java/io/airbyte/integrations/destination/pubsub/PubsubDestinationAcceptanceTest.java @@ -35,11 +35,11 @@ import com.google.pubsub.v1.TopicName; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.string.Strings; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.JavaBaseConstants; import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; diff --git a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java index fde2db986903c..910da6dadd8e9 100644 --- a/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java +++ b/airbyte-integrations/connectors/destination-pulsar/src/main/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumer.java @@ -5,11 +5,11 @@ package io.airbyte.integrations.destination.pulsar; import io.airbyte.commons.lang.Exceptions; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.NamingConventionTransformer; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.nio.charset.StandardCharsets; import java.util.HashMap; @@ -80,7 +80,7 @@ protected void acceptTracked(final AirbyteMessage airbyteMessage) { Map> buildProducerMap() { return catalog.getStreams().stream() - .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream.getStream())) + .map(stream -> AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream())) .collect(Collectors.toMap(Function.identity(), pair -> { String topic = nameTransformer.getIdentifier(config.getTopicPattern() .replaceAll("\\{namespace}", Optional.ofNullable(pair.getNamespace()).orElse("")) diff --git a/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java b/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java index 7542922b8bb79..85b0b6b8ca19a 100644 --- a/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java +++ 
b/airbyte-integrations/connectors/destination-pulsar/src/test/java/io/airbyte/integrations/destination/pulsar/PulsarRecordConsumerTest.java @@ -14,7 +14,6 @@ import com.google.common.collect.Streams; import com.google.common.net.InetAddresses; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.integrations.destination.StandardNameTransformer; import io.airbyte.integrations.standardtest.destination.PerStreamStateMessageTest; @@ -22,6 +21,7 @@ import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; diff --git a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisDestination.java b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisDestination.java index 0634c881f56ce..8e854a30ac787 100644 --- a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisDestination.java +++ b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisDestination.java @@ -23,7 +23,6 @@ class RedisDestination extends BaseConnector implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(RedisDestination.class); - public static void main(String[] args) throws Exception { LOGGER.info("starting destination: {}", RedisDestination.class); final Destination destination = RedisDestination.sshWrappedDestination(); diff --git a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisMessageConsumer.java b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisMessageConsumer.java index 6fa8403af831f..d6576a41b41a1 100644 --- a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisMessageConsumer.java +++ b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisMessageConsumer.java @@ -6,9 +6,9 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.time.Instant; import java.util.Map; diff --git a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisPoolManager.java b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisPoolManager.java index 877d2e78b735d..a99177d086dc3 100644 --- a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisPoolManager.java +++ 
b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisPoolManager.java @@ -21,13 +21,13 @@ static Jedis initConnection(JsonNode jsonConfig) { final String host = jsonConfig.get(PARAM_HOST).asText(); final int port = jsonConfig.get(PARAM_PORT).asInt(6379); final String username = jsonConfig.has(PARAM_USERNAME) ? jsonConfig.get(PARAM_USERNAME).asText() : ""; - final String password = jsonConfig.has(PARAM_PASSWORD)? jsonConfig.get(PARAM_PASSWORD).asText() : ""; + final String password = jsonConfig.has(PARAM_PASSWORD) ? jsonConfig.get(PARAM_PASSWORD).asText() : ""; try { if (RedisSslUtil.isSsl(jsonConfig)) { RedisSslUtil.setupCertificates(jsonConfig.get(PARAM_SSL_MODE)); - jedis = new Jedis(host ,port, CONNECTION_TIMEOUT, true); + jedis = new Jedis(host, port, CONNECTION_TIMEOUT, true); } else { - jedis = new Jedis(host ,port ,CONNECTION_TIMEOUT, false); + jedis = new Jedis(host, port, CONNECTION_TIMEOUT, false); } jedis.auth(username, password); return jedis; diff --git a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisSslUtil.java b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisSslUtil.java index c3937d6038323..53162a8075ca6 100644 --- a/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisSslUtil.java +++ b/airbyte-integrations/connectors/destination-redis/src/main/java/io/airbyte/integrations/destination/redis/RedisSslUtil.java @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.destination.redis; import static io.airbyte.integrations.destination.redis.RedisSslUtil.SslMode.VERIFY_IDENTITY; @@ -36,7 +40,8 @@ public class RedisSslUtil { private static final String TRUST_TYPE = "JKS"; /** - * set javax.net.ssl.keyStore and javax.net.ssl.trustStore based on provided ca.crt, client.crt, client.kay + * set javax.net.ssl.keyStore and javax.net.ssl.trustStore based on provided ca.crt, client.crt, + * client.kay * * @param sslModeConfig json ssl mode config */ @@ -70,8 +75,7 @@ private static String getOrGeneratePassword(final JsonNode sslModeConfig) { } /** - * The method generate certificates based on provided ca.crt, client.crt, client.key. - * Generated keys + * The method generate certificates based on provided ca.crt, client.crt, client.key. Generated keys * * @param caCertificate certificate to validate client certificate and key. * @param clientCertificate The client certificate. @@ -79,10 +83,10 @@ private static String getOrGeneratePassword(final JsonNode sslModeConfig) { * @param clientKeyPassword The client key password. 
*/ private static void initCertificateStores( - final String caCertificate, - final String clientCertificate, - final String clientKey, - final String clientKeyPassword) + final String caCertificate, + final String clientCertificate, + final String clientKey, + final String clientKeyPassword) throws IOException, InterruptedException { LOGGER.info("Try to generate {}", CLIENT_KEY_STORE); @@ -142,7 +146,7 @@ private static SslMode getSslVerifyMode(JsonNode sslModeParam) { public enum SslMode { DISABLED("disable"), - VERIFY_IDENTITY( "verify-full"); + VERIFY_IDENTITY("verify-full"); public final List spec; @@ -158,5 +162,4 @@ public static Optional bySpec(final String spec) { } - } diff --git a/airbyte-integrations/connectors/destination-redis/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redis/src/main/resources/spec.json index 540f24d678a85..d15eb61e76461 100644 --- a/airbyte-integrations/connectors/destination-redis/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/destination-redis/src/main/resources/spec.json @@ -72,7 +72,12 @@ "title": "verify-full", "additionalProperties": false, "description": "Verify-full SSL mode.", - "required": ["mode", "ca_certificate", "client_certificate", "client_key"], + "required": [ + "mode", + "ca_certificate", + "client_certificate", + "client_key" + ], "properties": { "mode": { "type": "string", diff --git a/airbyte-integrations/connectors/destination-redpanda/.dockerignore b/airbyte-integrations/connectors/destination-redpanda/.dockerignore new file mode 100644 index 0000000000000..65c7d0ad3e73c --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/.dockerignore @@ -0,0 +1,3 @@ +* +!Dockerfile +!build diff --git a/airbyte-integrations/connectors/destination-redpanda/Dockerfile b/airbyte-integrations/connectors/destination-redpanda/Dockerfile new file mode 100644 index 0000000000000..73d945f86f135 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/Dockerfile @@ -0,0 +1,18 @@ +FROM airbyte/integration-base-java:dev AS build + +WORKDIR /airbyte +ENV APPLICATION destination-redpanda + +COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar + +RUN tar xf ${APPLICATION}.tar --strip-components=1 && rm -rf ${APPLICATION}.tar + +FROM airbyte/integration-base-java:dev + +WORKDIR /airbyte +ENV APPLICATION destination-redpanda + +COPY --from=build /airbyte /airbyte + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-redpanda diff --git a/airbyte-integrations/connectors/destination-redpanda/README.md b/airbyte-integrations/connectors/destination-redpanda/README.md new file mode 100644 index 0000000000000..fed5a2daa34ea --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/README.md @@ -0,0 +1,68 @@ +# Destination Redpanda + +This is the repository for the Redpanda destination connector in Java. +For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/redpanda). + +## Local development + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-redpanda:build +``` + +#### Create credentials +**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. 
+Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. + +**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. + +### Locally running the connector docker image + +#### Build +Build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-redpanda:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-redpanda:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-redpanda:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-redpanda:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-redpanda:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +We use `JUnit` for Java tests. + +### Unit and Integration Tests +Place unit tests under `src/test/java/io/airbyte/integrations/destination/redpanda`. + +#### Acceptance Tests +Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in +`src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java`. + +### Using gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-redpanda:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-redpanda:integrationTest +``` + +## Dependency Management + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
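For contributors creating the `secrets/config.json` mentioned in the Redpanda README above, a plausible minimal sketch is shown below. It covers only the fields marked required in the connector's `spec.json`; the values are illustrative (drawn from the spec examples and the parsing defaults in `RedpandaConfig.createConfig`), and the broker addresses are placeholders for a real cluster.

```
{
  "bootstrap_servers": "redpanda-broker1:9092,redpanda-broker2:9092",
  "buffer_memory": "33554432",
  "compression_type": "none",
  "retries": 5,
  "batch_size": 16384
}
```

The spec also defines optional fields (`topic_num_partitions`, `topic_replication_factor`, and the two socket connection setup timeouts) that the acceptance test sets explicitly; see `RedpandaConfig.createConfig` in this diff for how each value is parsed.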
diff --git a/airbyte-integrations/connectors/destination-redpanda/build.gradle b/airbyte-integrations/connectors/destination-redpanda/build.gradle new file mode 100644 index 0000000000000..72006ed6beb7c --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/build.gradle @@ -0,0 +1,25 @@ +plugins { + id 'application' + id 'airbyte-docker' + id 'airbyte-integration-test-java' +} + +application { + mainClass = 'io.airbyte.integrations.destination.redpanda.RedpandaDestination' +} + +dependencies { + implementation project(':airbyte-config:config-models') + implementation project(':airbyte-protocol:protocol-models') + implementation project(':airbyte-integrations:bases:base-java') + implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) + + // https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients + implementation 'org.apache.kafka:kafka-clients:3.3.1' + implementation 'org.apache.kafka:connect-json:3.3.1' + + testImplementation "org.testcontainers:redpanda:1.17.5" + + integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') + integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-redpanda') +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java new file mode 100644 index 0000000000000..40c8b4fda2173 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaConfig.java @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import com.fasterxml.jackson.databind.JsonNode; +import java.util.Map; +import java.util.Optional; +import org.apache.kafka.clients.admin.Admin; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; + +public class RedpandaConfig { + + // host1:port1,host2:port2,... 
+ private final String bootstrapServers; + + private final long bufferMemory; + + private final String compressionType; + + private final int retries; + + private final int batchSize; + + private final Optional topicNumPartitions; + + private final Optional topicReplicationFactor; + + private final int socketConnectionSetupTimeoutMs; + + private final int socketConnectionSetupTimeoutMaxMs; + + private RedpandaConfig(String bootstrapServers, + long bufferMemory, + String compressionType, + int retries, + int batchSize, + Optional topicNumPartitions, + Optional topicReplicationFactor, + int socketConnectionSetupTimeoutMs, + int socketConnectionSetupTimeoutMaxMs) { + this.bootstrapServers = bootstrapServers; + this.bufferMemory = bufferMemory; + this.compressionType = compressionType; + this.retries = retries; + this.batchSize = batchSize; + this.topicNumPartitions = topicNumPartitions; + this.topicReplicationFactor = topicReplicationFactor; + this.socketConnectionSetupTimeoutMs = socketConnectionSetupTimeoutMs; + this.socketConnectionSetupTimeoutMaxMs = socketConnectionSetupTimeoutMaxMs; + } + + public static RedpandaConfig createConfig(JsonNode jsonConfig) { + return new RedpandaConfig( + jsonConfig.get("bootstrap_servers").asText(), + jsonConfig.get("buffer_memory").asLong(33554432L), + jsonConfig.get("compression_type").asText("none"), + jsonConfig.get("retries").asInt(5), + jsonConfig.get("batch_size").asInt(16384), + Optional.of(jsonConfig.get("topic_num_partitions").asInt(1)), + Optional.of(((Integer) jsonConfig.get("topic_replication_factor").asInt(1)).shortValue()), + jsonConfig.get("socket_connection_setup_timeout_ms").asInt(10000), + jsonConfig.get("socket_connection_setup_timeout_max_ms").asInt(30000)); + } + + public KafkaProducer createKafkaProducer() { + return new KafkaProducer<>(Map.of( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers, + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer", + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonSerializer", + ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory, + ProducerConfig.COMPRESSION_TYPE_CONFIG, compressionType, + ProducerConfig.RETRIES_CONFIG, retries, + ProducerConfig.BATCH_SIZE_CONFIG, batchSize, + ProducerConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, socketConnectionSetupTimeoutMs, + ProducerConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, socketConnectionSetupTimeoutMaxMs)); + + } + + public Admin createAdminClient() { + return AdminClient.create(Map.of( + AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers, + AdminClientConfig.RETRIES_CONFIG, retries, + AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, socketConnectionSetupTimeoutMs, + AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, socketConnectionSetupTimeoutMaxMs)); + } + + public Optional topicNumPartitions() { + return topicNumPartitions; + } + + public Optional topicReplicationFactor() { + return topicReplicationFactor; + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java new file mode 100644 index 0000000000000..c72856ce77d4c --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaDestination.java @@ -0,0 
+1,67 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.BaseConnector; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.Destination; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RedpandaDestination extends BaseConnector implements Destination { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaDestination.class); + + public static void main(String[] args) throws Exception { + new IntegrationRunner(new RedpandaDestination()).run(args); + } + + @Override + public AirbyteConnectionStatus check(JsonNode config) { + String topicName = "namespace.stream"; + RedpandaOperations redpandaOperations = null; + try { + RedpandaConfig redpandaConfig = RedpandaConfig.createConfig(config); + redpandaOperations = new RedpandaOperations(redpandaConfig); + redpandaOperations.createTopic( + List.of(new RedpandaOperations.TopicInfo(topicName, Optional.empty(), Optional.empty()))); + redpandaOperations.putRecordBlocking(topicName, UUID.randomUUID().toString(), Jsons.emptyObject()); + redpandaOperations.flush(); + return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); + } catch (Exception e) { + LOGGER.error("Error while trying to connect to Redpanda: ", e); + return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED); + } finally { + if (redpandaOperations != null) { + try { + redpandaOperations.deleteTopic(List.of(topicName)); + } catch (Exception e) { + LOGGER.error("Error while deleting Redpanda topic: ", e); + } + redpandaOperations.close(); + } + } + } + + @Override + public AirbyteMessageConsumer getConsumer(JsonNode config, + ConfiguredAirbyteCatalog configuredCatalog, + Consumer outputRecordCollector) { + RedpandaConfig redpandaConfig = RedpandaConfig.createConfig(config); + return new RedpandaMessageConsumer(configuredCatalog, new RedpandaOperations(redpandaConfig), redpandaConfig, + outputRecordCollector); + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java new file mode 100644 index 0000000000000..828475d03ba8b --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaMessageConsumer.java @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redpanda; + +import static io.airbyte.integrations.base.JavaBaseConstants.COLUMN_NAME_AB_ID; +import static io.airbyte.integrations.base.JavaBaseConstants.COLUMN_NAME_DATA; +import static io.airbyte.integrations.base.JavaBaseConstants.COLUMN_NAME_EMITTED_AT; + +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.time.Instant; +import java.util.Map; +import java.util.UUID; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RedpandaMessageConsumer extends FailureTrackingAirbyteMessageConsumer { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaMessageConsumer.class); + + private final Consumer outputRecordCollector; + + private final RedpandaOperations redpandaOperations; + + private final RedpandaConfig redpandaConfig; + + private final Map redpandaWriteConfigs; + + public RedpandaMessageConsumer(ConfiguredAirbyteCatalog configuredCatalog, + RedpandaOperations redpandaOperations, + RedpandaConfig redpandaConfig, + Consumer outputRecordCollector) { + this.outputRecordCollector = outputRecordCollector; + this.redpandaOperations = redpandaOperations; + this.redpandaConfig = redpandaConfig; + this.redpandaWriteConfigs = configuredCatalog.getStreams().stream() + .collect( + Collectors.toUnmodifiableMap(AirbyteStreamNameNamespacePair::fromConfiguredAirbyteSteam, + str -> new RedpandaWriteConfig( + new RedpandaNameTransformer().topicName(str.getStream().getNamespace(), + str.getStream().getName()), + str.getDestinationSyncMode()))); + } + + @Override + protected void startTracked() { + redpandaOperations.createTopic(redpandaWriteConfigs.values().stream() + .map(wc -> new RedpandaOperations.TopicInfo(wc.topicName(), redpandaConfig.topicNumPartitions(), + redpandaConfig.topicReplicationFactor())) + .collect(Collectors.toSet())); + } + + @Override + protected void acceptTracked(AirbyteMessage message) { + if (message.getType() == AirbyteMessage.Type.RECORD) { + var messageRecord = message.getRecord(); + + var streamConfig = + redpandaWriteConfigs.get(AirbyteStreamNameNamespacePair.fromRecordMessage(messageRecord)); + + if (streamConfig == null) { + throw new IllegalArgumentException("Unrecognized destination stream"); + } + + String key = UUID.randomUUID().toString(); + + var data = Jsons.jsonNode(Map.of( + COLUMN_NAME_AB_ID, key, + COLUMN_NAME_DATA, messageRecord.getData(), + COLUMN_NAME_EMITTED_AT, Instant.now())); + + var topic = streamConfig.topicName(); + + redpandaOperations.putRecord(topic, key, data, e -> { + LOGGER.error("Error while sending record to Redpanda with reason ", e); + try { + throw e; + } catch (Exception ex) { + throw new RuntimeException(ex); + } + }); + } else if (message.getType() == AirbyteMessage.Type.STATE) { + outputRecordCollector.accept(message); + } else { + LOGGER.warn("Unsupported airbyte message type: {}", message.getType()); + } + } + + @Override + protected void close(boolean hasFailed) { + redpandaOperations.close(); + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java 
b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java new file mode 100644 index 0000000000000..1f465bc41ca3b --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformer.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import io.airbyte.integrations.destination.StandardNameTransformer; + +public class RedpandaNameTransformer extends StandardNameTransformer { + + String topicName(String namespace, String stream) { + namespace = namespace != null ? namespace : ""; + var streamName = namespace + "_" + stream; + streamName = super.convertStreamName(streamName); + // max char length for redpanda topic name is 255 + return streamName.length() > 255 ? streamName.substring(0, 255) : streamName; + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java new file mode 100644 index 0000000000000..6179fd71e7967 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaOperations.java @@ -0,0 +1,144 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import com.fasterxml.jackson.databind.JsonNode; +import java.io.Closeable; +import java.util.Collection; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import org.apache.kafka.clients.admin.Admin; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.errors.TopicExistsException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RedpandaOperations implements Closeable { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaOperations.class); + + private final Admin adminClient; + + private final KafkaProducer kafkaProducer; + + public RedpandaOperations(RedpandaConfig redpandaConfig) { + this.adminClient = redpandaConfig.createAdminClient(); + this.kafkaProducer = redpandaConfig.createKafkaProducer(); + } + + public void createTopic(Collection topics) { + var newTopics = topics.stream() + .map(tf -> new NewTopic(tf.name(), tf.numPartitions(), tf.replicationFactor())) + .collect(Collectors.toSet()); + + var createTopicsResult = adminClient.createTopics(newTopics); + + // we need to wait for results since data replication is directly dependent on topic creation + + createTopicsResult.values().values().forEach(f -> { + try { + syncWrapper(() -> f); + } catch (ExecutionException e) { + // errors related to already existing topics should be ignored + if (!(e.getCause() instanceof TopicExistsException)) { + throw new RuntimeException(e); + } + } + }); + } + + public void deleteTopic(Collection topics) { + + var deleteTopicsResult = adminClient.deleteTopics(topics); + + try { 
+ syncWrapper(deleteTopicsResult::all); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + + public Set listTopics() { + + var listTopics = adminClient.listTopics(); + + try { + return syncWrapper(listTopics::names); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + + } + + public void putRecord(String topic, String key, JsonNode data, Consumer consumer) { + var producerRecord = new ProducerRecord<>(topic, key, data); + + kafkaProducer.send(producerRecord, ((metadata, exception) -> { + if (exception != null) { + consumer.accept(exception); + } + })); + + } + + // used when testing write permissions on check + public void putRecordBlocking(String topic, String key, JsonNode data) { + + var producerRecord = new ProducerRecord<>(topic, key, data); + + try { + syncWrapper(kafkaProducer::send, producerRecord); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + + public void flush() { + kafkaProducer.flush(); + } + + private T syncWrapper(Supplier> asyncFunction) throws ExecutionException { + try { + return asyncFunction.get().get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException(e); + } + } + + private T syncWrapper(Function, Future> asyncFunction, + ProducerRecord producerRecord) + throws ExecutionException { + return syncWrapper(() -> asyncFunction.apply(producerRecord)); + } + + public record TopicInfo( + + String name, + + Optional numPartitions, + + Optional replicationFactor + + ) { + + } + + @Override + public void close() { + kafkaProducer.flush(); + kafkaProducer.close(); + adminClient.close(); + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java new file mode 100644 index 0000000000000..aec13dbb3368d --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfig.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import io.airbyte.protocol.models.DestinationSyncMode; + +public record RedpandaWriteConfig( + + String topicName, + + DestinationSyncMode destinationSyncMode + +) {} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json new file mode 100644 index 0000000000000..89e41c686a267 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/main/resources/spec.json @@ -0,0 +1,76 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/redpanda", + "supportsIncremental": true, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": ["append"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Redpanda destination connector", + "type": "object", + "required": [ + "bootstrap_servers", + "buffer_memory", + "compression_type", + "retries", + "batch_size" + ], + "properties": { + "bootstrap_servers": { + "title": "Bootstrap Servers", + "description": "A list of host/port pairs to use for establishing the initial connection to the Redpanda cluster. 
The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. This list should be in the form host1:port1,host2:port2,.... Since these servers are just used for the initial connection to discover the full cluster membership (which may change dynamically), this list need not contain the full set of servers (you may want more than one, though, in case a server is down).", + "type": "string", + "examples": ["redpanda-broker1:9092,redpanda-broker2:9092"] + }, + "buffer_memory": { + "title": "Buffer Memory", + "description": "The total bytes of memory the producer can use to buffer records waiting to be sent to the server.", + "type": "string", + "examples": 33554432 + }, + "compression_type": { + "title": "Compression Type", + "description": "The compression type for all data generated by the producer.", + "type": "string", + "default": "none", + "enum": ["none", "gzip", "snappy", "lz4", "zstd"] + }, + "batch_size": { + "title": "Batch Size", + "description": "The producer will attempt to batch records together into fewer requests whenever multiple records are being sent to the same partition.", + "type": "integer", + "examples": [16384] + }, + "retries": { + "title": "Retries", + "description": "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error.", + "type": "integer", + "examples": [2147483647] + }, + "topic_num_partitions": { + "title": "Number of topic partitions", + "description": "The number of topic partitions which will be created on topic creation", + "type": "integer", + "examples": [10] + }, + "topic_replication_factor": { + "title": "Topic replication factor", + "description": "The number of topics to which messages will be replicated", + "type": "integer", + "examples": [10] + }, + "socket_connection_setup_timeout_ms": { + "title": "Socket Connection Setup Timeout", + "description": "The amount of time the client will wait for the socket connection to be established.", + "type": "integer", + "examples": [10000] + }, + "socket_connection_setup_timeout_max_ms": { + "title": "Socket Connection Setup Max Timeout", + "description": "The maximum amount of time the client will wait for the socket connection to be established. The connection setup timeout will increase exponentially for each consecutive connection failure up to this maximum.", + "type": "integer", + "examples": [30000] + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java new file mode 100644 index 0000000000000..5a85c61964bbd --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumer.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redpanda; + +import java.util.Map; +import org.apache.kafka.clients.consumer.KafkaConsumer; + +public class RedpandaConsumer extends KafkaConsumer { + + public RedpandaConsumer(Map configs) { + super(configs); + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java new file mode 100644 index 0000000000000..de23f1cd63609 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaConsumerFactory.java @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import java.util.Map; +import org.apache.kafka.clients.consumer.ConsumerConfig; + +public class RedpandaConsumerFactory { + + private RedpandaConsumerFactory() { + + } + + public static RedpandaConsumer getInstance(String bootstrapServers, String groupId) { + Map props = ImmutableMap.builder() + .put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers) + .put(ConsumerConfig.GROUP_ID_CONFIG, groupId) + .put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") + .put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer") + .put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonDeserializer") + .build(); + + return new RedpandaConsumer<>(props); + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java new file mode 100644 index 0000000000000..eec0a1fb80c3f --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaContainerFactory.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import org.testcontainers.redpanda.RedpandaContainer; + +class RedpandaContainerFactory { + + private RedpandaContainerFactory() { + + } + + public static RedpandaContainer createRedpandaContainer() { + return new RedpandaContainer("docker.redpanda.com/vectorized/redpanda:v22.2.7"); + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java new file mode 100644 index 0000000000000..9383c2cf14349 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationAcceptanceTest.java @@ -0,0 +1,150 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
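The RedpandaConsumerFactory above spells out the consumer-side Kafka properties used by the tests. On the write path the connector has to do the mirror-image translation: the spec fields bootstrap_servers, buffer_memory, compression_type, retries and batch_size must end up as standard Kafka producer settings backing the KafkaProducer used by RedpandaOperations. That wiring (RedpandaConfig) is not part of this hunk, so the snippet below is only a hypothetical sketch of the mapping; the class name is made up, and the serializer choices are assumptions picked to mirror the StringDeserializer/JsonDeserializer pair in the consumer factory.

```
import com.fasterxml.jackson.databind.JsonNode;
import java.util.Map;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;

// Hypothetical sketch only: how the spec fields would typically map onto Kafka producer properties.
public class RedpandaProducerPropsSketch {

  public static KafkaProducer<String, JsonNode> buildProducer(String bootstrapServers,
                                                              String bufferMemory,
                                                              String compressionType,
                                                              int retries,
                                                              int batchSize) {
    Map<String, Object> props = Map.of(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers,
        // buffer_memory is declared as a string in spec.json, hence the explicit parse
        ProducerConfig.BUFFER_MEMORY_CONFIG, Long.parseLong(bufferMemory),
        ProducerConfig.COMPRESSION_TYPE_CONFIG, compressionType,
        ProducerConfig.RETRIES_CONFIG, retries,
        ProducerConfig.BATCH_SIZE_CONFIG, batchSize,
        ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer",
        ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonSerializer");
    return new KafkaProducer<>(props);
  }

}
```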
+ */ + +package io.airbyte.integrations.destination.redpanda; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.JavaBaseConstants; +import io.airbyte.integrations.standardtest.destination.DestinationAcceptanceTest; +import io.airbyte.integrations.standardtest.destination.comparator.AdvancedTestDataComparator; +import io.airbyte.integrations.standardtest.destination.comparator.TestDataComparator; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; +import org.apache.kafka.clients.admin.Admin; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.TopicListing; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.redpanda.RedpandaContainer; + +public class RedpandaDestinationAcceptanceTest extends DestinationAcceptanceTest { + + private static final Logger LOGGER = LoggerFactory.getLogger(RedpandaDestinationAcceptanceTest.class); + + private static RedpandaContainer redpandaContainer; + + private RedpandaNameTransformer redpandaNameTransformer; + + private Admin adminClient; + + @BeforeAll + static void initContainer() { + redpandaContainer = RedpandaContainerFactory.createRedpandaContainer(); + redpandaContainer.start(); + } + + @AfterAll + static void stopContainer() { + redpandaContainer.stop(); + redpandaContainer.close(); + } + + @Override + protected void setup(TestDestinationEnv testEnv) { + this.redpandaNameTransformer = new RedpandaNameTransformer(); + this.adminClient = AdminClient.create(Map.of( + AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, redpandaContainer.getBootstrapServers(), + AdminClientConfig.RETRIES_CONFIG, 5, + AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG, 3000, + AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG, 30000)); + } + + @Override + protected void tearDown(TestDestinationEnv testEnv) throws ExecutionException, InterruptedException { + var topics = adminClient.listTopics().listings().get().stream() + .filter(tl -> !tl.isInternal()) + .map(TopicListing::name) + .collect(Collectors.toSet()); + + adminClient.deleteTopics(topics); + } + + @Override + protected String getImageName() { + return "airbyte/destination-redpanda:dev"; + } + + @Override + protected JsonNode getConfig() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("bootstrap_servers", redpandaContainer.getBootstrapServers()) + .put("compression_type", "none") + .put("batch_size", 16384) + .put("buffer_memory", "33554432") + .put("retries", 1) + .put("topic_num_partitions", 1) + .put("topic_replication_factor", 1) + .put("socket_connection_setup_timeout_ms", 3000) + .put("socket_connection_setup_timeout_max_ms", 3000) + .build()); + } + + @Override + protected JsonNode getFailCheckConfig() { + return Jsons.jsonNode(ImmutableMap.builder() + .put("bootstrap_servers", "127.0.0.9") + .put("compression_type", "none") + .put("batch_size", 16384) + .put("buffer_memory", "33554432") + .put("retries", 1) + .put("topic_num_partitions", 1) + .put("topic_replication_factor", 1) + .put("socket_connection_setup_timeout_ms", 3000) + .put("socket_connection_setup_timeout_max_ms", 3000) + 
.build()); + } + + @Override + protected TestDataComparator getTestDataComparator() { + return new AdvancedTestDataComparator(); + } + + @Override + protected boolean supportBasicDataTypeTest() { + return true; + } + + @Override + protected boolean supportArrayDataTypeTest() { + return true; + } + + @Override + protected boolean supportObjectDataTypeTest() { + return true; + } + + @Override + protected boolean implementsNamespaces() { + return true; + } + + @Override + protected List retrieveRecords(TestDestinationEnv testEnv, + String streamName, + String namespace, + JsonNode streamSchema) { + List records = new ArrayList<>(); + String bootstrapServers = redpandaContainer.getBootstrapServers(); + String groupId = redpandaNameTransformer.getIdentifier(namespace + "-" + streamName); + try (RedpandaConsumer redpandaConsumer = RedpandaConsumerFactory.getInstance(bootstrapServers, groupId)) { + String topicName = redpandaNameTransformer.topicName(namespace, streamName); + redpandaConsumer.subscribe(Collections.singletonList(topicName)); + redpandaConsumer.poll(Duration.ofSeconds(5)).iterator() + .forEachRemaining(r -> records.add(r.value().get(JavaBaseConstants.COLUMN_NAME_DATA))); + } + return records; + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java new file mode 100644 index 0000000000000..31d518c4ae402 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaDestinationTest.java @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redpanda; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.redpanda.RedpandaContainer; + +class RedpandaDestinationTest { + + private RedpandaContainer redpandaContainer; + + private RedpandaDestination redpandaDestination; + + @BeforeEach + void setup() { + this.redpandaDestination = new RedpandaDestination(); + this.redpandaContainer = RedpandaContainerFactory.createRedpandaContainer(); + this.redpandaContainer.start(); + } + + @AfterEach + void shutdown() { + this.redpandaContainer.stop(); + this.redpandaContainer.close(); + } + + @Test + void testCheckWithSuccess() { + + var jsonConfig = Jsons.jsonNode(ImmutableMap.builder() + .put("bootstrap_servers", redpandaContainer.getBootstrapServers()) + .put("compression_type", "none") + .put("batch_size", 16384) + .put("buffer_memory", "33554432") + .put("retries", 1) + .put("topic_num_partitions", 1) + .put("topic_replication_factor", 1) + .put("socket_connection_setup_timeout_ms", 3000) + .put("socket_connection_setup_timeout_max_ms", 3000) + .build()); + + var status = redpandaDestination.check(jsonConfig); + + assertThat(status.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.SUCCEEDED); + + } + + @Test + void testCheckWithFailure() { + + var jsonConfig = Jsons.jsonNode(ImmutableMap.builder() + .put("bootstrap_servers", "127.0.0.9") + .put("compression_type", "none") + .put("batch_size", 16384) + .put("buffer_memory", "33554432") + .put("retries", 1) + .put("topic_num_partitions", 1) + .put("topic_replication_factor", 1) + .put("socket_connection_setup_timeout_ms", 3000) + .put("socket_connection_setup_timeout_max_ms", 3000) + .build()); + + var status = redpandaDestination.check(jsonConfig); + + assertThat(status.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.FAILED); + + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java new file mode 100644 index 0000000000000..bbdaa484dbe30 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test-integration/java/io/airbyte/integrations/destination/redpanda/RedpandaOperationsTest.java @@ -0,0 +1,122 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redpanda; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.testcontainers.redpanda.RedpandaContainer; + +class RedpandaOperationsTest { + + private static final String TEST_TOPIC = "test_topic"; + + private RedpandaOperations redpandaOperations; + + private RedpandaConsumer redpandaConsumer; + + private RedpandaContainer redpandaContainer; + + @BeforeEach + void setup() { + this.redpandaContainer = RedpandaContainerFactory.createRedpandaContainer(); + this.redpandaContainer.start(); + var jsonConfig = Jsons.jsonNode(ImmutableMap.builder() + .put("bootstrap_servers", redpandaContainer.getBootstrapServers()) + .put("compression_type", "none") + .put("batch_size", 16384) + .put("buffer_memory", "33554432") + .put("retries", 1) + .put("topic_num_partitions", 1) + .put("topic_replication_factor", 1) + .put("socket_connection_setup_timeout_ms", 3000) + .put("socket_connection_setup_timeout_max_ms", 3000) + .put("", false) + .build()); + this.redpandaOperations = new RedpandaOperations(RedpandaConfig.createConfig(jsonConfig)); + this.redpandaConsumer = RedpandaConsumerFactory.getInstance(redpandaContainer.getBootstrapServers(), TEST_TOPIC); + } + + @AfterEach + void shutdown() { + this.redpandaOperations.close(); + this.redpandaConsumer.close(); + this.redpandaContainer.stop(); + this.redpandaContainer.close(); + } + + @Test + void testPutRecord() { + + redpandaOperations.putRecord(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data1")), e -> {}); + redpandaOperations.putRecord(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data2")), e -> {}); + redpandaOperations.flush(); + + List records = new ArrayList<>(); + redpandaConsumer.subscribe(Collections.singletonList(TEST_TOPIC)); + redpandaConsumer.poll(Duration.ofSeconds(5)).iterator().forEachRemaining(r -> records.add(r.value())); + + assertThat(records) + .hasSize(2); + } + + @Test + void testCreateTopic() { + + var topicInfo = new RedpandaOperations.TopicInfo(TEST_TOPIC, Optional.of(1), Optional.of((short) 1)); + redpandaOperations.createTopic(Set.of(topicInfo)); + + Set topics = redpandaOperations.listTopics(); + + assertThat(topics).anyMatch(topic -> topic.equals(TEST_TOPIC)); + } + + @Test + void testDeleteTopic() { + + // given + var topicInfo = new RedpandaOperations.TopicInfo(TEST_TOPIC, Optional.of(1), Optional.of((short) 1)); + redpandaOperations.createTopic(Set.of(topicInfo)); + + // when + redpandaOperations.deleteTopic(Set.of(TEST_TOPIC)); + + // then + Set topics = redpandaOperations.listTopics(); + + assertThat(topics).isEmpty(); + + } + + @Test + void testPutRecordBlocking() { + + redpandaOperations.putRecordBlocking(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data1"))); + redpandaOperations.putRecordBlocking(TEST_TOPIC, UUID.randomUUID().toString(), Jsons.jsonNode(Map.of("attr_1", "data2"))); + redpandaOperations.flush(); + + List records = new ArrayList<>(); + 
redpandaConsumer.subscribe(Collections.singletonList(TEST_TOPIC)); + redpandaConsumer.poll(Duration.ofSeconds(5)).iterator().forEachRemaining(r -> records.add(r.value())); + + assertThat(records) + .hasSize(2); + + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java new file mode 100644 index 0000000000000..830e9d57fcef5 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaConfigTest.java @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.airbyte.commons.json.Jsons; +import java.util.Comparator; +import java.util.Map; +import java.util.Optional; +import org.junit.jupiter.api.Test; + +class RedpandaConfigTest { + + @Test + void testRedpandaConfig() { + + var jsonConfig = Jsons.jsonNode(Map.of( + "bootstrap_servers", "host1:port1,host2:port2", + "buffer_memory", 33554432L, + "compression_type", "none", + "retries", 5, + "batch_size", 16384, + "topic_num_partitions", 1, + "topic_replication_factor", 1, + "socket_connection_setup_timeout_ms", 10000, + "socket_connection_setup_timeout_max_ms", 30000)); + + var redpandaConfig = RedpandaConfig.createConfig(jsonConfig); + + assertThat(redpandaConfig) + .usingComparatorForFields(new OptionalComparator(), "topicNumPartitions", "topicReplicationFactor") + .hasFieldOrPropertyWithValue("bootstrapServers", "host1:port1,host2:port2") + .hasFieldOrPropertyWithValue("bufferMemory", 33554432L) + .hasFieldOrPropertyWithValue("compressionType", "none") + .hasFieldOrPropertyWithValue("retries", 5) + .hasFieldOrPropertyWithValue("batchSize", 16384) + .hasFieldOrPropertyWithValue("topicNumPartitions", Optional.of(1)) + .hasFieldOrPropertyWithValue("topicReplicationFactor", Optional.of((short) 1)) + .hasFieldOrPropertyWithValue("socketConnectionSetupTimeoutMs", 10000) + .hasFieldOrPropertyWithValue("socketConnectionSetupTimeoutMaxMs", 30000); + + } + + private static class OptionalComparator implements Comparator> { + + @Override + public int compare(Optional o1, Optional o2) { + return Integer.compare(o1.get(), o2.get()); + } + + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java new file mode 100644 index 0000000000000..1a7c366f4da77 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaNameTransformerTest.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.redpanda; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; + +class RedpandaNameTransformerTest { + + @Test + void testTransformTopicName() { + + var redpandaNameTransformer = new RedpandaNameTransformer(); + + String topicName = redpandaNameTransformer.topicName("namespace", "stream"); + + assertThat(topicName).isEqualTo("namespace_stream"); + + } + +} diff --git a/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java new file mode 100644 index 0000000000000..7cf8975141eb8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-redpanda/src/test/java/io/airbyte/integrations/destination/redpanda/RedpandaWriteConfigTest.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.redpanda; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.airbyte.protocol.models.DestinationSyncMode; +import org.junit.jupiter.api.Test; + +class RedpandaWriteConfigTest { + + @Test + void testRedpandaWriteConfig() { + + var writeConfig = new RedpandaWriteConfig("namespace_stream", DestinationSyncMode.OVERWRITE); + + assertThat(writeConfig) + .hasFieldOrPropertyWithValue("topicName", "namespace_stream") + .hasFieldOrPropertyWithValue("destinationSyncMode", DestinationSyncMode.OVERWRITE); + + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/.dockerignore b/airbyte-integrations/connectors/destination-s3-glue/.dockerignore new file mode 100644 index 0000000000000..65c7d0ad3e73c --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/.dockerignore @@ -0,0 +1,3 @@ +* +!Dockerfile +!build diff --git a/airbyte-integrations/connectors/destination-s3-glue/Dockerfile b/airbyte-integrations/connectors/destination-s3-glue/Dockerfile new file mode 100644 index 0000000000000..21987c18424f8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/Dockerfile @@ -0,0 +1,18 @@ +FROM airbyte/integration-base-java:dev AS build + +WORKDIR /airbyte +ENV APPLICATION destination-s3-glue + +COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar + +RUN tar xf ${APPLICATION}.tar --strip-components=1 && rm -rf ${APPLICATION}.tar + +FROM airbyte/integration-base-java:dev + +WORKDIR /airbyte +ENV APPLICATION destination-s3-glue + +COPY --from=build /airbyte /airbyte + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/destination-s3-glue diff --git a/airbyte-integrations/connectors/destination-s3-glue/README.md b/airbyte-integrations/connectors/destination-s3-glue/README.md new file mode 100644 index 0000000000000..79550c6cf39bc --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/README.md @@ -0,0 +1,68 @@ +# Destination S3 Glue + +This is the repository for the S3 Glue destination connector in Java. +For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/destinations/s3-glue). 
+ +## Local development + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:destination-s3-glue:build +``` + +#### Create credentials +**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json`. +Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. + +**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. + +### Locally running the connector docker image + +#### Build +Build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:destination-s3-glue:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/destination-s3-glue:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-s3-glue:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/destination-s3-glue:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/destination-s3-glue:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +We use `JUnit` for Java tests. + +### Unit and Integration Tests +Place unit tests under `src/test/io/airbyte/integrations/destinations/s3_glue`. + +#### Acceptance Tests +Airbyte has a standard test suite that all destination connectors must pass. Implement the `TODO`s in +`src/test-integration/java/io/airbyte/integrations/destinations/s3_glueDestinationAcceptanceTest.java`. + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:destination-s3-glue:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:destination-s3-glue:integrationTest +``` + +## Dependency Management + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
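For the credentials step above, `secrets/config.json` must conform to `src/main/resources/spec.json` (included further down in this diff). As an illustration only, here is a minimal config covering the spec's required fields, expressed in the same Jsons/ImmutableMap style the integration tests in this repository use; every value is a placeholder lifted from the spec's examples and defaults, and the class itself is not part of the connector.

```
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.ImmutableMap;
import io.airbyte.commons.json.Jsons;
import java.util.Map;

// Illustrative only: placeholder values, replace with real credentials in secrets/config.json.
public class S3GlueSampleConfig {

  public static JsonNode minimalConfig() {
    return Jsons.jsonNode(ImmutableMap.<String, Object>builder()
        .put("s3_bucket_name", "airbyte_sync")
        .put("s3_bucket_path", "data_sync/test")
        .put("s3_bucket_region", "us-east-1")
        .put("access_key_id", "A012345678910EXAMPLE")
        .put("secret_access_key", "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY")
        .put("format", Map.of("format_type", "JSONL"))
        .put("glue_database", "airbyte_database")
        .put("glue_serialization_library", "org.openx.data.jsonserde.JsonSerDe")
        .build());
  }

}
```

The same keys and values, written directly as JSON, are what goes into `secrets/config.json`.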
diff --git a/airbyte-integrations/connectors/destination-s3-glue/build.gradle b/airbyte-integrations/connectors/destination-s3-glue/build.gradle new file mode 100644 index 0000000000000..9499fbc68045a --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/build.gradle @@ -0,0 +1,25 @@ +plugins { + id 'application' + id 'airbyte-docker' + id 'airbyte-integration-test-java' +} + +application { + mainClass = 'io.airbyte.integrations.destination.s3_glue.S3GlueDestination' +} + +dependencies { + implementation project(':airbyte-config:config-models') + implementation project(':airbyte-protocol:protocol-models') + implementation project(':airbyte-integrations:bases:base-java') + implementation project(':airbyte-integrations:bases:base-java-s3') + implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) + + // https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk-glue + implementation 'com.amazonaws:aws-java-sdk-glue:1.12.334' + + integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') + integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-s3-glue') + integrationTestJavaImplementation project(':airbyte-integrations:bases:s3-destination-base-integration-test') + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueConstants.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueConstants.java new file mode 100644 index 0000000000000..0283122c6ea2b --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueConstants.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3_glue; + +public class GlueConstants { + + private GlueConstants() { + + } + + public static final String GLUE_DATABASE = "glue_database"; + + public static final String SERIALIZATION_LIBRARY = "glue_serialization_library"; + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueDestinationConfig.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueDestinationConfig.java new file mode 100644 index 0000000000000..7481730140987 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueDestinationConfig.java @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.s3_glue; + +import static io.airbyte.integrations.destination.s3.constant.S3Constants.ACCESS_KEY_ID; +import static io.airbyte.integrations.destination.s3.constant.S3Constants.SECRET_ACCESS_KEY; +import static io.airbyte.integrations.destination.s3.constant.S3Constants.S_3_BUCKET_REGION; +import static io.airbyte.integrations.destination.s3_glue.GlueConstants.GLUE_DATABASE; +import static io.airbyte.integrations.destination.s3_glue.GlueConstants.SERIALIZATION_LIBRARY; + +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; +import com.amazonaws.services.glue.AWSGlue; +import com.amazonaws.services.glue.AWSGlueClient; +import com.amazonaws.services.glue.AWSGlueClientBuilder; +import com.fasterxml.jackson.databind.JsonNode; +import org.apache.commons.lang3.StringUtils; + +public class GlueDestinationConfig { + + private String database; + + private String region; + + private String accessKeyId; + + private String secretAccessKey; + + private String serializationLibrary; + + private GlueDestinationConfig() { + + } + + private GlueDestinationConfig(String database, + String region, + String accessKeyId, + String secretAccessKey, + String serializationLibrary) { + this.database = database; + this.region = region; + this.accessKeyId = accessKeyId; + this.secretAccessKey = secretAccessKey; + this.serializationLibrary = serializationLibrary; + } + + public static GlueDestinationConfig getInstance(JsonNode jsonNode) { + return new GlueDestinationConfig( + jsonNode.get(GLUE_DATABASE) != null ? jsonNode.get(GLUE_DATABASE).asText() : null, + jsonNode.get(S_3_BUCKET_REGION) != null ? jsonNode.get(S_3_BUCKET_REGION).asText() : null, + jsonNode.get(ACCESS_KEY_ID) != null ? jsonNode.get(ACCESS_KEY_ID).asText() : null, + jsonNode.get(SECRET_ACCESS_KEY) != null ? jsonNode.get(SECRET_ACCESS_KEY).asText() : null, + jsonNode.get(SERIALIZATION_LIBRARY) != null ? 
jsonNode.get(SERIALIZATION_LIBRARY).asText() : "org.openx.data.jsonserde.JsonSerDe"); + } + + public AWSGlue getAWSGlueInstance() { + AWSGlueClientBuilder builder = AWSGlueClient.builder(); + AWSCredentialsProvider awsCredentialsProvider; + if (!StringUtils.isBlank(accessKeyId) && !StringUtils.isBlank(secretAccessKey)) { + AWSCredentials awsCreds = new BasicAWSCredentials(accessKeyId, secretAccessKey); + awsCredentialsProvider = new AWSStaticCredentialsProvider(awsCreds); + } else { + awsCredentialsProvider = new DefaultAWSCredentialsProviderChain(); + } + + builder.withCredentials(awsCredentialsProvider); + + if (!StringUtils.isBlank(region)) { + builder.withRegion(region); + } + + return builder.build(); + + } + + public String getDatabase() { + return database; + } + + public String getSerializationLibrary() { + return serializationLibrary; + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueOperations.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueOperations.java new file mode 100644 index 0000000000000..8cbd43dd86fc1 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/GlueOperations.java @@ -0,0 +1,175 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3_glue; + +import com.amazonaws.services.glue.AWSGlue; +import com.amazonaws.services.glue.model.Column; +import com.amazonaws.services.glue.model.CreateTableRequest; +import com.amazonaws.services.glue.model.DeleteTableRequest; +import com.amazonaws.services.glue.model.EntityNotFoundException; +import com.amazonaws.services.glue.model.GetTableRequest; +import com.amazonaws.services.glue.model.SerDeInfo; +import com.amazonaws.services.glue.model.StorageDescriptor; +import com.amazonaws.services.glue.model.TableInput; +import com.amazonaws.services.glue.model.UpdateTableRequest; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Preconditions; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +// TODO (itaseski) implement wrapper for retry logic on transient errors +public class GlueOperations implements MetastoreOperations { + + private final ObjectMapper objectMapper; + + private final AWSGlue awsGlueClient; + + public GlueOperations(AWSGlue awsGlueClient) { + Preconditions.checkArgument(awsGlueClient != null); + this.awsGlueClient = awsGlueClient; + this.objectMapper = new ObjectMapper(); + } + + @Override + public void upsertTable(String databaseName, + String tableName, + String location, + JsonNode jsonSchema, + String serializationLibrary) { + try { + GetTableRequest getTableRequest = new GetTableRequest() + .withDatabaseName(databaseName) + .withName(tableName); + + // Will throw EntityNotFoundException if table doesn't exist + awsGlueClient.getTable(getTableRequest); + + UpdateTableRequest updateTableRequest = new UpdateTableRequest() + .withDatabaseName(databaseName) + .withTableInput( + new TableInput() + .withName(tableName) + // .withTableType("GOVERNED") + .withStorageDescriptor( + new StorageDescriptor() + .withLocation(location) + .withColumns(transformSchema(jsonSchema)) 
+ .withInputFormat("org.apache.hadoop.mapred.TextInputFormat") + .withOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat") + .withSerdeInfo( + new SerDeInfo() + .withSerializationLibrary(serializationLibrary) + .withParameters(Map.of("paths", ","))) + + ) + .withPartitionKeys(List.of()) + .withParameters(Map.of("classification", "json"))); + + awsGlueClient.updateTable(updateTableRequest); + } catch (EntityNotFoundException enfe) { + CreateTableRequest createTableRequest = new CreateTableRequest() + .withDatabaseName(databaseName) + .withTableInput( + new TableInput() + .withName(tableName) + // .withTableType("GOVERNED") + .withStorageDescriptor( + new StorageDescriptor() + .withLocation(location) + .withColumns(transformSchema(jsonSchema)) + .withInputFormat("org.apache.hadoop.mapred.TextInputFormat") + .withOutputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat") + .withSerdeInfo( + new SerDeInfo() + .withSerializationLibrary(serializationLibrary) + .withParameters(Map.of("paths", ",")))) + .withPartitionKeys(List.of()) + .withParameters(Map.of("classification", "json"))); + + awsGlueClient.createTable(createTableRequest); + } + } + + @Override + public void deleteTable(String databaseName, String tableName) { + + DeleteTableRequest deleteTableRequest = new DeleteTableRequest() + .withDatabaseName(databaseName) + .withName(tableName); + + awsGlueClient.deleteTable(deleteTableRequest); + + } + + private Collection transformSchema(JsonNode jsonSchema) { + if (jsonSchema.has("properties")) { + Map properties = objectMapper.convertValue(jsonSchema.get("properties"), new TypeReference<>() {}); + return properties.entrySet().stream() + .map(es -> new Column().withName(es.getKey()).withType(transformSchemaRecursive(es.getValue()))) + .collect(Collectors.toSet()); + } else { + return Collections.emptySet(); + } + } + + private String transformSchemaRecursive(JsonNode jsonNode) { + String type = filterTypes(jsonNode.get("type")).iterator().next(); + return switch (type) { + // TODO(itaseski) support date-time and timestamp airbyte types + case "string" -> "string"; + case "number" -> { + if (jsonNode.has("airbyte_type") && jsonNode.get("airbyte_type").asText().equals("integer")) { + yield "int"; + } + yield "float"; + } + case "boolean" -> "boolean"; + case "integer" -> "int"; + case "array" -> { + String arrayType = "array<"; + Set itemTypes = filterTypes(jsonNode.get("items").get("type")); + if (itemTypes.size() > 1) { + // TODO(itaseski) use union instead of array when having multiple types (rare occurrence)? 
+ arrayType += "string>"; + } else { + String subtype = transformSchemaRecursive(jsonNode.get("items")); + arrayType += (subtype + ">"); + } + yield arrayType; + } + case "object" -> { + String objectType = "struct<"; + Map properties = objectMapper.convertValue(jsonNode.get("properties"), new TypeReference<>() {}); + String columnTypes = properties.entrySet().stream() + .map(p -> p.getKey() + " : " + transformSchemaRecursive(p.getValue())) + .collect(Collectors.joining(",")); + objectType += (columnTypes + ">"); + yield objectType; + } + default -> type; + }; + } + + private Set filterTypes(JsonNode type) { + if (type.isArray()) { + Set types = objectMapper.convertValue(type, new TypeReference<>() {}); + return types.stream().filter(t -> !t.equals("null")).collect(Collectors.toSet()); + } else { + return Set.of(type.asText()); + } + } + + @Override + public void close() { + awsGlueClient.shutdown(); + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/MetastoreOperations.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/MetastoreOperations.java new file mode 100644 index 0000000000000..f112c3f58bc4f --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/MetastoreOperations.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3_glue; + +import com.fasterxml.jackson.databind.JsonNode; + +// TODO (itaseskii) allow config based implementation of different metastores i.e Hive, Nessie, etc. +public interface MetastoreOperations extends AutoCloseable { + + // TODO (itaseskii) extend metadata with data format (json, avro, parquet) + void upsertTable(String databaseName, String tableName, String location, JsonNode jsonSchema, String serializationLibrary); + + void deleteTable(String databaseName, String tableName); + + @Override + void close(); + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueConsumerFactory.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueConsumerFactory.java new file mode 100644 index 0000000000000..3d14735a2866d --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueConsumerFactory.java @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
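To make the recursive JSON-schema-to-Glue type mapping in GlueOperations#transformSchemaRecursive easier to follow, here is a deliberately trimmed, illustrative re-statement of the same rules. It is not connector code: it skips the filterTypes handling of nullable type arrays and the airbyte_type special case for integers, and the class name is made up.

```
import com.fasterxml.jackson.databind.JsonNode;
import io.airbyte.commons.json.Jsons;

// Illustrative sketch only: a simplified version of the type-mapping rules used by GlueOperations.
public class GlueTypeMappingSketch {

  static String glueType(JsonNode schema) {
    String type = schema.get("type").asText();
    return switch (type) {
      case "string" -> "string";
      case "integer" -> "int";
      case "boolean" -> "boolean";
      case "number" -> "float"; // the real code yields "int" when airbyte_type marks the field as integer
      case "array" -> "array<" + glueType(schema.get("items")) + ">";
      case "object" -> {
        StringBuilder struct = new StringBuilder("struct<");
        var fields = schema.get("properties").fields();
        while (fields.hasNext()) {
          var field = fields.next();
          struct.append(field.getKey()).append(" : ").append(glueType(field.getValue()));
          if (fields.hasNext()) {
            struct.append(",");
          }
        }
        yield struct.append(">").toString();
      }
      default -> type;
    };
  }

  public static void main(String[] args) {
    JsonNode schema = Jsons.deserialize("""
        {"type": "object", "properties": {
          "id": {"type": "integer"},
          "name": {"type": "string"},
          "tags": {"type": "array", "items": {"type": "string"}}}}
        """);
    // Prints: struct<id : int,name : string,tags : array<string>>
    System.out.println(glueType(schema));
  }

}
```

A flat stream schema therefore becomes a list of top-level Column entries, while nested objects and arrays collapse into Hive-style struct<...> and array<...> type strings.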
+ */ + +package io.airbyte.integrations.destination.s3_glue; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.base.Preconditions; +import io.airbyte.commons.functional.CheckedBiConsumer; +import io.airbyte.commons.functional.CheckedBiFunction; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.destination.NamingConventionTransformer; +import io.airbyte.integrations.destination.buffered_stream_consumer.BufferedStreamConsumer; +import io.airbyte.integrations.destination.buffered_stream_consumer.OnCloseFunction; +import io.airbyte.integrations.destination.buffered_stream_consumer.OnStartFunction; +import io.airbyte.integrations.destination.record_buffer.SerializableBuffer; +import io.airbyte.integrations.destination.record_buffer.SerializedBufferingStrategy; +import io.airbyte.integrations.destination.s3.BlobStorageOperations; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.WriteConfig; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.apache.commons.io.FileUtils; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class S3GlueConsumerFactory { + + private static final Logger LOGGER = LoggerFactory.getLogger(S3GlueConsumerFactory.class); + + private static final DateTime SYNC_DATETIME = DateTime.now(DateTimeZone.UTC); + + public AirbyteMessageConsumer create(final Consumer outputRecordCollector, + final BlobStorageOperations storageOperations, + final MetastoreOperations metastoreOperations, + final NamingConventionTransformer namingResolver, + final CheckedBiFunction onCreateBuffer, + final S3DestinationConfig s3Config, + final GlueDestinationConfig glueConfig, + final ConfiguredAirbyteCatalog catalog) { + final List writeConfigs = createWriteConfigs(storageOperations, s3Config, catalog); + return new BufferedStreamConsumer( + outputRecordCollector, + onStartFunction(storageOperations, writeConfigs), + new SerializedBufferingStrategy( + onCreateBuffer, + catalog, + flushBufferFunction(storageOperations, writeConfigs, catalog)), + onCloseFunction(storageOperations, metastoreOperations, writeConfigs, glueConfig, s3Config), + catalog, + storageOperations::isValidData); + } + + private static List createWriteConfigs(final BlobStorageOperations storageOperations, + final S3DestinationConfig config, + final ConfiguredAirbyteCatalog catalog) { + return catalog.getStreams() + .stream() + .map(toWriteConfig(storageOperations, config)) + .collect(Collectors.toList()); + } + + private static Function toWriteConfig( + final BlobStorageOperations storageOperations, + final S3DestinationConfig s3Config) { + return stream -> { + Preconditions.checkNotNull(stream.getDestinationSyncMode(), "Undefined destination sync mode"); + final AirbyteStream abStream = stream.getStream(); + final String namespace = abStream.getNamespace(); + final String streamName = abStream.getName(); + final String 
bucketPath = s3Config.getBucketPath(); + final String customOutputFormat = String.join("/", bucketPath, s3Config.getPathFormat()); + final String fullOutputPath = storageOperations.getBucketObjectPath(namespace, streamName, SYNC_DATETIME, customOutputFormat); + final DestinationSyncMode syncMode = stream.getDestinationSyncMode(); + final JsonNode jsonSchema = abStream.getJsonSchema(); + final String location = "s3://" + s3Config.getBucketName() + "/" + + fullOutputPath.substring(0, fullOutputPath.lastIndexOf("/") + 1); + final S3GlueWriteConfig writeConfig = + new S3GlueWriteConfig(namespace, streamName, bucketPath, customOutputFormat, fullOutputPath, syncMode, + jsonSchema, location); + LOGGER.info("Write config: {}", writeConfig); + return writeConfig; + }; + } + + private OnStartFunction onStartFunction(final BlobStorageOperations storageOperations, final List writeConfigs) { + return () -> { + LOGGER.info("Preparing bucket in destination started for {} streams", writeConfigs.size()); + for (final WriteConfig writeConfig : writeConfigs) { + if (writeConfig.getSyncMode().equals(DestinationSyncMode.OVERWRITE)) { + final String namespace = writeConfig.getNamespace(); + final String stream = writeConfig.getStreamName(); + final String outputBucketPath = writeConfig.getOutputBucketPath(); + final String pathFormat = writeConfig.getPathFormat(); + LOGGER.info("Clearing storage area in destination started for namespace {} stream {} bucketObject {} pathFormat {}", + namespace, stream, outputBucketPath, pathFormat); + storageOperations.cleanUpBucketObject(namespace, stream, outputBucketPath, pathFormat); + LOGGER.info("Clearing storage area in destination completed for namespace {} stream {} bucketObject {}", namespace, stream, + outputBucketPath); + } + } + LOGGER.info("Preparing storage area in destination completed."); + }; + } + + private static AirbyteStreamNameNamespacePair toNameNamespacePair(final WriteConfig config) { + return new AirbyteStreamNameNamespacePair(config.getStreamName(), config.getNamespace()); + } + + private CheckedBiConsumer flushBufferFunction(final BlobStorageOperations storageOperations, + final List writeConfigs, + final ConfiguredAirbyteCatalog catalog) { + final Map pairToWriteConfig = + writeConfigs.stream() + .collect(Collectors.toUnmodifiableMap(S3GlueConsumerFactory::toNameNamespacePair, Function.identity())); + + return (pair, writer) -> { + LOGGER.info("Flushing buffer for stream {} ({}) to storage", pair.getName(), FileUtils.byteCountToDisplaySize(writer.getByteCount())); + if (!pairToWriteConfig.containsKey(pair)) { + throw new IllegalArgumentException( + String.format("Message contained record from a stream %s that was not in the catalog. 
\ncatalog: %s", pair, Jsons.serialize(catalog))); + } + + final WriteConfig writeConfig = pairToWriteConfig.get(pair); + try (writer) { + writer.flush(); + writeConfig.addStoredFile(storageOperations.uploadRecordsToBucket( + writer, + writeConfig.getNamespace(), + writeConfig.getStreamName(), + writeConfig.getFullOutputPath())); + } catch (final Exception e) { + LOGGER.error("Failed to flush and upload buffer to storage:", e); + throw new RuntimeException("Failed to upload buffer to storage", e); + } + }; + } + + private OnCloseFunction onCloseFunction(final BlobStorageOperations storageOperations, + final MetastoreOperations metastoreOperations, + final List writeConfigs, + GlueDestinationConfig glueDestinationConfig, + S3DestinationConfig s3DestinationConfig) { + return (hasFailed) -> { + if (hasFailed) { + LOGGER.info("Cleaning up destination started for {} streams", writeConfigs.size()); + for (final WriteConfig writeConfig : writeConfigs) { + storageOperations.cleanUpBucketObject(writeConfig.getFullOutputPath(), writeConfig.getStoredFiles()); + writeConfig.clearStoredFiles(); + } + LOGGER.info("Cleaning up destination completed."); + } else { + for (final S3GlueWriteConfig writeConfig : writeConfigs) { + metastoreOperations.upsertTable(glueDestinationConfig.getDatabase(), + writeConfig.getStreamName(), writeConfig.getLocation(), writeConfig.getJsonSchema(), + glueDestinationConfig.getSerializationLibrary()); + } + } + }; + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueDestination.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueDestination.java new file mode 100644 index 0000000000000..24858a39a91c8 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueDestination.java @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
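The Glue table location passed to upsertTable by the consumer factory above is simply the bucket plus the prefix of the full output path up to and including its last slash. A tiny self-contained illustration with made-up values follows; the bucket name and bucket path mirror the spec examples, and the timestamped object key is hypothetical.

```
public class GlueLocationExample {

  public static void main(String[] args) {
    // Hypothetical values: bucket and path mirror the spec examples, the object key is made up.
    String bucketName = "airbyte_sync";
    String fullOutputPath = "data_sync/test/public/users/2022_11_07_1667779200000_0.jsonl";

    // Same expression as S3GlueConsumerFactory#toWriteConfig: keep everything up to the last '/'.
    String location = "s3://" + bucketName + "/"
        + fullOutputPath.substring(0, fullOutputPath.lastIndexOf("/") + 1);

    // Prints: s3://airbyte_sync/data_sync/test/public/users/
    System.out.println(location);
  }

}
```

Registering the prefix rather than a single object means the Glue table picks up every file written for that stream under the sync path.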
+ */ + +package io.airbyte.integrations.destination.s3_glue; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.integrations.base.AirbyteMessageConsumer; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.destination.NamingConventionTransformer; +import io.airbyte.integrations.destination.record_buffer.FileBuffer; +import io.airbyte.integrations.destination.s3.BaseS3Destination; +import io.airbyte.integrations.destination.s3.S3DestinationConfig; +import io.airbyte.integrations.destination.s3.S3StorageOperations; +import io.airbyte.integrations.destination.s3.SerializedBufferFactory; +import io.airbyte.integrations.destination.s3.StorageProvider; +import io.airbyte.integrations.destination.s3.util.S3NameTransformer; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import java.util.function.Consumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class S3GlueDestination extends BaseS3Destination { + + private static final Logger LOGGER = LoggerFactory.getLogger(S3GlueDestination.class); + + public S3GlueDestination() { + super(); + } + + public static void main(String[] args) throws Exception { + new IntegrationRunner(new S3GlueDestination()).run(args); + } + + @Override + public AirbyteConnectionStatus check(JsonNode config) { + var status = super.check(config); + if (status.getStatus() == AirbyteConnectionStatus.Status.FAILED) { + return status; + } + final GlueDestinationConfig glueConfig = GlueDestinationConfig.getInstance(config); + MetastoreOperations metastoreOperations = null; + String tableName = "test_table"; + try { + metastoreOperations = new GlueOperations(glueConfig.getAWSGlueInstance()); + metastoreOperations.upsertTable(glueConfig.getDatabase(), tableName, "s3://", Jsons.emptyObject(), glueConfig.getSerializationLibrary()); + + return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); + } catch (Exception e) { + LOGGER.error("Error while trying to perform check with Glue: ", e); + return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.FAILED); + } finally { + if (metastoreOperations != null) { + try { + metastoreOperations.deleteTable(glueConfig.getDatabase(), tableName); + } catch (Exception e) { + LOGGER.error("Error while deleting Glue table"); + } + metastoreOperations.close(); + } + } + } + + @Override + public AirbyteMessageConsumer getConsumer(JsonNode config, + ConfiguredAirbyteCatalog configuredCatalog, + Consumer outputRecordCollector) { + final S3DestinationConfig s3Config = configFactory.getS3DestinationConfig(config, storageProvider()); + final GlueDestinationConfig glueConfig = GlueDestinationConfig.getInstance(config); + final NamingConventionTransformer nameTransformer = new S3NameTransformer(); + return new S3GlueConsumerFactory().create( + outputRecordCollector, + new S3StorageOperations(nameTransformer, s3Config.getS3Client(), s3Config), + // TODO (itaseski) add Glue name transformer + new GlueOperations(glueConfig.getAWSGlueInstance()), + nameTransformer, + SerializedBufferFactory.getCreateFunction(s3Config, FileBuffer::new), + s3Config, + glueConfig, + configuredCatalog); + } + + @Override + public StorageProvider storageProvider() { + return StorageProvider.AWS_S3; + } + +} diff --git 
a/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueWriteConfig.java b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueWriteConfig.java new file mode 100644 index 0000000000000..e88c06821b32f --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/java/io/airbyte/integrations/destination/s3_glue/S3GlueWriteConfig.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3_glue; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.integrations.destination.s3.WriteConfig; +import io.airbyte.protocol.models.DestinationSyncMode; + +public class S3GlueWriteConfig extends WriteConfig { + + private final JsonNode jsonSchema; + + private final String location; + + public S3GlueWriteConfig(String namespace, + String streamName, + String outputBucketPath, + String pathFormat, + String fullOutputPath, + DestinationSyncMode syncMode, + JsonNode jsonSchema, + String location) { + super(namespace, streamName, outputBucketPath, pathFormat, fullOutputPath, syncMode); + this.jsonSchema = jsonSchema; + this.location = location; + } + + public JsonNode getJsonSchema() { + return jsonSchema; + } + + public String getLocation() { + return location; + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/main/resources/spec.json b/airbyte-integrations/connectors/destination-s3-glue/src/main/resources/spec.json new file mode 100644 index 0000000000000..21b988a4d0727 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/main/resources/spec.json @@ -0,0 +1,190 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/s3", + "supportsIncremental": true, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": ["overwrite", "append"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "S3 Destination Spec", + "type": "object", + "required": [ + "s3_bucket_name", + "s3_bucket_path", + "s3_bucket_region", + "format", + "glue_database", + "glue_serialization_library" + ], + "properties": { + "access_key_id": { + "type": "string", + "description": "The access key ID to access the S3 bucket. Airbyte requires Read and Write permissions to the given bucket. Read more here.", + "title": "S3 Key ID", + "airbyte_secret": true, + "examples": ["A012345678910EXAMPLE"], + "order": 0 + }, + "secret_access_key": { + "type": "string", + "description": "The corresponding secret to the access key ID. Read more here", + "title": "S3 Access Key", + "airbyte_secret": true, + "examples": ["a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"], + "order": 1 + }, + "s3_bucket_name": { + "title": "S3 Bucket Name", + "type": "string", + "description": "The name of the S3 bucket. Read more here.", + "examples": ["airbyte_sync"], + "order": 2 + }, + "s3_bucket_path": { + "title": "S3 Bucket Path", + "description": "Directory under the S3 bucket where data will be written. Read more here", + "type": "string", + "examples": ["data_sync/test"], + "order": 3 + }, + "s3_bucket_region": { + "title": "S3 Bucket Region", + "type": "string", + "default": "", + "description": "The region of the S3 bucket. 
See here for all region codes.", + "enum": [ + "", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "af-south-1", + "ap-east-1", + "ap-south-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "cn-north-1", + "cn-northwest-1", + "eu-central-1", + "eu-north-1", + "eu-south-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "sa-east-1", + "me-south-1", + "us-gov-east-1", + "us-gov-west-1" + ], + "order": 4 + }, + "format": { + "title": "Output Format", + "type": "object", + "description": "Format of the data output. See here for more details", + "oneOf": [ + { + "title": "JSON Lines: Newline-delimited JSON", + "required": ["format_type"], + "properties": { + "format_type": { + "title": "Format Type", + "type": "string", + "enum": ["JSONL"], + "default": "JSONL" + }, + "compression": { + "title": "Compression", + "type": "object", + "description": "Whether the output files should be compressed. If compression is selected, the output filename will have an extra extension (GZIP: \".jsonl.gz\").", + "oneOf": [ + { + "title": "No Compression", + "requires": "compression_type", + "properties": { + "compression_type": { + "type": "string", + "enum": ["No Compression"], + "default": "No Compression" + } + } + }, + { + "title": "GZIP", + "requires": "compression_type", + "properties": { + "compression_type": { + "type": "string", + "enum": ["GZIP"], + "default": "GZIP" + } + } + } + ] + }, + "flatten_data": { + "title": "Flatten Data", + "description": "If true data will be flattened and won't be nested in the _airbyte_data field", + "type": "boolean", + "default": true + } + } + } + ], + "order": 5 + }, + "s3_endpoint": { + "title": "Endpoint", + "type": "string", + "default": "", + "description": "Your S3 endpoint url. Read more here", + "examples": ["http://localhost:9000"], + "order": 6 + }, + "s3_path_format": { + "title": "S3 Path Format", + "description": "Format string on how data will be organized inside the S3 bucket directory. 
Read more here", + "type": "string", + "examples": [ + "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + ], + "order": 7 + }, + "file_name_pattern": { + "type": "string", + "description": "The pattern allows you to set the file-name format for the S3 staging file(s)", + "title": "S3 Filename pattern", + "examples": [ + "{date}", + "{date:yyyy_MM}", + "{timestamp}", + "{part_number}", + "{sync_id}" + ], + "order": 8 + }, + "glue_database": { + "type": "string", + "description": "Name of the glue database for creating the tables, leave blank if no integration", + "title": "Glue database name", + "examples": ["airbyte_database"], + "order": 9 + }, + "glue_serialization_library": { + "title": "Serialization Library", + "description": "The library that your query engine will use for reading and writing data in your lake.", + "type": "string", + "enum": [ + "org.openx.data.jsonserde.JsonSerDe", + "org.apache.hive.hcatalog.data.JsonSerDe" + ], + "default": "org.openx.data.jsonserde.JsonSerDe", + "order": 10 + } + } + } +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/GlueTestClient.java b/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/GlueTestClient.java new file mode 100644 index 0000000000000..eb54c05c9f7e4 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/GlueTestClient.java @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3_glue; + +import com.amazonaws.services.glue.AWSGlue; +import com.amazonaws.services.glue.model.BatchDeleteTableRequest; +import com.amazonaws.services.glue.model.GetTablesRequest; +import com.amazonaws.services.glue.model.GetTablesResult; +import com.amazonaws.services.glue.model.Table; +import java.io.Closeable; +import java.util.ArrayList; +import java.util.List; + +public class GlueTestClient implements Closeable { + + private final AWSGlue glueClient; + + public GlueTestClient(AWSGlue glueClient) { + this.glueClient = glueClient; + } + + private List getAllTables(String databaseName) { + + List
<Table> tables = new ArrayList<>(); + String nextToken = null; + do { + GetTablesRequest getTablesRequest = + new GetTablesRequest().withDatabaseName(databaseName).withNextToken(nextToken); + + GetTablesResult getTablesResult = glueClient.getTables(getTablesRequest); + tables.addAll(getTablesResult.getTableList()); + + nextToken = getTablesResult.getNextToken(); + + } while (nextToken != null); + + return tables; + + } + + private BatchDeleteTableRequest getBatchDeleteRequest(String databaseName, List<Table>
tables) { + List<String> tablesToDelete = tables.stream().map(Table::getName).toList(); + return new BatchDeleteTableRequest() + .withDatabaseName(databaseName) + .withTablesToDelete(tablesToDelete); + } + + public void purgeDatabase(String databaseName) { + int countRetries = 0; + while (countRetries < 5) { + try { + List<Table>
allTables = getAllTables(databaseName); + BatchDeleteTableRequest batchDeleteTableRequest = getBatchDeleteRequest(databaseName, allTables); + glueClient.batchDeleteTable(batchDeleteTableRequest); + return; + } catch (Exception e) { + countRetries++; + } + } + } + + @Override + public void close() { + glueClient.shutdown(); + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/S3GlueJsonlDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/S3GlueJsonlDestinationAcceptanceTest.java new file mode 100644 index 0000000000000..070330b8b8ca0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/S3GlueJsonlDestinationAcceptanceTest.java @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.s3_glue; + +import io.airbyte.integrations.destination.s3.S3BaseJsonlDestinationAcceptanceTest; + +public class S3GlueJsonlDestinationAcceptanceTest extends S3BaseJsonlDestinationAcceptanceTest { + + @Override + protected void tearDown(TestDestinationEnv testEnv) { + super.tearDown(testEnv); + + GlueDestinationConfig glueDestinationConfig = GlueDestinationConfig.getInstance(configJson); + try (var glueTestClient = new GlueTestClient(glueDestinationConfig.getAWSGlueInstance())) { + + glueTestClient.purgeDatabase(glueDestinationConfig.getDatabase()); + + } + } + + @Override + protected String getImageName() { + return "airbyte/destination-s3-glue:dev"; + } + +} diff --git a/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/S3GlueJsonlGzipDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/S3GlueJsonlGzipDestinationAcceptanceTest.java new file mode 100644 index 0000000000000..0e5d68a87185e --- /dev/null +++ b/airbyte-integrations/connectors/destination-s3-glue/src/test-integration/java/io/airbyte/integrations/destination/s3_glue/S3GlueJsonlGzipDestinationAcceptanceTest.java @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.s3_glue; + +import io.airbyte.integrations.destination.s3.S3BaseJsonlGzipDestinationAcceptanceTest; + +public class S3GlueJsonlGzipDestinationAcceptanceTest extends S3BaseJsonlGzipDestinationAcceptanceTest { + + @Override + protected void tearDown(TestDestinationEnv testEnv) { + super.tearDown(testEnv); + + GlueDestinationConfig glueDestinationConfig = GlueDestinationConfig.getInstance(configJson); + try (var glueTestClient = new GlueTestClient(glueDestinationConfig.getAWSGlueInstance())) { + + glueTestClient.purgeDatabase(glueDestinationConfig.getDatabase()); + + } + } + + @Override + protected String getImageName() { + return "airbyte/destination-s3-glue:dev"; + } + +} diff --git a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java index a03053e8a3dd0..91482584ce66d 100644 --- a/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java +++ b/airbyte-integrations/connectors/destination-scylla/src/main/java/io/airbyte/integrations/destination/scylla/ScyllaMessageConsumer.java @@ -5,9 +5,9 @@ package io.airbyte.integrations.destination.scylla; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.FailureTrackingAirbyteMessageConsumer; import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.Map; import java.util.function.Consumer; diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index 47e84df50aa07..114ea3add10b5 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -20,5 +20,5 @@ RUN tar xf ${APPLICATION}.tar --strip-components=1 ENV ENABLE_SENTRY true -LABEL io.airbyte.version=0.4.38 +LABEL io.airbyte.version=0.4.40 LABEL io.airbyte.name=airbyte/destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index 066a3f3639aec..98abaec31d1e8 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -5,7 +5,7 @@ plugins { } application { - mainClass = 'io.airbyte.integrations.destination.snowflake.SnowflakeDestination' + mainClass = 'io.airbyte.integrations.destination.snowflake.SnowflakeDestinationRunner' // enable when profiling applicationDefaultJvmArgs = [ '-XX:+ExitOnOutOfMemoryError', @@ -47,7 +47,7 @@ dependencies { // this is a configuration to make mockito work with final classes testImplementation 'org.mockito:mockito-inline:2.13.0' - + integrationTestJavaImplementation project(':airbyte-commons-worker') integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-destination-test') integrationTestJavaImplementation project(':airbyte-integrations:connectors:destination-snowflake') integrationTestJavaImplementation 'org.apache.commons:commons-lang3:3.11' diff --git 
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/OssCloudEnvVarConsts.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/OssCloudEnvVarConsts.java new file mode 100644 index 0000000000000..ba1db1fef26c3 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/OssCloudEnvVarConsts.java @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake; + +public class OssCloudEnvVarConsts { + + public static final String AIRBYTE_OSS = "airbyte_oss"; + public static final String AIRBYTE_CLOUD = "airbyte_cloud"; + +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyAzureBlobStorageDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyAzureBlobStorageDestination.java index 6eecd117b3882..d18580b495a15 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyAzureBlobStorageDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeCopyAzureBlobStorageDestination.java @@ -10,6 +10,8 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.base.AirbyteMessageConsumer; import io.airbyte.integrations.destination.ExtendedNameTransformer; +import io.airbyte.integrations.destination.NamingConventionTransformer; +import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; import io.airbyte.integrations.destination.jdbc.SqlOperations; import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory; import io.airbyte.integrations.destination.jdbc.copy.CopyDestination; @@ -21,6 +23,12 @@ public class SnowflakeCopyAzureBlobStorageDestination extends CopyDestination { + private final String airbyteEnvironment; + + public SnowflakeCopyAzureBlobStorageDestination(final String airbyteEnvironment) { + this.airbyteEnvironment = airbyteEnvironment; + } + @Override public AirbyteMessageConsumer getConsumer(final JsonNode config, final ConfiguredAirbyteCatalog catalog, @@ -50,7 +58,7 @@ public ExtendedNameTransformer getNameTransformer() { @Override public DataSource getDataSource(final JsonNode config) { - return SnowflakeDatabase.createDataSource(config); + return SnowflakeDatabase.createDataSource(config, airbyteEnvironment); } @Override @@ -63,6 +71,15 @@ public SqlOperations getSqlOperations() { return new SnowflakeSqlOperations(); } + @Override + protected void performCreateInsertTestOnDestination(final String outputSchema, + final JdbcDatabase database, + final NamingConventionTransformer nameTransformer) + throws Exception { + AbstractJdbcDestination.attemptTableOperations(outputSchema, database, nameTransformer, + getSqlOperations(), true); + } + private String getConfiguredSchema(final JsonNode config) { return config.get("schema").asText(); } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java index 
e9ce4c3506e10..dd112b5a51e83 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.java @@ -56,7 +56,7 @@ public class SnowflakeDatabase { private static final String CONNECTION_STRING_IDENTIFIER_KEY = "application"; private static final String CONNECTION_STRING_IDENTIFIER_VAL = "Airbyte_Connector"; - public static HikariDataSource createDataSource(final JsonNode config) { + public static HikariDataSource createDataSource(final JsonNode config, final String airbyteEnvironment) { final HikariDataSource dataSource = new HikariDataSource(); final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", @@ -129,7 +129,7 @@ public static HikariDataSource createDataSource(final JsonNode config) { // https://docs.snowflake.com/en/user-guide/jdbc-parameters.html#application // identify airbyte traffic to snowflake to enable partnership & optimization opportunities - properties.put("application", "airbyte"); + properties.put("application", airbyteEnvironment); // see envs in OssCloudEnvVarConsts class // Needed for JDK17 - see // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow properties.put("JDBC_QUERY_RESULT_FORMAT", "JSON"); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java index eb2080c1c360c..16c44a9245ed4 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.java @@ -4,8 +4,6 @@ package io.airbyte.integrations.destination.snowflake; -import io.airbyte.integrations.base.Destination; -import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.destination.jdbc.copy.SwitchingDestination; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; @@ -21,14 +19,9 @@ enum DestinationType { INTERNAL_STAGING } - public SnowflakeDestination() { - super(DestinationType.class, SnowflakeDestinationResolver::getTypeFromConfig, SnowflakeDestinationResolver.getTypeToDestination()); - } - - public static void main(final String[] args) throws Exception { - final Destination destination = new SnowflakeDestination(); - new IntegrationRunner(destination).run(args); - SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); + public SnowflakeDestination(final String airbyteEnvironment) { + super(DestinationType.class, SnowflakeDestinationResolver::getTypeFromConfig, + SnowflakeDestinationResolver.getTypeToDestination(airbyteEnvironment)); } } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java index 9322b6d5a9841..96b7cfb2df4ef 100644 --- 
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationResolver.java @@ -37,11 +37,12 @@ public static boolean isAzureBlobCopy(final JsonNode config) { && config.get("loading_method").has("azure_blob_storage_account_name"); } - public static Map getTypeToDestination() { - final SnowflakeS3StagingDestination s3StagingDestination = new SnowflakeS3StagingDestination(); - final SnowflakeGcsStagingDestination gcsStagingDestination = new SnowflakeGcsStagingDestination(); - final SnowflakeInternalStagingDestination internalStagingDestination = new SnowflakeInternalStagingDestination(); - final SnowflakeCopyAzureBlobStorageDestination azureBlobStorageDestination = new SnowflakeCopyAzureBlobStorageDestination(); + public static Map getTypeToDestination( + final String airbyteEnvironment) { + final SnowflakeS3StagingDestination s3StagingDestination = new SnowflakeS3StagingDestination(airbyteEnvironment); + final SnowflakeGcsStagingDestination gcsStagingDestination = new SnowflakeGcsStagingDestination(airbyteEnvironment); + final SnowflakeInternalStagingDestination internalStagingDestination = new SnowflakeInternalStagingDestination(airbyteEnvironment); + final SnowflakeCopyAzureBlobStorageDestination azureBlobStorageDestination = new SnowflakeCopyAzureBlobStorageDestination(airbyteEnvironment); return ImmutableMap.of( DestinationType.COPY_S3, s3StagingDestination, diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationRunner.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationRunner.java new file mode 100644 index 0000000000000..b324bff94be70 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationRunner.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.destination.snowflake; + +import static io.airbyte.integrations.destination.snowflake.SnowflakeDestination.SCHEDULED_EXECUTOR_SERVICE; + +import io.airbyte.integrations.base.adaptive.AdaptiveDestinationRunner; + +public class SnowflakeDestinationRunner { + + public static void main(final String[] args) throws Exception { + AdaptiveDestinationRunner.baseOnEnv() + .withOssDestination(() -> new SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_OSS)) + .withCloudDestination(() -> new SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_CLOUD)) + .run(args); + SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); + } + +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingDestination.java index ad8e8f0456ddb..af5fb0c98e732 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeGcsStagingDestination.java @@ -40,13 +40,15 @@ public class SnowflakeGcsStagingDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeGcsStagingDestination.class); + private String airbyteEnvironment; - public SnowflakeGcsStagingDestination() { - this(new SnowflakeSQLNameTransformer()); + public SnowflakeGcsStagingDestination(final String airbyteEnvironment) { + this(new SnowflakeSQLNameTransformer(), airbyteEnvironment); } - public SnowflakeGcsStagingDestination(final SnowflakeSQLNameTransformer nameTransformer) { + public SnowflakeGcsStagingDestination(final SnowflakeSQLNameTransformer nameTransformer, final String airbyteEnvironment) { super("", nameTransformer, new SnowflakeSqlOperations()); + this.airbyteEnvironment = airbyteEnvironment; } @Override @@ -60,7 +62,9 @@ public AirbyteConnectionStatus check(final JsonNode config) { try { final JdbcDatabase database = getDatabase(dataSource); final String outputSchema = super.getNamingResolver().getIdentifier(config.get("schema").asText()); - attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, snowflakeGcsStagingSqlOperations); + + attemptTableOperations(outputSchema, database, nameTransformer, snowflakeGcsStagingSqlOperations, + true); attemptWriteAndDeleteGcsObject(gcsConfig, outputSchema); return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); @@ -99,7 +103,7 @@ public static Storage getStorageClient(final GcsConfig gcsConfig) throws IOExcep @Override protected DataSource getDataSource(final JsonNode config) { - return SnowflakeDatabase.createDataSource(config); + return SnowflakeDatabase.createDataSource(config, airbyteEnvironment); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java deleted file mode 100644 index 1c394eeed505e..0000000000000 --- 
a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestination.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.destination.snowflake; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import io.airbyte.db.jdbc.JdbcDatabase; -import io.airbyte.integrations.base.Destination; -import io.airbyte.integrations.destination.jdbc.AbstractJdbcDestination; -import java.util.Collections; -import java.util.Map; -import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SnowflakeInsertDestination extends AbstractJdbcDestination implements Destination { - - private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDestination.class); - - public SnowflakeInsertDestination() { - // the driver class is a no op because we override getDatabase. - super("", new SnowflakeSQLNameTransformer(), new SnowflakeSqlOperations()); - } - - @Override - protected DataSource getDataSource(final JsonNode config) { - return SnowflakeDatabase.createDataSource(config); - } - - @Override - protected JdbcDatabase getDatabase(final DataSource dataSource) { - return SnowflakeDatabase.getDatabase(dataSource); - } - - @Override - protected Map getDefaultConnectionProperties(final JsonNode config) { - return Collections.emptyMap(); - } - - // this is a no op since we override getDatabase. - @Override - public JsonNode toJdbcConfig(final JsonNode config) { - return Jsons.emptyObject(); - } - -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java index c5339360faa11..115c7cbf644dc 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingDestination.java @@ -29,13 +29,15 @@ public class SnowflakeInternalStagingDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeInternalStagingDestination.class); + private String airbyteEnvironment; - public SnowflakeInternalStagingDestination() { - this(new SnowflakeSQLNameTransformer()); + public SnowflakeInternalStagingDestination(final String airbyteEnvironment) { + this(new SnowflakeSQLNameTransformer(), airbyteEnvironment); } - public SnowflakeInternalStagingDestination(final NamingConventionTransformer nameTransformer) { + public SnowflakeInternalStagingDestination(final NamingConventionTransformer nameTransformer, final String airbyteEnvironment) { super("", nameTransformer, new SnowflakeInternalStagingSqlOperations(nameTransformer)); + this.airbyteEnvironment = airbyteEnvironment; } @Override @@ -46,8 +48,9 @@ public AirbyteConnectionStatus check(final JsonNode config) { try { final JdbcDatabase database = getDatabase(dataSource); final String outputSchema = nameTransformer.getIdentifier(config.get("schema").asText()); - attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, 
snowflakeInternalStagingSqlOperations); - attemptSQLCreateAndDropStages(outputSchema, database, nameTransformer, snowflakeInternalStagingSqlOperations); + attemptTableOperations(outputSchema, database, nameTransformer, + snowflakeInternalStagingSqlOperations, true); + attemptStageOperations(outputSchema, database, nameTransformer, snowflakeInternalStagingSqlOperations); return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); } catch (final Exception e) { LOGGER.error("Exception while checking connection: ", e); @@ -63,10 +66,10 @@ public AirbyteConnectionStatus check(final JsonNode config) { } } - private static void attemptSQLCreateAndDropStages(final String outputSchema, - final JdbcDatabase database, - final NamingConventionTransformer namingResolver, - final SnowflakeInternalStagingSqlOperations sqlOperations) + private static void attemptStageOperations(final String outputSchema, + final JdbcDatabase database, + final NamingConventionTransformer namingResolver, + final SnowflakeInternalStagingSqlOperations sqlOperations) throws Exception { // verify we have permissions to create/drop stage @@ -78,7 +81,7 @@ private static void attemptSQLCreateAndDropStages(final String outputSchema, @Override protected DataSource getDataSource(final JsonNode config) { - return SnowflakeDatabase.createDataSource(config); + return SnowflakeDatabase.createDataSource(config, airbyteEnvironment); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingDestination.java b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingDestination.java index c34a87ddc6fd3..264ce665e2df2 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingDestination.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeS3StagingDestination.java @@ -34,13 +34,15 @@ public class SnowflakeS3StagingDestination extends AbstractJdbcDestination implements Destination { private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeS3StagingDestination.class); + private String airbyteEnvironment; - public SnowflakeS3StagingDestination() { - this(new SnowflakeSQLNameTransformer()); + public SnowflakeS3StagingDestination(final String airbyteEnvironment) { + this(new SnowflakeSQLNameTransformer(), airbyteEnvironment); } - public SnowflakeS3StagingDestination(final SnowflakeSQLNameTransformer nameTransformer) { + public SnowflakeS3StagingDestination(final SnowflakeSQLNameTransformer nameTransformer, final String airbyteEnvironment) { super("", nameTransformer, new SnowflakeSqlOperations()); + this.airbyteEnvironment = airbyteEnvironment; } @Override @@ -60,8 +62,9 @@ public AirbyteConnectionStatus check(final JsonNode config) { try { final JdbcDatabase database = getDatabase(dataSource); final String outputSchema = super.getNamingResolver().getIdentifier(config.get("schema").asText()); - attemptSQLCreateAndDropTableOperations(outputSchema, database, nameTransformer, snowflakeS3StagingSqlOperations); - attemptSQLCreateAndDropStages(outputSchema, database, nameTransformer, snowflakeS3StagingSqlOperations); + attemptTableOperations(outputSchema, database, nameTransformer, snowflakeS3StagingSqlOperations, + true); + attemptStageOperations(outputSchema, database, 
nameTransformer, snowflakeS3StagingSqlOperations); return new AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); } catch (final Exception e) { LOGGER.error("Exception while checking connection: ", e); @@ -77,10 +80,10 @@ public AirbyteConnectionStatus check(final JsonNode config) { } } - private static void attemptSQLCreateAndDropStages(final String outputSchema, - final JdbcDatabase database, - final NamingConventionTransformer namingResolver, - final SnowflakeS3StagingSqlOperations sqlOperations) + private static void attemptStageOperations(final String outputSchema, + final JdbcDatabase database, + final NamingConventionTransformer namingResolver, + final SnowflakeS3StagingSqlOperations sqlOperations) throws Exception { // verify we have permissions to create/drop stage @@ -92,7 +95,7 @@ private static void attemptSQLCreateAndDropStages(final String outputSchema, @Override protected DataSource getDataSource(final JsonNode config) { - return SnowflakeDatabase.createDataSource(config); + return SnowflakeDatabase.createDataSource(config, airbyteEnvironment); } @Override diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java index 33491e8886674..7995f460153ff 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.java @@ -33,7 +33,7 @@ void testCheckFailsWithInvalidPermissions() throws Exception { // this connector should be updated with multiple credentials, each with a clear purpose (valid, // invalid: insufficient permissions, invalid: wrong password, etc..) 
final JsonNode credentialsJsonString = Jsons.deserialize(Files.readString(Paths.get("secrets/config.json"))); - final AirbyteConnectionStatus check = new SnowflakeDestination().check(credentialsJsonString); + final AirbyteConnectionStatus check = new SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_OSS).check(credentialsJsonString); assertEquals(AirbyteConnectionStatus.Status.FAILED, check.getStatus()); } @@ -41,7 +41,7 @@ void testCheckFailsWithInvalidPermissions() throws Exception { public void testInvalidSchemaName() throws Exception { final JsonNode config = getConfig(); final String schema = config.get("schema").asText(); - final DataSource dataSource = SnowflakeDatabase.createDataSource(config); + final DataSource dataSource = SnowflakeDatabase.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS); try { final JdbcDatabase database = SnowflakeDatabase.getDatabase(dataSource); assertDoesNotThrow(() -> syncWithNamingResolver(database, schema)); diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java index 44c675f2c31de..8e4367bc3e85b 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/java/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.destination.snowflake; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -14,6 +15,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.commons.string.Strings; +import io.airbyte.config.StandardCheckConnectionOutput; import io.airbyte.config.StandardCheckConnectionOutput.Status; import io.airbyte.db.factory.DataSourceFactory; import io.airbyte.db.jdbc.JdbcDatabase; @@ -41,6 +43,11 @@ public class SnowflakeInsertDestinationAcceptanceTest extends DestinationAcceptanceTest { private static final NamingConventionTransformer NAME_TRANSFORMER = new SnowflakeSQLNameTransformer(); + protected static final String NO_ACTIVE_WAREHOUSE_ERR_MSG = + "No active warehouse selected in the current session. 
Select an active warehouse with the 'use warehouse' command."; + + protected static final String NO_USER_PRIVILEGES_ERR_MSG = + "Schema 'TEXT_SCHEMA' already exists, but current role has no privileges on it."; // this config is based on the static config, and it contains a random // schema name that is different for each test run @@ -166,7 +173,7 @@ protected void setup(final TestDestinationEnv testEnv) throws Exception { this.config = Jsons.clone(getStaticConfig()); ((ObjectNode) config).put("schema", schemaName); - dataSource = SnowflakeDatabase.createDataSource(config); + dataSource = SnowflakeDatabase.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS); database = SnowflakeDatabase.getDatabase(dataSource); database.execute(createSchemaQuery); } @@ -178,6 +185,30 @@ protected void tearDown(final TestDestinationEnv testEnv) throws Exception { DataSourceFactory.close(dataSource); } + @Test + public void testCheckWithNoActiveWarehouseConnection() throws Exception { + // Config to user(creds) that has no warehouse assigned + final JsonNode config = Jsons.deserialize(IOs.readFile( + Path.of("secrets/internal_staging_config_no_active_warehouse.json"))); + + StandardCheckConnectionOutput standardCheckConnectionOutput = runCheck(config); + + assertEquals(Status.FAILED, standardCheckConnectionOutput.getStatus()); + assertThat(standardCheckConnectionOutput.getMessage()).contains(NO_ACTIVE_WAREHOUSE_ERR_MSG); + } + + @Test + public void testCheckWithNoTextSchemaPermissionConnection() throws Exception { + // Config to user (creds) that has no permission to schema + final JsonNode config = Jsons.deserialize(IOs.readFile( + Path.of("secrets/config_no_text_schema_permission.json"))); + + StandardCheckConnectionOutput standardCheckConnectionOutput = runCheck(config); + + assertEquals(Status.FAILED, standardCheckConnectionOutput.getStatus()); + assertThat(standardCheckConnectionOutput.getMessage()).contains(NO_USER_PRIVILEGES_ERR_MSG); + } + @Test public void testBackwardCompatibilityAfterAddingOauth() { final JsonNode deprecatedStyleConfig = Jsons.clone(config); @@ -192,7 +223,7 @@ public void testBackwardCompatibilityAfterAddingOauth() { @Test void testCheckWithKeyPairAuth() throws Exception { final JsonNode credentialsJsonString = Jsons.deserialize(IOs.readFile(Path.of("secrets/config_key_pair.json"))); - final AirbyteConnectionStatus check = new SnowflakeDestination().check(credentialsJsonString); + final AirbyteConnectionStatus check = new SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_OSS).check(credentialsJsonString); assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, check.getStatus()); } diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java index 2578ca4282858..c13737d8818cf 100644 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbDestinationTest.java @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.integrations.destination.yugabytedb; import static org.assertj.core.api.Assertions.assertThat; @@ -10,41 +14,41 @@ class YugabytedbDestinationTest { - private YugabytedbDestination yugabytedbDestination; + private YugabytedbDestination yugabytedbDestination; - @BeforeEach - void setup() { - yugabytedbDestination = new YugabytedbDestination(); - } + @BeforeEach + void setup() { + yugabytedbDestination = new YugabytedbDestination(); + } - @Test - void testToJdbcConfig() { + @Test + void testToJdbcConfig() { - var config = Jsons.jsonNode(ImmutableMap.builder() - .put("host", "localhost") - .put("port", 5433) - .put("database", "yugabyte") - .put("username", "yugabyte") - .put("password", "yugabyte") - .put("schema", "public") - .build()); + var config = Jsons.jsonNode(ImmutableMap.builder() + .put("host", "localhost") + .put("port", 5433) + .put("database", "yugabyte") + .put("username", "yugabyte") + .put("password", "yugabyte") + .put("schema", "public") + .build()); - var jdbcConfig = yugabytedbDestination.toJdbcConfig(config); + var jdbcConfig = yugabytedbDestination.toJdbcConfig(config); - assertThat(jdbcConfig.get("schema").asText()).isEqualTo("public"); - assertThat(jdbcConfig.get("username").asText()).isEqualTo("yugabyte"); - assertThat(jdbcConfig.get("password").asText()).isEqualTo("yugabyte"); - assertThat(jdbcConfig.get("jdbc_url").asText()).isEqualTo("jdbc:yugabytedb://localhost:5433/yugabyte"); + assertThat(jdbcConfig.get("schema").asText()).isEqualTo("public"); + assertThat(jdbcConfig.get("username").asText()).isEqualTo("yugabyte"); + assertThat(jdbcConfig.get("password").asText()).isEqualTo("yugabyte"); + assertThat(jdbcConfig.get("jdbc_url").asText()).isEqualTo("jdbc:yugabytedb://localhost:5433/yugabyte"); - } + } - @Test - void testGetDefaultConnectionProperties() { + @Test + void testGetDefaultConnectionProperties() { - var map = yugabytedbDestination.getDefaultConnectionProperties(Jsons.jsonNode(Collections.emptyMap())); + var map = yugabytedbDestination.getDefaultConnectionProperties(Jsons.jsonNode(Collections.emptyMap())); - assertThat(map).isEmpty(); + assertThat(map).isEmpty(); - } + } } diff --git a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java index 5565bc9d2ef95..a27f79206675b 100644 --- a/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java +++ b/airbyte-integrations/connectors/destination-yugabytedb/src/test/java/io/airbyte/integrations/destination/yugabytedb/YugabytedbNamingTransformerTest.java @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + package io.airbyte.integrations.destination.yugabytedb; import static org.assertj.core.api.Assertions.assertThat; @@ -7,21 +11,20 @@ class YugabytedbNamingTransformerTest { - private YugabytedbNamingTransformer yugabytedbNamingTransformer; - - @BeforeEach - void setup() { - yugabytedbNamingTransformer = new YugabytedbNamingTransformer(); - } + private YugabytedbNamingTransformer yugabytedbNamingTransformer; - @Test - void testApplyDefaultCase() { + @BeforeEach + void setup() { + yugabytedbNamingTransformer = new YugabytedbNamingTransformer(); + } - var defaultCase = yugabytedbNamingTransformer.applyDefaultCase("DEFAULT_CASE"); + @Test + void testApplyDefaultCase() { - assertThat(defaultCase).isEqualTo("default_case"); + var defaultCase = yugabytedbNamingTransformer.applyDefaultCase("DEFAULT_CASE"); - } + assertThat(defaultCase).isEqualTo("default_case"); + } } diff --git a/airbyte-integrations/connectors/source-aha/.dockerignore b/airbyte-integrations/connectors/source-aha/.dockerignore new file mode 100644 index 0000000000000..b2f40d77950bf --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_aha +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-aha/Dockerfile b/airbyte-integrations/connectors/source-aha/Dockerfile new file mode 100644 index 0000000000000..4e35b33f850fb --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_aha ./source_aha + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-aha diff --git a/airbyte-integrations/connectors/source-aha/README.md b/airbyte-integrations/connectors/source-aha/README.md new file mode 100644 index 0000000000000..16158b89d2db9 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/README.md @@ -0,0 +1,79 @@ +# Aha Source + +This is the repository for the Aha configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/aha). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
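As a lighter-weight alternative to the Gradle and Docker workflows in this README, the connector can also be exercised straight through the Airbyte CDK entrypoint, the same way the connector's `main.py` does. The snippet below is only an illustrative sketch, not part of the connector code: it assumes the connector package has been installed locally (for example with `pip install -e .` from the connector directory) and that a `secrets/config.json` has been created as described in the next section.
```
# Illustrative local smoke test for the Aha source (a sketch, not shipped code).
# Assumes `pip install -e .` has been run in this directory and secrets/config.json exists.
from airbyte_cdk.entrypoint import launch

from source_aha import SourceAha

if __name__ == "__main__":
    # Print the connector specification, then validate the supplied credentials;
    # both commands emit Airbyte protocol messages on stdout, just as main.py would.
    launch(SourceAha(), ["spec"])
    launch(SourceAha(), ["check", "--config", "secrets/config.json"])
```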
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-aha:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/aha) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_aha/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source aha test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-aha:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-aha:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-aha:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-aha:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-aha:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-aha:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-aha:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-aha:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. 
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-aha/__init__.py b/airbyte-integrations/connectors/source-aha/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml b/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml new file mode 100644 index 0000000000000..ccf5036734e4a --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-aha:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_aha/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-aha/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-aha/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-aha/build.gradle b/airbyte-integrations/connectors/source-aha/build.gradle new file mode 100644 index 0000000000000..a4363e64843c1 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_aha' +} diff --git a/airbyte-integrations/connectors/source-aha/integration_tests/__init__.py b/airbyte-integrations/connectors/source-aha/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-aha/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-aha/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-aha/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-aha/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-aha/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-aha/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..5e433039fa1de --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "features", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "ideas", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "goals", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-aha/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-aha/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..e41b4cc7edc79 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "abb74d48e6d2dadcc1f0bb", + "url": "https://905c7285-30d2-4419-a9e4-8eef125ce6c1.aha.io" +} diff --git a/airbyte-integrations/connectors/source-aha/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-aha/integration_tests/sample_config.json new file mode 100644 index 0000000000000..c13f52d73c36a --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "Your API key", + "url": "Your Aha URL Instance" +} diff --git a/airbyte-integrations/connectors/source-aha/main.py b/airbyte-integrations/connectors/source-aha/main.py new file mode 100644 index 0000000000000..feff5fa4fea7e --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_aha import SourceAha + +if __name__ == "__main__": + source = SourceAha() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-aha/requirements.txt b/airbyte-integrations/connectors/source-aha/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-aha/setup.py b/airbyte-integrations/connectors/source-aha/setup.py new file mode 100644 index 0000000000000..58773a233b863 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_aha", + description="Source implementation for Aha.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-aha/source_aha/__init__.py b/airbyte-integrations/connectors/source-aha/source_aha/__init__.py new file mode 100644 index 0000000000000..8d8a64a1ce882 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceAha + +__all__ = ["SourceAha"] diff --git a/airbyte-integrations/connectors/source-aha/source_aha/aha.yaml b/airbyte-integrations/connectors/source-aha/source_aha/aha.yaml new file mode 100644 index 0000000000000..9dd3fd470f69f --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/aha.yaml @@ -0,0 +1,109 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + selector_features: + extractor: + field_pointer: ["features"] + selector_products: + extractor: + field_pointer: ["products"] + selector_ideas: + extractor: + field_pointer: ["ideas"] + selector_users: + extractor: + field_pointer: ["users"] + selector_goals: + extractor: + field_pointer: ["goals"] + requester: + url_base: "{{ config['url'] }}/api/v1" + http_method: "GET" + authenticator: + type: "BearerAuthenticator" + api_token: "{{ config['api_key'] }}" + increment_paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "per_page" + pagination_strategy: + type: "PageIncrement" + page_size: 5 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + requester: + $ref: "*ref(definitions.requester)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + features_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_features)" + $options: + name: "features" + path: "/features" + products_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_products)" + $options: + name: "products" + path: "/products" + ideas_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_ideas)" + $options: + name: "ideas" + path: "/ideas" + users_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_users)" + $options: + name: "users" + path: "/users" + goals_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_goals)" + $options: + name: "goals" + path: "/goals" + +streams: + - "*ref(definitions.features_stream)" + - 
"*ref(definitions.products_stream)" + - "*ref(definitions.ideas_stream)" + - "*ref(definitions.users_stream)" + - "*ref(definitions.goals_stream)" + +check: + stream_names: + - "features" + - "products" + - "ideas" + - "users" + - "goals" diff --git a/airbyte-integrations/connectors/source-aha/source_aha/schemas/features.json b/airbyte-integrations/connectors/source-aha/source_aha/schemas/features.json new file mode 100644 index 0000000000000..191cda4b85d70 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/schemas/features.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "reference_num": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "resource": { + "type": ["null", "string"] + }, + "product_id": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-aha/source_aha/schemas/goals.json b/airbyte-integrations/connectors/source-aha/source_aha/schemas/goals.json new file mode 100644 index 0000000000000..f790fd5f716f1 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/schemas/goals.json @@ -0,0 +1,72 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "reference_num": { + "type": ["null", "string"] + }, + "effort": { + "type": ["null", "number"] + }, + "value": { + "type": ["null", "number"] + }, + "position": { + "type": ["null", "number"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "product_id": { + "type": ["null", "string"] + }, + "progress": { + "type": ["null", "string"] + }, + "progress_source": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "resource": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "object"] + }, + "success_metric": { + "type": ["null", "object"] + }, + "project": { + "type": ["null", "object"] + }, + "timeframe": { + "type": ["null", "object"] + }, + "initiatives": { + "type": ["null", "array"] + }, + "comments_count": { + "type": ["null", "integer"] + }, + "features": { + "type": ["null", "array"] + }, + "releases": { + "type": ["null", "array"] + }, + "custom_fields": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-aha/source_aha/schemas/ideas.json b/airbyte-integrations/connectors/source-aha/source_aha/schemas/ideas.json new file mode 100644 index 0000000000000..92e79e7e8d7dd --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/schemas/ideas.json @@ -0,0 +1,61 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "reference_num": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "workflow_status": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "position": { + "type": ["null", "integer"] + }, + "complete": { + "type": ["null", "boolean"] + }, + "color": { + "type": ["null", "string"] + } + } + }, + "description": { + "type": "object", + 
"properties": { + "id": { + "type": ["null", "string"] + }, + "body": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "attachments": { + "type": ["null", "array"] + } + } + }, + "url": { + "type": ["null", "string"] + }, + "resource": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-aha/source_aha/schemas/products.json b/airbyte-integrations/connectors/source-aha/source_aha/schemas/products.json new file mode 100644 index 0000000000000..69e4f907710bd --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/schemas/products.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "reference_prefix": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "product_line": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "string"] + }, + "workspace_type": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-aha/source_aha/schemas/users.json b/airbyte-integrations/connectors/source-aha/source_aha/schemas/users.json new file mode 100644 index 0000000000000..df54138e9695b --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/schemas/users.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "accessed_at": { + "type": ["null", "string"] + }, + "product_roles": { + "type": ["null", "array"] + }, + "enabled": { + "type": ["null", "boolean"] + }, + "paid_seat": { + "type": ["null", "boolean"] + }, + "administrator": { + "type": ["null", "boolean"] + }, + "administrator_roles": { + "type": ["null", "object"] + }, + "identity_provider": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-aha/source_aha/source.py b/airbyte-integrations/connectors/source-aha/source_aha/source.py new file mode 100644 index 0000000000000..e91575fa8cd90 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceAha(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "aha.yaml"}) diff --git a/airbyte-integrations/connectors/source-aha/source_aha/spec.yaml b/airbyte-integrations/connectors/source-aha/source_aha/spec.yaml new file mode 100644 index 0000000000000..fc55fc9176f71 --- /dev/null +++ b/airbyte-integrations/connectors/source-aha/source_aha/spec.yaml @@ -0,0 +1,18 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/aha +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Aha Spec + type: object + required: + - api_key + - url + additionalProperties: true + properties: + api_key: + type: string + description: API Key + title: API Bearer Token + url: + type: string + description: URL + title: Aha Url Instance diff --git a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile index 68fcde896d8f7..a47bfb1d7151e 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-amazon-ads/Dockerfile @@ -13,5 +13,5 @@ ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.24 +LABEL io.airbyte.version=0.1.25 LABEL io.airbyte.name=airbyte/source-amazon-ads diff --git a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json index d1ba1851a4b5f..9400deac956e6 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-amazon-ads/integration_tests/spec.json @@ -38,33 +38,17 @@ "default": "NA", "order": 4 }, - "report_wait_timeout": { - "title": "Report Wait Timeout", - "description": "Timeout duration in minutes for Reports. Default is 60 minutes.", - "default": 60, - "examples": [60, 120], - "order": 5, - "type": "integer" - }, - "report_generation_max_retries": { - "title": "Report Generation Maximum Retries", - "description": "Maximum retries Airbyte will attempt for fetching report data. Default is 5.", - "default": 5, - "examples": [5, 10, 15], - "order": 6, - "type": "integer" - }, "start_date": { "title": "Start Date", "description": "The Start date for collecting reports, should not be more than 60 days in the past. In YYYY-MM-DD format", "examples": ["2022-10-10", "2022-10-22"], - "order": 7, + "order": 5, "type": "string" }, "profiles": { "title": "Profile IDs", "description": "Profile IDs you want to fetch data for. See docs for more details.", - "order": 8, + "order": 6, "type": "array", "items": { "type": "integer" @@ -79,7 +63,7 @@ }, "type": "array", "uniqueItems": true, - "order": 9 + "order": 7 } }, "required": ["client_id", "client_secret", "refresh_token"], diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml index 5523b774e86fc..d77e1120f72fb 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/spec.yaml @@ -46,27 +46,6 @@ connectionSpecification: type: string default: NA order: 4 - report_wait_timeout: - title: Report Wait Timeout - description: Timeout duration in minutes for Reports. Default is 60 minutes. 
- default: 60 - examples: - - 60 - - 120 - order: 5 - type: integer - report_generation_max_retries: - title: Report Generation Maximum Retries - description: - Maximum retries Airbyte will attempt for fetching report data. - Default is 5. - default: 5 - examples: - - 5 - - 10 - - 15 - order: 6 - type: integer start_date: title: Start Date description: @@ -75,14 +54,14 @@ connectionSpecification: examples: - "2022-10-10" - "2022-10-22" - order: 7 + order: 5 type: string profiles: title: Profile IDs description: Profile IDs you want to fetch data for. See docs for more details. - order: 8 + order: 6 type: array items: type: integer @@ -97,7 +76,7 @@ connectionSpecification: - archived type: array uniqueItems: true - order: 9 + order: 7 required: - client_id - client_secret diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py index 9945f65edbe59..c6228d52d334e 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/report_streams.py @@ -22,7 +22,7 @@ from pydantic import BaseModel from source_amazon_ads.schemas import CatalogModel, MetricsReport, Profile from source_amazon_ads.streams.common import BasicAmazonAdsStream -from source_amazon_ads.utils import iterate_one_by_one +from source_amazon_ads.utils import get_typed_env, iterate_one_by_one class RecordType(str, Enum): @@ -112,14 +112,16 @@ class ReportStream(BasicAmazonAdsStream, ABC): ] def __init__(self, config: Mapping[str, Any], profiles: List[Profile], authenticator: Oauth2Authenticator): + super().__init__(config, profiles) self._state = {} self._authenticator = authenticator self._session = requests.Session() self._model = self._generate_model() - self.report_wait_timeout = config.get("report_wait_timeout", 60) - self.report_generation_maximum_retries = config.get("report_generation_max_retries", 5) self._start_date: Optional[Date] = config.get("start_date") - super().__init__(config, profiles) + # Timeout duration in minutes for Reports. Default is 180 minutes. + self.report_wait_timeout: int = get_typed_env("REPORT_WAIT_TIMEOUT", 180) + # Maximum retries Airbyte will attempt for fetching report data. Default is 5. + self.report_generation_maximum_retries: int = get_typed_env("REPORT_GENERATION_MAX_RETRIES", 5) @property def model(self) -> CatalogModel: @@ -407,7 +409,7 @@ def _download_report(self, report_info: ReportInfo, url: str) -> List[dict]: def get_error_display_message(self, exception: BaseException) -> Optional[str]: if isinstance(exception, ReportGenerationInProgress): - return f'Report(s) generation time took more than {self.report_wait_timeout} minutes, please increase the "report_wait_timeout" parameter in configuration.' + return f"Report(s) generation time took more than {self.report_wait_timeout} minutes and failed because of Amazon API issues. Please wait some time and run synchronization again." 
return super().get_error_display_message(exception) def _get_response_error_details(self, response) -> Optional[str]: diff --git a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/utils.py b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/utils.py index caa66c1d13bbb..881567c04c9a1 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/utils.py +++ b/airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/utils.py @@ -2,6 +2,12 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # +import logging +import os +from typing import Union + +logger = logging.getLogger("airbyte") + def iterate_one_by_one(*iterables): iterables = list(iterables) @@ -13,3 +19,14 @@ def iterate_one_by_one(*iterables): pass else: iterables.append(iterable) + + +def get_typed_env(name: str, default: Union[str, int]) -> Union[str, int]: + convert = type(default) + assert convert in [str, int] + value = os.environ.get(name, default) + try: + return convert(value) + except ValueError: + logger.warning(f"Cannot convert environment variable {name}={value!r} to type {convert}") + return default diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py index f53a10777ae06..a72a5f709e33f 100644 --- a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_report_streams.py @@ -244,7 +244,7 @@ def test_display_report_stream_init_too_many_requests(mocker, config): ), ( [ - (lambda x: x > 5, None, "2021-01-02 04:04:05"), + (lambda x: x > 5, None, "2021-01-02 06:04:05"), ], ReportGenerationInProgress, ), @@ -259,11 +259,11 @@ def test_display_report_stream_init_too_many_requests(mocker, config): ( [ (lambda x: True, "FAILURE", None), - (lambda x: x >= 10, None, "2021-01-02 04:04:05"), - (lambda x: x >= 15, None, "2021-01-02 05:04:05"), - (lambda x: x >= 20, None, "2021-01-02 06:04:05"), - (lambda x: x >= 25, None, "2021-01-02 07:04:05"), - (lambda x: x >= 30, None, "2021-01-02 08:04:05"), + (lambda x: x >= 10, None, "2021-01-02 06:04:05"), + (lambda x: x >= 15, None, "2021-01-02 09:04:05"), + (lambda x: x >= 20, None, "2021-01-02 12:04:05"), + (lambda x: x >= 25, None, "2021-01-02 15:04:05"), + (lambda x: x >= 30, None, "2021-01-02 18:04:05"), ], ReportGenerationFailure, ), diff --git a/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_utils.py b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_utils.py new file mode 100644 index 0000000000000..6c488e674425a --- /dev/null +++ b/airbyte-integrations/connectors/source-amazon-ads/unit_tests/test_utils.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from source_amazon_ads.utils import get_typed_env + + +def test_get_typed_env(monkeypatch): + assert get_typed_env("REPORT_WAIT_TIMEOUT", 180) == 180 + assert get_typed_env("BOOLEAN_PARAM", "1") == "1" + assert get_typed_env("STRING_PARAM", "string") == "string" + monkeypatch.setenv("REPORT_WAIT_TIMEOUT", "60") + assert get_typed_env("REPORT_WAIT_TIMEOUT", 180) == 60 + monkeypatch.setenv("REPORT_WAIT_TIMEOUT", "60") + assert get_typed_env("REPORT_WAIT_TIMEOUT", "180") == "60" + monkeypatch.setenv("REPORT_WAIT_TIMEOUT", "string") + assert get_typed_env("REPORT_WAIT_TIMEOUT", 180) == 180 diff --git a/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml new file mode 100644 index 0000000000000..d634ced570a54 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-bigquery:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-bigquery/build.gradle b/airbyte-integrations/connectors/source-bigquery/build.gradle index be8411702ae59..5c6168f9a9718 100644 --- a/airbyte-integrations/connectors/source-bigquery/build.gradle +++ b/airbyte-integrations/connectors/source-bigquery/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java b/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java index b1a520a39585f..cf8c8bd80a226 100644 --- a/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java +++ b/airbyte-integrations/connectors/source-bigquery/src/main/java/io/airbyte/integrations/source/bigquery/BigQuerySource.java @@ -153,9 +153,9 @@ public AutoCloseableIterator queryTableIncremental(final BigQueryDatab @Override protected AutoCloseableIterator queryTableFullRefresh(final BigQueryDatabase database, - final List columnNames, - final String schemaName, - final String tableName) { + final List columnNames, + final String schemaName, + final String tableName) { LOGGER.info("Queueing query for table: {}", tableName); return queryTable(database, String.format("SELECT %s FROM %s", enquoteIdentifierList(columnNames, getQuoteString()), diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..3086a588fab55 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "dataset_id": "dataset", + "project_id": "project", + "credentials_json": "credentials" +} diff --git a/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..ac749a77def87 --- /dev/null +++ b/airbyte-integrations/connectors/source-bigquery/src/test-integration/resources/expected_spec.json @@ -0,0 +1,32 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/bigquery", + "supportsIncremental": true, + "supportsNormalization": true, + "supportsDBT": true, + "supported_destination_sync_modes": [], + "supported_sync_modes": ["overwrite", "append", "append_dedup"], + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "BigQuery Source Spec", + "type": "object", + "required": ["project_id", "credentials_json"], + "properties": { + "project_id": { + "type": "string", + "description": "The GCP project ID for the project containing the target BigQuery dataset.", + "title": "Project ID" + }, + "dataset_id": { + "type": "string", + "description": "The dataset ID to search for tables and views. If you are only loading data from one dataset, setting this option could result in much faster schema discovery.", + "title": "Default Dataset ID" + }, + "credentials_json": { + "type": "string", + "description": "The contents of your Service Account Key JSON file. 
See the docs for more information on how to obtain this key.", + "title": "Credentials JSON", + "airbyte_secret": true + } + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/.dockerignore b/airbyte-integrations/connectors/source-breezometer/.dockerignore new file mode 100644 index 0000000000000..d7f230e92a77f --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_breezometer +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-breezometer/Dockerfile b/airbyte-integrations/connectors/source-breezometer/Dockerfile new file mode 100644 index 0000000000000..818b51f63f097 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_breezometer ./source_breezometer + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-breezometer diff --git a/airbyte-integrations/connectors/source-breezometer/README.md b/airbyte-integrations/connectors/source-breezometer/README.md new file mode 100644 index 0000000000000..6eac3506be21c --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/README.md @@ -0,0 +1,79 @@ +# Breezometer Source + +This is the repository for the Breezometer configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/breezometer). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-breezometer:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/breezometer) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_breezometer/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source breezometer test creds` +and place them into `secrets/config.json`. 
+ + ### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-breezometer:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-breezometer:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-breezometer:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-breezometer:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-breezometer:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-breezometer:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, run the `acceptance-test-docker.sh` script. + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-breezometer:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-breezometer:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-breezometer/__init__.py b/airbyte-integrations/connectors/source-breezometer/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-breezometer/acceptance-test-config.yml b/airbyte-integrations/connectors/source-breezometer/acceptance-test-config.yml new file mode 100644 index 0000000000000..778fb3bea9a4a --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-breezometer:dev +tests: + spec: + - spec_path: "source_breezometer/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-breezometer/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-breezometer/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-breezometer/bootstrap.md b/airbyte-integrations/connectors/source-breezometer/bootstrap.md new file mode 100644 index 0000000000000..5417cf9a43aa9 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/bootstrap.md @@ -0,0 +1,20 @@ +# Breezometer + +## Overview + +The Breezometer connector lets you request environmental information such as air quality, pollen forecasts, current and forecasted weather, and wildfires for a specific location. + +## Authentication + +Breezometer uses a token to authenticate. Every BreezoMeter account is assigned an API Key. The key stores permissions, rate limits, daily quota, API usage, and billing information associated with an account. To send an authenticated API request, add the `key` parameter with your API key to the request URL. For more information, consult the [documentation](https://docs.breezometer.com/api-documentation/introduction/#authentication).
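The declarative streams in `breezometer.yaml` follow this pattern by injecting the API key as the `key` query parameter alongside `lat` and `lon`. As a rough illustration only (not part of the connector code), an equivalent standalone request might look like the sketch below; the placeholder key and the example coordinates are assumptions taken from the sample config.

```python
# Illustrative sketch of Breezometer's query-parameter authentication;
# the endpoint and parameter names mirror breezometer.yaml, the values are placeholders.
import requests

response = requests.get(
    "https://api.breezometer.com/air-quality/v2/current-conditions",
    params={
        "key": "<your-api-key>",  # maps to `api_key` in the connector config
        "lat": "54.675003",       # maps to `latitude`
        "lon": "-113.550282",     # maps to `longitude`
    },
)
response.raise_for_status()
records = response.json()["data"]  # the record selector reads the `data` field
print(records)
```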
+ +## Endpoints + +- [Air Quality - Current](https://docs.breezometer.com/api-documentation/air-quality-api/v2/#current-conditions) +- [Air Quality - Forecast](https://docs.breezometer.com/api-documentation/air-quality-api/v2/#hourly-forecast) +- [Air Quality - Historical](https://docs.breezometer.com/api-documentation/air-quality-api/v2/#hourly-history) +- [Pollen - Forecast](https://docs.breezometer.com/api-documentation/pollen-api/v2/#daily-forecast) +- [Weather - Current](https://docs.breezometer.com/api-documentation/weather-api/v1/#current-conditions) +- [Weather - Forecast](https://docs.breezometer.com/api-documentation/weather-api/v1/#hourly-forecast) +- [Wildfire - Burnt Area](https://docs.breezometer.com/api-documentation/wildfire-tracker-api/v1/#burnt-area-api) +- [Wildfire - Locate](https://docs.breezometer.com/api-documentation/wildfire-tracker-api/v1/#current-conditions) diff --git a/airbyte-integrations/connectors/source-breezometer/build.gradle b/airbyte-integrations/connectors/source-breezometer/build.gradle new file mode 100644 index 0000000000000..929c6da376100 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_breezometer' +} diff --git a/airbyte-integrations/connectors/source-breezometer/integration_tests/__init__.py b/airbyte-integrations/connectors/source-breezometer/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-breezometer/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-breezometer/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-breezometer/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-breezometer/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..3f05ec1594705 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/integration_tests/configured_catalog.json @@ -0,0 +1,76 @@ +{ + "streams": [ + { + "stream": { + "name": "air_quality_current", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "air_quality_forecast", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "air_quality_historical", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "pollen_forecast", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "weather_current", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "weather_forecast", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "wildfire_burnt_area", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "wildfire_locate", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-breezometer/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-breezometer/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..60a05af2129b4 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/integration_tests/invalid_config.json @@ -0,0 +1,9 @@ +{ + "api_key": "", + "latitude": "54.675003", + "longitude": "-113.550282", + "days_to_forecast": 5, + "hours_to_forecast": 900, + "historic_hours": -1, + "radius": 10 +} diff --git a/airbyte-integrations/connectors/source-breezometer/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-breezometer/integration_tests/sample_config.json new file mode 100644 index 0000000000000..1a91da037faa2 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/integration_tests/sample_config.json @@ -0,0 +1,9 @@ +{ + "api_key": "", + "latitude": "54.675003", + "longitude": "-113.550282", + "days_to_forecast": 2, + "hours_to_forecast": 30, + "historic_hours": 99, + "radius": 10 +} diff --git a/airbyte-integrations/connectors/source-breezometer/main.py b/airbyte-integrations/connectors/source-breezometer/main.py new file mode 100644 index 0000000000000..f78c3d79a9205 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-breezometer/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_breezometer import SourceBreezometer + +if __name__ == "__main__": + source = SourceBreezometer() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-breezometer/requirements.txt b/airbyte-integrations/connectors/source-breezometer/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-breezometer/setup.py b/airbyte-integrations/connectors/source-breezometer/setup.py new file mode 100644 index 0000000000000..fb959469de7ef --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_breezometer", + description="Source implementation for Breezometer.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/__init__.py b/airbyte-integrations/connectors/source-breezometer/source_breezometer/__init__.py new file mode 100644 index 0000000000000..12da2795f1447 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceBreezometer + +__all__ = ["SourceBreezometer"] diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/breezometer.yaml b/airbyte-integrations/connectors/source-breezometer/source_breezometer/breezometer.yaml new file mode 100644 index 0000000000000..9d04e5eed7fb1 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/breezometer.yaml @@ -0,0 +1,175 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data"] + + air_quality_current_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + $options: + name: "air_quality_current" + primary_key: "datetime" + path: "/air-quality/v2/current-conditions" + + air_quality_forecast_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + hours: "{{ config['hours_to_forecast'] }}" + $options: + name: "air_quality_forecast" + primary_key: "datetime" + path: "/air-quality/v2/forecast/hourly" + + air_quality_historical_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + hours: "{{ config['historic_hours'] }}" + $options: + name: "air_quality_historical" + primary_key: "datetime" + path: "/air-quality/v2/historical/hourly" + + pollen_forecast_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + days: "{{ config['days_to_forecast'] }}" + $options: + name: "pollen_forecast" + primary_key: "index_id" + path: "/pollen/v2/forecast/daily" + + weather_current_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + $options: + name: "weather_current" + primary_key: "datetime" + path: "/weather/v1/current-conditions" + + weather_forecast_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + days: "{{ config['days_to_forecast'] }}" + $options: + name: "weather_forecast" + path: "/weather/v1/forecast/daily" + + wildfire_burnt_area_stream: + retriever: + 
record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + radius: "{{ config['radius'] }}" + $options: + name: "wildfire_burnt_area" + path: "/fires/v1/burnt-area" + + wildfire_locate_stream: + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + url_base: "https://api.breezometer.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + lat: "{{ config['latitude'] }}" + lon: "{{ config['longitude'] }}" + radius: "{{ config['radius'] }}" + $options: + name: "wildfire_locate" + path: "/fires/v1/locate-and-track" + +streams: + - "*ref(definitions.air_quality_current_stream)" + - "*ref(definitions.air_quality_forecast_stream)" + - "*ref(definitions.air_quality_historical_stream)" + - "*ref(definitions.pollen_forecast_stream)" + - "*ref(definitions.weather_current_stream)" + - "*ref(definitions.weather_forecast_stream)" + - "*ref(definitions.wildfire_burnt_area_stream)" + - "*ref(definitions.wildfire_locate_stream)" + +check: + stream_names: + - "pollen_forecast" diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_current.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_current.json new file mode 100644 index 0000000000000..ec3a11998b71a --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_current.json @@ -0,0 +1,40 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "datetime": { + "type": ["null", "string"] + }, + "data_available": { + "type": ["null", "boolean"] + }, + "indexes": { + "type": ["null", "object"], + "properties": { + "baqi": { + "type": ["null", "object"], + "properties": { + "display_name": { + "type": ["null", "string"] + }, + "aqi": { + "type": ["null", "integer"] + }, + "aqi_display": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "dominant_pollutant": { + "type": ["null", "string"] + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_forecast.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_forecast.json new file mode 100644 index 0000000000000..ec3a11998b71a --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_forecast.json @@ -0,0 +1,40 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "datetime": { + "type": ["null", "string"] + }, + "data_available": { + "type": ["null", "boolean"] + }, + "indexes": { + "type": ["null", "object"], + "properties": { + "baqi": { + "type": ["null", "object"], + "properties": { + "display_name": { + "type": ["null", "string"] + }, + "aqi": { + "type": ["null", "integer"] + }, + "aqi_display": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "dominant_pollutant": { + "type": ["null", "string"] + } + } + } + } + } + } +} diff --git 
a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_historical.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_historical.json new file mode 100644 index 0000000000000..ec3a11998b71a --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/air_quality_historical.json @@ -0,0 +1,40 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "datetime": { + "type": ["null", "string"] + }, + "data_available": { + "type": ["null", "boolean"] + }, + "indexes": { + "type": ["null", "object"], + "properties": { + "baqi": { + "type": ["null", "object"], + "properties": { + "display_name": { + "type": ["null", "string"] + }, + "aqi": { + "type": ["null", "integer"] + }, + "aqi_display": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "dominant_pollutant": { + "type": ["null", "string"] + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/pollen_forecast.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/pollen_forecast.json new file mode 100644 index 0000000000000..3b6840b394b5a --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/pollen_forecast.json @@ -0,0 +1,104 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "date": { + "type": ["null", "string"] + }, + "index_id": { + "type": ["null", "string"] + }, + "index_display_name": { + "type": ["null", "string"] + }, + "types": { + "type": ["null", "object"], + "properties": { + "grass": { + "type": ["null", "object"], + "properties": { + "display_name": { + "type": ["null", "string"] + }, + "in_season": { + "type": ["null", "boolean"] + }, + "data_available": { + "type": ["null", "boolean"] + }, + "index": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "integer"] + }, + "category": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + } + } + } + } + }, + "tree": { + "type": ["null", "object"], + "properties": { + "display_name": { + "type": ["null", "string"] + }, + "in_season": { + "type": ["null", "boolean"] + }, + "data_available": { + "type": ["null", "boolean"] + }, + "index": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "integer"] + }, + "category": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + } + } + } + } + }, + "weed": { + "type": ["null", "object"], + "properties": { + "display_name": { + "type": ["null", "string"] + }, + "in_season": { + "type": ["null", "boolean"] + }, + "data_available": { + "type": ["null", "boolean"] + }, + "index": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "integer"] + }, + "category": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + } + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/weather_current.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/weather_current.json new file mode 100644 index 0000000000000..d3eb8ca656ce8 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/weather_current.json @@ -0,0 +1,128 @@ +{ + 
"$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "datetime": { + "type": ["null", "string"] + }, + "is_day_time": { + "type": ["null", "boolean"] + }, + "icon_code": { + "type": ["null", "integer"] + }, + "weather_text": { + "type": ["null", "string"] + }, + "temperature": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "feels_like_temperature": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "relative_humidity": { + "type": ["null", "integer"] + }, + "precipitation": { + "type": ["null", "object"], + "properties": { + "precipitation_probability": { + "type": ["null", "integer"] + }, + "total_precipitation": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + } + } + }, + "wind": { + "type": ["null", "object"], + "properties": { + "speed": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "direction": { + "type": ["null", "integer"] + } + } + }, + "wind_gust": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "pressure": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "visibility": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "integer"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "dew_point": { + "type": ["null", "object"], + "properties": { + "value": { + "type": ["null", "number"] + }, + "units": { + "type": ["null", "string"] + } + } + }, + "cloud_cover": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/weather_forecast.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/weather_forecast.json new file mode 100644 index 0000000000000..77f229bc845d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/weather_forecast.json @@ -0,0 +1,40 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "start_date": { + "type": ["null", "string"] + }, + "sun": { + "type": ["null", "object"], + "properties": { + "sunrise_time": { + "type": ["null", "string"] + }, + "sunset_time": { + "type": ["null", "string"] + } + } + }, + "moon": { + "type": ["null", "object"], + "properties": { + "moonrise_time": { + "type": ["null", "string"] + }, + "moonset_time": { + "type": ["null", "string"] + }, + "moon_phase": { + "type": ["null", "string"] + }, + "moon_age": { + "type": ["null", "number"] + } + } + }, + "max_uv_index": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/wildfire_burnt_area.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/wildfire_burnt_area.json new file mode 100644 index 0000000000000..46ec033ec16dd --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/wildfire_burnt_area.json @@ -0,0 +1,66 @@ +{ + "$schema": 
"https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "type": { + "type": ["null", "string"] + }, + "features": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "properties": { + "type": ["null", "object"], + "properties": { + "CurrentLat": { + "type": ["null", "number"] + }, + "CurrentLon": { + "type": ["null", "number"] + }, + "DiscoveryDateTime": { + "type": ["null", "string"] + }, + "EventDescription": { + "type": ["null", "string"] + }, + "LastUpdated": { + "type": ["null", "string"] + } + } + }, + "geometry": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "coordinates": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "number"] + } + } + } + } + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/wildfire_locate.json b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/wildfire_locate.json new file mode 100644 index 0000000000000..46ec033ec16dd --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/schemas/wildfire_locate.json @@ -0,0 +1,66 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema", + "type": "object", + "properties": { + "type": { + "type": ["null", "string"] + }, + "features": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "properties": { + "type": ["null", "object"], + "properties": { + "CurrentLat": { + "type": ["null", "number"] + }, + "CurrentLon": { + "type": ["null", "number"] + }, + "DiscoveryDateTime": { + "type": ["null", "string"] + }, + "EventDescription": { + "type": ["null", "string"] + }, + "LastUpdated": { + "type": ["null", "string"] + } + } + }, + "geometry": { + "type": ["null", "object"], + "properties": { + "type": { + "type": ["null", "string"] + }, + "coordinates": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "array"], + "items": { + "type": ["null", "number"] + } + } + } + } + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/source.py b/airbyte-integrations/connectors/source-breezometer/source_breezometer/source.py new file mode 100644 index 0000000000000..1ed8d1f8927d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceBreezometer(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "breezometer.yaml"}) diff --git a/airbyte-integrations/connectors/source-breezometer/source_breezometer/spec.yaml b/airbyte-integrations/connectors/source-breezometer/source_breezometer/spec.yaml new file mode 100644 index 0000000000000..4b7ee6bc74a45 --- /dev/null +++ b/airbyte-integrations/connectors/source-breezometer/source_breezometer/spec.yaml @@ -0,0 +1,54 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/breezometer +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Breezometer Spec + type: object + required: + - api_key + - latitude + - longitude + additionalProperties: true + properties: + api_key: + type: string + title: API Key + description: >- + Your API Access Key. See here. + airbyte_secret: true + latitude: + type: string + title: Latitude + description: Latitude of the monitored location. + examples: + - "54.675003" + longitude: + type: string + title: Longitude + description: Longitude of the monitored location. + examples: + - "-113.550282" + days_to_forecast: + type: integer + title: Days to Forecast + description: Number of days to forecast. Minimum 1, maximum 3. Valid for Pollen and Weather Forecast streams. + examples: + - 3 + hours_to_forecast: + type: integer + title: Hours to Forecast + description: Number of hours to forecast. Minimum 1, maximum 96. Valid for Air Quality Forecast stream. + examples: + - 30 + historic_hours: + type: integer + title: Historic Hours + description: Number of hours to retrieve from the Air Quality History stream. Minimum 1, maximum 720. + examples: + - 30 + radius: + type: integer + title: Radius + description: Desired radius from the location provided. Minimum 5, maximum 100. Valid for Wildfire streams. + examples: + - 50 diff --git a/airbyte-integrations/connectors/source-callrail/.dockerignore b/airbyte-integrations/connectors/source-callrail/.dockerignore new file mode 100644 index 0000000000000..e3ded779dbc6a --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_callrail +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-callrail/Dockerfile b/airbyte-integrations/connectors/source-callrail/Dockerfile new file mode 100644 index 0000000000000..51f9529dc1abe --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging.
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_callrail ./source_callrail + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-callrail diff --git a/airbyte-integrations/connectors/source-callrail/README.md b/airbyte-integrations/connectors/source-callrail/README.md new file mode 100644 index 0000000000000..b2b7e461e8314 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/README.md @@ -0,0 +1,79 @@ +# Callrail Source + +This is the repository for the Callrail configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/callrail). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-callrail:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/callrail) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_callrail/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source callrail test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-callrail:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-callrail:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-callrail:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-callrail:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-callrail:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-callrail:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, run the `acceptance-test-docker.sh` script. + +### Using gradle to run tests +All commands should be run from airbyte project root.
+To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-callrail:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-callrail:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-callrail/__init__.py b/airbyte-integrations/connectors/source-callrail/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-callrail/acceptance-test-config.yml b/airbyte-integrations/connectors/source-callrail/acceptance-test-config.yml new file mode 100644 index 0000000000000..a3a585ceda932 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/acceptance-test-config.yml @@ -0,0 +1,21 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-callrail:dev +tests: + spec: + - spec_path: "source_callrail/spec.yaml" + timeout_seconds: 60 + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["calls", "conversations"] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-callrail/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-callrail/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-callrail/build.gradle b/airbyte-integrations/connectors/source-callrail/build.gradle new file mode 100644 index 0000000000000..d40b8a205eead --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_callrail' +} diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/__init__.py b/airbyte-integrations/connectors/source-callrail/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-callrail/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..e0b1e51d2c7f3 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/abnormal_state.json @@ -0,0 +1,8 @@ +{ + "calls": { + "start_time": "2999-10-30T00:00:00.000Z" + }, + "conversations": { + "last_message_at": "2999-10-30T00:00:00.000Z" + } +} diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-callrail/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-callrail/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..0376bbedff5eb --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/configured_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "companies", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-callrail/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..6de15facc50d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "", + "account_id": "", + "start_date": "2019-01-01" +} diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-callrail/integration_tests/sample_config.json new file mode 100644 index 0000000000000..ea4fef222d219 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "XXXXXXXXXXXXXXXXXX", + "account_id": "XXXXXXXXXXXXXXXXXX", + "start_date": "2019-01-01" +} diff --git a/airbyte-integrations/connectors/source-callrail/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-callrail/integration_tests/sample_state.json new file mode 100644 index 0000000000000..b4c5967112fee --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/integration_tests/sample_state.json @@ -0,0 +1,14 @@ +{ + "calls": { + "start_time": "2022-10-13T13:51:44.830-07:00" + }, + "conversations": { + "last_message_at": "2022-10-13T13:51:44.830-07:00" + }, + "users": { + "created_at": "2022-10-13T13:51:44.830-07:00" + }, + "companies": { + "created_at": "2022-10-13T13:51:44.830-07:00" + } +} diff --git a/airbyte-integrations/connectors/source-callrail/main.py b/airbyte-integrations/connectors/source-callrail/main.py new file mode 100644 index 0000000000000..e288525e93630 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_callrail import SourceCallrail + +if __name__ == "__main__": + source = SourceCallrail() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-callrail/requirements.txt b/airbyte-integrations/connectors/source-callrail/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
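For quick local debugging without Docker, the same `spec` / `check` / `read` commands can also be driven directly through the CDK entrypoint that `main.py` wires up. A minimal sketch, assuming the `secrets/config.json` and `integration_tests/configured_catalog.json` files described in the README are in place:

```
# Local debugging sketch -- mirrors what main.py does via the airbyte_cdk entrypoint.
# Assumes secrets/config.json and integration_tests/configured_catalog.json exist.
from airbyte_cdk.entrypoint import launch

from source_callrail import SourceCallrail

# Print the connector specification (equivalent to `docker run ... spec`).
launch(SourceCallrail(), ["spec"])

# Validate the credentials in secrets/config.json against the CallRail API.
launch(SourceCallrail(), ["check", "--config", "secrets/config.json"])

# Emit records for the streams in the configured catalog as AirbyteMessages on stdout.
launch(
    SourceCallrail(),
    ["read", "--config", "secrets/config.json", "--catalog", "integration_tests/configured_catalog.json"],
)
```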
diff --git a/airbyte-integrations/connectors/source-callrail/setup.py b/airbyte-integrations/connectors/source-callrail/setup.py new file mode 100644 index 0000000000000..2c1999dd5e2cd --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_callrail", + description="Source implementation for Callrail.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/__init__.py b/airbyte-integrations/connectors/source-callrail/source_callrail/__init__.py new file mode 100644 index 0000000000000..f958cab4ebe91 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceCallrail + +__all__ = ["SourceCallrail"] diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/callrail.yaml b/airbyte-integrations/connectors/source-callrail/source_callrail/callrail.yaml new file mode 100644 index 0000000000000..1952b75b63d99 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/callrail.yaml @@ -0,0 +1,127 @@ +version: "0.1.0" + +definitions: + page_size: 100 + step: "100d" + + schema_loader: + type: JsonSchema + file_path: "./source_callrail/schemas/{{ options['name'] }}.json" + + requester: + type: HttpRequester + name: "{{ options['name'] }}" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "Authorization" + api_token: "Token token={{ config.api_key }}" + + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config.start_date }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ today_utc() }}" + datetime_format: "%Y-%m-%d" + step: "*ref(definitions.step)" + cursor_field: "{{ options.stream_cursor_field }}" + start_time_option: + field_name: "start_date" + inject_into: "request_parameter" + datetime_format: "%Y-%m-%dT%H:%M:%S.%f%z" + + retriever: + type: SimpleRetriever + $options: + url_base: "https://api.callrail.com/v3/a/" + name: "{{ options['name'] }}" + primary_key: "{{ options['primary_key'] }}" + record_selector: + extractor: + type: DpathExtractor + field_pointer: ["{{ options['name'] }}"] + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ headers['link']['next']['url'] }}" + stop_condition: "{{ 'next' not in headers['link'] }}" + page_size: 100 + page_size_option: + field_name: "per_page" + inject_into: "request_parameter" + page_token_option: + inject_into: "path" + stream_slicer: + $ref: "*ref(definitions.stream_slicer)" + + calls_stream: + $options: + name: "calls" + stream_cursor_field: "start_time" + primary_key: "id" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "{{ config['account_id'] }}/calls.json?" 
+ request_options_provider: + request_parameters: + fields: "call_type,company_id,company_name,company_time_zone,created_at,device_type,first_call,formatted_call_type,formatted_customer_location,formatted_business_phone_number,formatted_customer_name,prior_calls,formatted_customer_name_or_phone_number,formatted_customer_phone_number,formatted_duration,formatted_tracking_phone_number,formatted_tracking_source,formatted_value,good_lead_call_id,good_lead_call_time,lead_status,note,source,source_name,tags,total_calls,value,waveforms,tracker_id,speaker_percent,keywords,medium,campaign,referring_url,landing_page_url,last_requested_url,referrer_domain,utm_source,utm_medium,utm_term,utm_content,utm_campaign,utma,utmb,utmc,utmv,utmz,ga,gclid,fbclid,msclkid,milestones,timeline_url,keywords_spotted,call_highlights,agent_email,keypad_entries" + + conversations_stream: + $options: + name: "conversations" + stream_cursor_field: "last_message_at" + primary_key: "id" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "{{ config['account_id'] }}/text-messages.json?" + request_options_provider: + request_parameters: + fields: "id,company_id,initial_tracker_id,current_tracker_id,customer_name,customer_phone_number,initial_tracking_number,current_tracking_number,last_message_at,state,company_time_zone,formatted_customer_phone_number,formatted_initial_tracking_number,formatted_current_tracking_number,formatted_customer_name,recent_messages" + + users_stream: + $options: + name: "users" + stream_cursor_field: "created_at" + primary_key: "id" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "{{ config['account_id'] }}/users.json?" + + companies_stream: + $options: + name: "companies" + stream_cursor_field: "created_at" + primary_key: "id" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "{{ config['account_id'] }}/companies.json?" 
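The `stream_slicer` definition above is what breaks an incremental sync into bounded requests: it steps from the configured `start_date` to today in 100-day windows and injects each window's start into the `start_date` request parameter. Roughly, the windowing it describes looks like the sketch below (an illustration of the configuration only, not the CDK's actual slicer implementation; the `2019-01-01` start date is taken from the sample config):

```
# Rough illustration of the 100-day windowing described by the DatetimeStreamSlicer
# config above -- not the CDK's implementation.
from datetime import date, datetime, timedelta


def date_windows(start_date: str, step_days: int = 100):
    """Yield (window_start, window_end) date pairs from start_date up to today."""
    cursor = datetime.strptime(start_date, "%Y-%m-%d").date()
    today = date.today()
    while cursor <= today:
        window_end = min(cursor + timedelta(days=step_days - 1), today)
        yield cursor, window_end
        cursor = window_end + timedelta(days=1)


# Each pair becomes one slice, i.e. one windowed pass over /calls.json, /text-messages.json, etc.
for window_start, window_end in date_windows("2019-01-01"):
    print(window_start, window_end)
```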
+ +streams: + - "*ref(definitions.calls_stream)" + - "*ref(definitions.conversations_stream)" + - "*ref(definitions.users_stream)" + - "*ref(definitions.companies_stream)" + +check: + type: CheckStream + stream_names: + - users diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/calls.json b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/calls.json new file mode 100644 index 0000000000000..605b0baa364a8 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/calls.json @@ -0,0 +1,231 @@ +{ + "type": "object", + "properties": { + "answered": { + "type": ["null", "boolean"] + }, + "business_phone_number": { + "type": ["null", "string"] + }, + "customer_city": { + "type": ["null", "string"] + }, + "customer_country": { + "type": ["null", "string"] + }, + "customer_name": { + "type": ["null", "string"] + }, + "customer_phone_number": { + "type": ["null", "string"] + }, + "customer_state": { + "type": ["null", "string"] + }, + "direction": { + "type": ["null", "string"] + }, + "duration": { + "type": ["null", "integer"] + }, + "id": { + "type": ["null", "string"] + }, + "recording": { + "type": ["null", "string"] + }, + "recording_duration": { + "type": ["null", "integer"] + }, + "recording_player": { + "type": ["null", "string"] + }, + "start_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "tracking_phone_number": { + "type": ["null", "string"] + }, + "voicemail": { + "type": ["null", "boolean"] + }, + "call_type": { + "type": ["null", "string"] + }, + "company_id": { + "type": ["null", "string"] + }, + "company_name": { + "type": ["null", "string"] + }, + "company_time_zone": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "device_type": { + "type": ["null", "string"] + }, + "first_call": { + "type": ["null", "boolean"] + }, + "formatted_call_type": { + "type": ["null", "string"] + }, + "formatted_customer_location": { + "type": ["null", "string"] + }, + "formatted_business_phone_number": { + "type": ["null", "string"] + }, + "formatted_customer_name": { + "type": ["null", "string"] + }, + "prior_calls": { + "type": ["null", "integer"] + }, + "formatted_customer_name_or_phone_number": { + "type": ["null", "string"] + }, + "formatted_customer_phone_number": { + "type": ["null", "string"] + }, + "formatted_duration": { + "type": ["null", "string"] + }, + "formatted_tracking_phone_number": { + "type": ["null", "string"] + }, + "formatted_tracking_source": { + "type": ["null", "string"] + }, + "formatted_value": { + "type": ["null", "string"] + }, + "good_lead_call_id": { + "type": ["null", "string"] + }, + "good_lead_call_time": { + "type": ["null", "string"], + "format": "date-time" + }, + "lead_status": { + "type": ["null", "string"] + }, + "note": { + "type": ["null", "string"] + }, + "source": { + "type": ["null", "string"] + }, + "source_name": { + "type": ["null", "string"] + }, + "tags": { + "type": ["null", "array"], + "items": {} + }, + "total_calls": { + "type": ["null", "integer"] + }, + "value": { + "type": ["null", "string"] + }, + "waveforms": { + "type": ["null", "array"], + "items": {} + }, + "tracker_id": { + "type": ["null", "string"] + }, + "speaker_percent": { + "type": ["null", "array"], + "items": {} + }, + "keywords": { + "type": ["null", "string"] + }, + "medium": { + "type": ["null", "string"] + }, + "campaign": { + "type": ["null", "string"] + }, + "referring_url": { + "type": ["null", 
"string"] + }, + "landing_page_url": { + "type": ["null", "string"] + }, + "last_requested_url": { + "type": ["null", "string"] + }, + "referrer_domain": { + "type": ["null", "string"] + }, + "utm_source": { + "type": ["null", "string"] + }, + "utm_medium": { + "type": ["null", "string"] + }, + "utm_term": { + "type": ["null", "string"] + }, + "utm_content": { + "type": ["null", "string"] + }, + "utm_campaign": { + "type": ["null", "string"] + }, + "utma": { + "type": ["null", "string"] + }, + "utmb": { + "type": ["null", "string"] + }, + "utmc": { + "type": ["null", "string"] + }, + "utmv": { + "type": ["null", "string"] + }, + "utmz": { + "type": ["null", "string"] + }, + "ga": { + "type": ["null", "string"] + }, + "gclid": { + "type": ["null", "string"] + }, + "fbclid": { + "type": ["null", "string"] + }, + "msclkid": { + "type": ["null", "string"] + }, + "timeline_url": { + "type": ["null", "string"] + }, + "keywords_spotted": { + "type": ["null", "array"], + "items": {}, + "additionalProperties": true + }, + "call_highlights": { + "type": ["null", "array"], + "items": {}, + "additionalProperties": true + }, + "agent_email": { + "type": ["null", "string"] + }, + "keypad_entries": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/companies.json b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/companies.json new file mode 100644 index 0000000000000..dc4fdec1432e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/companies.json @@ -0,0 +1,58 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "time_zone": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "disabled_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "dni_active": { + "type": ["null", "boolean"] + }, + "script_url": { + "type": ["null", "string"] + }, + "callscore_enabled": { + "type": ["null", "boolean"] + }, + "lead_scoring_enabled": { + "type": ["null", "boolean"] + }, + "swap_exclude_jquery": { + "type": ["null", "string"] + }, + "swap_ppc_override": { + "type": ["null", "string"] + }, + "swap_landing_override": { + "type": ["null", "string"] + }, + "swap_cookie_duration": { + "type": "integer" + }, + "callscribe_enabled": { + "type": ["null", "boolean"] + }, + "keyword_spotting_enabled": { + "type": ["null", "boolean"] + }, + "form_capture": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/conversations.json b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/conversations.json new file mode 100644 index 0000000000000..552bb1fd02589 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/conversations.json @@ -0,0 +1,36 @@ +{ + "type": "object", + "properties": { + "id": { "type": ["null", "string"] }, + "initial_tracker_id": { "type": ["null", "string"] }, + "current_tracker_id": { "type": ["null", "string"] }, + "customer_name": { "type": ["null", "string"] }, + "customer_phone_number": { "type": ["null", "string"] }, + "initial_tracking_number": { "type": ["null", "string"] }, + "current_tracking_number": { "type": ["null", "string"] }, + "last_message_at": { "type": ["null", "string"], "format": "date-time" }, + "state": { "type": 
["null", "string"] }, + "formatted_customer_phone_number": { "type": ["null", "string"] }, + "formatted_initial_tracking_number": { "type": ["null", "string"] }, + "formatted_current_tracking_number": { "type": ["null", "string"] }, + "formatted_customer_name": { "type": ["null", "string"] }, + "company_time_zone": { "type": ["null", "string"] }, + "tracker_name": { "type": ["null", "string"] }, + "company_name": { "type": ["null", "string"] }, + "company_id": { "type": ["null", "string"] }, + "recent_messages": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "direction": { "type": ["null", "string"] }, + "content": { "type": ["null", "string"] }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/users.json b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/users.json new file mode 100644 index 0000000000000..9db6c2d1e53b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/schemas/users.json @@ -0,0 +1,13 @@ +{ + "type": "object", + "properties": { + "email": { "type": ["null", "string"] }, + "id": { "type": ["null", "string"] }, + "created_at": { "type": ["null", "string"], "format": "date-time" }, + "role": { "type": ["null", "string"] }, + "first_name": { "type": ["null", "string"] }, + "last_name": { "type": ["null", "string"] }, + "name": { "type": ["null", "string"] }, + "accepted": { "type": ["null", "boolean"] } + } +} diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/source.py b/airbyte-integrations/connectors/source-callrail/source_callrail/source.py new file mode 100644 index 0000000000000..63b2db66475c7 --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceCallrail(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "callrail.yaml"}) diff --git a/airbyte-integrations/connectors/source-callrail/source_callrail/spec.yaml b/airbyte-integrations/connectors/source-callrail/source_callrail/spec.yaml new file mode 100644 index 0000000000000..6fe35b647e27a --- /dev/null +++ b/airbyte-integrations/connectors/source-callrail/source_callrail/spec.yaml @@ -0,0 +1,25 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/callrail +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Call Rail Spec + type: object + required: + - api_key + - account_id + - start_date + additionalProperties: true + properties: + api_key: + type: string + description: API access key + airbyte_secret: true + account_id: + type: string + description: Account ID + airbyte_secret: true + start_date: + type: string + description: Start getting data from that date. 
+      pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$
+      examples:
+        - "2019-01-01"
diff --git a/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml
new file mode 100644
index 0000000000000..c1cb0b594e5b9
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-config.yml
@@ -0,0 +1,7 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-clickhouse:dev
+tests:
+  spec:
+    - spec_path: "src/test-integration/resources/expected_spec.json"
+      config_path: "src/test-integration/resources/dummy_config.json"
diff --git a/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh
new file mode 100644
index 0000000000000..ba0ab2874b989
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/acceptance-test-docker.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -v /tmp:/tmp \
+    -v $(pwd):/test_input \
+    airbyte/source-acceptance-test \
+    --acceptance-test-config /test_input
diff --git a/airbyte-integrations/connectors/source-clickhouse/build.gradle b/airbyte-integrations/connectors/source-clickhouse/build.gradle
index e15b1e743883a..1e9f4cc21a28e 100644
--- a/airbyte-integrations/connectors/source-clickhouse/build.gradle
+++ b/airbyte-integrations/connectors/source-clickhouse/build.gradle
@@ -2,6 +2,7 @@ plugins {
     id 'application'
     id 'airbyte-docker'
     id 'airbyte-integration-test-java'
+    id 'airbyte-source-acceptance-test'
 }

 application {
diff --git a/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py
new file mode 100644
index 0000000000000..1302b2f57e10e
--- /dev/null
+++ b/airbyte-integrations/connectors/source-clickhouse/integration_tests/acceptance.py
@@ -0,0 +1,16 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+    """This fixture is a placeholder for external resources that acceptance test might require."""
+    # TODO: setup test dependencies if needed.
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..49e2346d457fc --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 8123, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..16411f19eccca --- /dev/null +++ b/airbyte-integrations/connectors/source-clickhouse/src/test-integration/resources/expected_spec.json @@ -0,0 +1,177 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/clickhouse", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "ClickHouse Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "description": "The host endpoint of the Clickhouse cluster.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "The port of the database.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 8123, + "examples": ["8123"], + "order": 1 + }, + "database": { + "description": "The name of the database.", + "title": "Database", + "type": "string", + "examples": ["default"], + "order": 2 + }, + "username": { + "description": "The username which is used to access the database.", + "title": "Username", + "type": "string", + "order": 3 + }, + "password": { + "description": "The password associated with this username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). 
For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "SSL Connection", + "description": "Encrypt data using SSL.", + "type": "boolean", + "default": true, + "order": 6 + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-clickup-api/.dockerignore b/airbyte-integrations/connectors/source-clickup-api/.dockerignore new file mode 100644 index 0000000000000..9ab3ec98e5482 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/.dockerignore @@ -0,0 +1,6 @@ +* 
+!Dockerfile +!main.py +!source_clickup_api +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-clickup-api/Dockerfile b/airbyte-integrations/connectors/source-clickup-api/Dockerfile new file mode 100644 index 0000000000000..337d4783a837c --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_clickup_api ./source_clickup_api + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-clickup-api diff --git a/airbyte-integrations/connectors/source-clickup-api/README.md b/airbyte-integrations/connectors/source-clickup-api/README.md new file mode 100644 index 0000000000000..e9ee8b7163880 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/README.md @@ -0,0 +1,79 @@ +# Clickup Api Source + +This is the repository for the Clickup Api configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/clickup-api). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-clickup-api:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/clickup-api) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_clickup_api/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source clickup-api test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-clickup-api:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-clickup-api:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-clickup-api:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-clickup-api:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-clickup-api:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-clickup-api:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-clickup-api:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-clickup-api:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-clickup-api/__init__.py b/airbyte-integrations/connectors/source-clickup-api/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-clickup-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-clickup-api/acceptance-test-config.yml new file mode 100644 index 0000000000000..fc7a209a99c32 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/acceptance-test-config.yml @@ -0,0 +1,28 @@ +acceptance_tests: + basic_read: + tests: + - config_path: secrets/config.json + empty_streams: [] + connection: + tests: + - config_path: secrets/config.json + status: succeed + - config_path: integration_tests/invalid_config.json + status: failed + discovery: + tests: + - config_path: secrets/config.json + full_refresh: + tests: + - config_path: secrets/config.json + configured_catalog_path: integration_tests/configured_catalog.json + ignored_fields: + "team": ["members", "last_active"] + spec: + tests: + - spec_path: source_clickup_api/spec.yaml + timeout_seconds: 1200 + incremental: + bypass_reason: "Incremental syncs are not supported on this connector." +connector_image: airbyte/source-clickup-api:dev +test_strictness_level: low diff --git a/airbyte-integrations/connectors/source-clickup-api/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-clickup-api/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-clickup-api/build.gradle b/airbyte-integrations/connectors/source-clickup-api/build.gradle new file mode 100644 index 0000000000000..c280b1726da86 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_clickup_api' +} diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/__init__.py b/airbyte-integrations/connectors/source-clickup-api/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-clickup-api/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-clickup-api/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-clickup-api/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..7de7512458f1a --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/integration_tests/configured_catalog.json @@ -0,0 +1,58 @@ +{ + "streams": [ + { + "stream": { + "name": "user", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "team", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "list", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "task", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "space", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "folder", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-clickup-api/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..4d7174d98f606 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_token": "this should be an incomplete config file, used in standard tests" +} diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-clickup-api/integration_tests/sample_config.json new file mode 100644 index 0000000000000..71ea52349f44c --- /dev/null +++ 
b/airbyte-integrations/connectors/source-clickup-api/integration_tests/sample_config.json @@ -0,0 +1 @@ +{ "api_token": "abcdefgh" } diff --git a/airbyte-integrations/connectors/source-clickup-api/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-clickup-api/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/main.py b/airbyte-integrations/connectors/source-clickup-api/main.py new file mode 100644 index 0000000000000..6505974ecb32d --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_clickup_api import SourceClickupApi + +if __name__ == "__main__": + source = SourceClickupApi() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-clickup-api/requirements.txt b/airbyte-integrations/connectors/source-clickup-api/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-clickup-api/setup.py b/airbyte-integrations/connectors/source-clickup-api/setup.py new file mode 100644 index 0000000000000..04b79827f7601 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.4", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_clickup_api", + description="Source implementation for Clickup Api.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/__init__.py b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/__init__.py new file mode 100644 index 0000000000000..65af1b541ed47 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceClickupApi + +__all__ = ["SourceClickupApi"] diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/clickup_api.yaml b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/clickup_api.yaml new file mode 100644 index 0000000000000..b4aba2d4d3f8a --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/clickup_api.yaml @@ -0,0 +1,131 @@ +version: "0.1.0" + +definitions: + schema_loader: + type: JsonSchema + file_path: "./source_clickup_api/schemas/{{ options['name'] }}.json" + singleSelector: + type: RecordSelector + extractor: + type: DpathExtractor + field_pointer: [] + arraySelector: + type: RecordSelector + extractor: + type: DpathExtractor + field_pointer: ["{{ options['name'] }}s"] + requester: + type: HttpRequester + name: "{{ options['name'] }}" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "Authorization" + api_token: "{{ config['api_token'] }}" + paginator: + type: NoPagination + retriever: + type: SimpleRetriever + $options: + url_base: "https://api.clickup.com/api/v2" + name: "{{ options['name'] }}" + primary_key: "{{ options['primary_key'] }}" + +streams: + - type: DeclarativeStream + $options: + name: "user" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/user" + paginator: + $ref: "*ref(definitions.paginator)" + record_selector: + $ref: "*ref(definitions.singleSelector)" + - type: DeclarativeStream + $options: + name: "team" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/team" + paginator: + $ref: "*ref(definitions.paginator)" + record_selector: + $ref: "*ref(definitions.arraySelector)" + - type: DeclarativeStream + $options: + name: "list" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "folder/{{ config['folder_id'] }}/list" + paginator: + $ref: "*ref(definitions.paginator)" + record_selector: + $ref: "*ref(definitions.arraySelector)" + - type: DeclarativeStream + $options: + name: "space" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "team/{{ config['team_id'] }}/space" + paginator: + $ref: "*ref(definitions.paginator)" + record_selector: + $ref: "*ref(definitions.arraySelector)" + - type: DeclarativeStream + $options: + name: "folder" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "space/{{ config['space_id'] }}/folder" + paginator: + $ref: "*ref(definitions.paginator)" + record_selector: + $ref: "*ref(definitions.arraySelector)" + - type: DeclarativeStream + $options: + name: "task" + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "list/{{ config['list_id'] }}/task" + record_selector: + $ref: "*ref(definitions.arraySelector)" + paginator: + type: DefaultPaginator + page_size_option: + inject_into: "request_parameter" + field_name: "page_size" + pagination_strategy: + type: "OffsetIncrement" + page_size: 1 + 
page_token_option: + field_name: "page" + inject_into: "request_parameter" + +check: + type: CheckStream + stream_names: ["user", "team", "space", "folder", "list", "task"] diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/folder.json b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/folder.json new file mode 100644 index 0000000000000..06205fe9fe352 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/folder.json @@ -0,0 +1,44 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "orderindex": { + "type": ["null", "integer"] + }, + "override_statuses": { + "type": ["null", "boolean"] + }, + "hidden": { + "type": ["null", "boolean"] + }, + "task_count": { + "type": ["null", "string"] + }, + "lists": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"] + } + }, + "space": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "access": { + "type": ["null", "boolean"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/list.json b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/list.json new file mode 100644 index 0000000000000..2f3fecb65156d --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/list.json @@ -0,0 +1,76 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "orderindex": { + "type": ["null", "integer"] + }, + "content": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string", "object"] + }, + "priority": { + "type": ["null", "string"] + }, + "assignee": { + "type": ["null", "string"] + }, + "task_count": { + "type": ["null", "integer"] + }, + "due_date": { + "type": ["null", "string"] + }, + "start_date": { + "type": ["null", "string"] + }, + "archived": { + "type": ["null", "boolean"] + }, + "override_statuses": { + "type": ["null", "boolean"] + }, + "permission_level": { + "type": ["null", "string"] + }, + "folder": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "hidden": { + "type": ["null", "boolean"] + }, + "access": { + "type": ["null", "boolean"] + } + } + }, + "space": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "access": { + "type": ["null", "boolean"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/space.json b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/space.json new file mode 100644 index 0000000000000..fa6712a9b9b83 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/space.json @@ -0,0 +1,98 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "statuses": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "status": 
{ + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "orderindex": { + "type": ["null", "integer"] + }, + "type": { + "type": ["null", "string"] + } + } + } + }, + "private": { + "type": ["null", "boolean"] + }, + "multiple_assignees": { + "type": ["null", "boolean"] + }, + "assignees": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"] + } + }, + "features": { + "type": ["null", "object"], + "properties": { + "due_dates": { + "type": ["null", "object"], + "properties": { + "enabled": { + "type": ["null", "boolean"] + }, + "start_date": { + "type": ["null", "boolean"] + }, + "remap_due_dates": { + "type": ["null", "boolean"] + }, + "remap_closed_due_date": { + "type": ["null", "boolean"] + } + } + }, + "time_tracking": { + "type": ["null", "object"], + "properties": { + "enabled": { + "type": ["null", "boolean"] + } + } + }, + "tags": { + "type": ["null", "object"], + "properties": { + "enabled": { + "type": ["null", "boolean"] + } + } + }, + "time_estimates": { + "type": ["null", "object"], + "properties": { + "enabled": { + "type": ["null", "boolean"] + } + } + }, + "checklists": { + "type": ["null", "object"], + "properties": { + "enabled": { + "type": ["null", "boolean"] + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/task.json b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/task.json new file mode 100644 index 0000000000000..6f47b17b3bdb3 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/task.json @@ -0,0 +1,121 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "object"], + "properties": { + "status": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "orderindex": { + "type": ["null", "integer"] + }, + "type": { + "type": ["null", "string"] + } + } + }, + "orderindex": { + "type": ["null", "string"] + }, + "date_created": { + "type": ["null", "string"] + }, + "date_updated": { + "type": ["null", "string"] + }, + "date_closed": { + "type": ["null", "string"] + }, + "creator": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "integer"] + }, + "username": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "profilePicture": { + "type": ["null", "string"] + } + } + }, + "assignees": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"] + } + }, + "checklists": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"] + } + }, + "tags": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"] + } + }, + "parent": { + "type": ["null", "string"] + }, + "priority": { + "type": ["null", "object"] + }, + "due_date": { + "type": ["null", "string"] + }, + "start_date": { + "type": ["null", "string"] + }, + "time_estimate": { + "type": ["null", "string"] + }, + "time_spent": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "folder": { + "type": ["null", "object"], + "properties": { + "id": { + "type": ["null", "string"] + } + } + }, + "space": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + } + } + }, + "list": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + } + } + } + } +} diff 
--git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/team.json b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/team.json new file mode 100644 index 0000000000000..1fd32bf9e59ce --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/team.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "avatar": { + "type": ["null", "string"] + }, + "members": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "username": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "profilePicture": { + "type": ["null", "string"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/user.json b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/user.json new file mode 100644 index 0000000000000..91490f5548b33 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/schemas/user.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "user": { + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "username": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "profilePicture": { + "type": ["null", "string"] + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/source.py b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/source.py new file mode 100644 index 0000000000000..43d94a1e2a852 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceClickupApi(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "clickup_api.yaml"}) diff --git a/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/spec.yaml b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/spec.yaml new file mode 100644 index 0000000000000..ab7e35cb89476 --- /dev/null +++ b/airbyte-integrations/connectors/source-clickup-api/source_clickup_api/spec.yaml @@ -0,0 +1,36 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: ClickUp Api Spec + type: object + required: + - api_token + additionalProperties: true + properties: + # 'TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.': + api_token: + type: string + description: >- + Every ClickUp API call required authentication. This field is your personal API token. See here. + airbyte_secret: true + team_id: + type: string + description: >- + The ID of your team in ClickUp. Retrieve it from the `/team` of the ClickUp API. 
See here. + space_id: + type: string + description: >- + The ID of your space in your workspace. Retrieve it from the `/team/{team_id}/space` of the ClickUp API. See here. + folder_id: + type: string + description: >- + The ID of your folder in your space. Retrieve it from the `/space/{space_id}/folder` of the ClickUp API. See here. + list_id: + type: string + description: >- + The ID of your list in your folder. Retrieve it from the `/folder/{folder_id}/list` of the ClickUp API. See here. diff --git a/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml new file mode 100644 index 0000000000000..be8bdf914af30 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-cockroachdb:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-cockroachdb/build.gradle b/airbyte-integrations/connectors/source-cockroachdb/build.gradle index 1a73d68242aa2..67e77014637fb 100644 --- a/airbyte-integrations/connectors/source-cockroachdb/build.gradle +++ b/airbyte-integrations/connectors/source-cockroachdb/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..49e2346d457fc --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 8123, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..953149a5ef739 --- /dev/null +++ b/airbyte-integrations/connectors/source-cockroachdb/src/test-integration/resources/expected_spec.json @@ -0,0 +1,62 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/cockroachdb", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Cockroach Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 0 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 5432, + "examples": ["5432"], + "order": 1 + }, + "database": { + "title": "DB Name", + "description": "Name of the database.", + "type": "string", + "order": 2 + }, + "username": { + "title": "User", + "description": "Username to use to access the database.", + "type": "string", + "order": 3 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). 
For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "Connect using SSL", + "description": "Encrypt client/server communications for increased security.", + "type": "boolean", + "default": false, + "order": 6 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-coda/.dockerignore b/airbyte-integrations/connectors/source-coda/.dockerignore new file mode 100644 index 0000000000000..b79a53f6d7af9 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_coda +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-coda/Dockerfile b/airbyte-integrations/connectors/source-coda/Dockerfile new file mode 100644 index 0000000000000..f8d116a3a9246 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_coda ./source_coda + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-coda diff --git a/airbyte-integrations/connectors/source-coda/README.md b/airbyte-integrations/connectors/source-coda/README.md new file mode 100644 index 0000000000000..85b13c68d5616 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/README.md @@ -0,0 +1,132 @@ +# Coda Source + +This is the repository for the Coda source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/coda). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. 
`requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-coda:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/coda) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_coda/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source coda test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-coda:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-coda:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-coda:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-coda:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-coda:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-coda:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector needs to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+To run your integration tests with acceptance tests, from the connector root, run
+```
+python -m pytest integration_tests -p integration_tests.acceptance
+```
+To run your integration tests with Docker, use the `acceptance-test-docker.sh` script.
+
+### Using gradle to run tests
+All commands should be run from the airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-coda:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-coda:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups, dependencies that are:
+* required for your connector to work; these go in the `MAIN_REQUIREMENTS` list.
+* required for testing; these go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
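As a quick sanity check alongside the CLI commands in the README above, a minimal sketch along these lines should exercise the same `check` logic directly from Python; it assumes a local `secrets/config.json` containing the `auth_token` field defined in `source_coda/spec.yaml`:

```
# Minimal sketch: call the Coda source's connection check directly.
# Assumes secrets/config.json holds the "auth_token" described in spec.yaml.
import json
import logging

from source_coda import SourceCoda

if __name__ == "__main__":
    with open("secrets/config.json") as f:
        config = json.load(f)

    # check_connection returns (True, None) on success, (False, error) otherwise
    ok, error = SourceCoda().check_connection(logging.getLogger("airbyte"), config)
    print("connection check passed:", ok, "error:", error)
```

This should mirror what `python main.py check --config secrets/config.json` does through the CDK entrypoint, which can help isolate authentication problems before running the full acceptance test suite.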
diff --git a/airbyte-integrations/connectors/source-coda/acceptance-test-config.yml b/airbyte-integrations/connectors/source-coda/acceptance-test-config.yml new file mode 100644 index 0000000000000..efb1873158325 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/acceptance-test-config.yml @@ -0,0 +1,37 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-coda:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_coda/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: formulas + bypass_reason: "no records" + - name: permissions + bypass_reason: "no records" +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-coda/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-coda/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-coda/build.gradle b/airbyte-integrations/connectors/source-coda/build.gradle new file mode 100644 index 0000000000000..d685c0c4bc743 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_coda' +} diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/__init__.py b/airbyte-integrations/connectors/source-coda/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-coda/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-coda/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-coda/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..b6bef3c3fd7f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "docs", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "permissions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "categories", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "pages", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "tables", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "formulas", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "controls", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-coda/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..f8cbdf072c870 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "auth_token": "invalid authentication token" +} diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-coda/integration_tests/sample_config.json new file mode 100644 index 0000000000000..cbbdb13d6e975 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-coda/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "auth_token": "1111102a999ce-a634-42a4-a0b9-90324c436b6a" +} diff --git a/airbyte-integrations/connectors/source-coda/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-coda/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-coda/main.py b/airbyte-integrations/connectors/source-coda/main.py new file mode 100644 index 0000000000000..0a5c909b0d31a --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_coda import SourceCoda + +if __name__ == "__main__": + source = SourceCoda() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-coda/requirements.txt b/airbyte-integrations/connectors/source-coda/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-coda/setup.py b/airbyte-integrations/connectors/source-coda/setup.py new file mode 100644 index 0000000000000..574189c1bced7 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.2.5", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_coda", + description="Source implementation for Coda.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-coda/source_coda/__init__.py b/airbyte-integrations/connectors/source-coda/source_coda/__init__.py new file mode 100755 index 0000000000000..2b7dca0819d94 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceCoda + +__all__ = ["SourceCoda"] diff --git a/airbyte-integrations/connectors/source-coda/source_coda/schemas/categories.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/categories.json new file mode 100644 index 0000000000000..a90ed1fede997 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/categories.json @@ -0,0 +1,9 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "name": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-coda/source_coda/schemas/controls.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/controls.json new file mode 100644 index 0000000000000..14fc1e737c60b --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/controls.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + + "href": { + "type": "string" + }, + "name": { + "type": "string" + }, + "parent": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-coda/source_coda/schemas/docs.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/docs.json new file mode 100644 index 0000000000000..3a28e58b0c336 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/docs.json @@ -0,0 +1,153 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "icon": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "type": "string" + }, + "browserLink": { + "type": "string" + } + } + }, + "name": { + "type": "string" + }, + "owner": { + "type": "string" + }, + "owner_name": { + "type": "string" + }, + "docSize": { + "type": "object", + "properties": { + "totalRowCount": { + "type": "integer" + }, + "tableViewCount": { + "type": "integer" + }, + "pageCount": { + "type": "integer" + }, + "overApiSizeLimit": { + "type": "boolean" + } + } + }, + "sourceDoc": { + "type": "object", + "properties": { + "totalRowCount": { + "type": "string" + }, + "type": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + } + } + }, + "createdAt": { + "type": "string" + }, + "updatedAt": { + "type": "string" + }, + "published": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "imageLink": { + "type": "string" + }, + "discoverable": { + "type": "boolean" + }, + "earnCredit": { + "type": "boolean" + }, + "mode": { + "type": "string" + }, + "categories": { + "type": "string", + "items": { + "type": "string" + } + } + } + }, + "folder": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "workspace": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + 
"organizationId": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "workspaceId": { + "type": "string" + }, + "folderId": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-coda/source_coda/schemas/formulas.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/formulas.json new file mode 100644 index 0000000000000..14fc1e737c60b --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/formulas.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + + "href": { + "type": "string" + }, + "name": { + "type": "string" + }, + "parent": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "id": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-coda/source_coda/schemas/pages.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/pages.json new file mode 100644 index 0000000000000..79ca344a2b135 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/pages.json @@ -0,0 +1,164 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + }, + "subtitle": { + "type": "string" + }, + "icon": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "type": { + "type": "string" + }, + "browserLink": { + "type": "string" + } + } + }, + "image": { + "type": "object", + "properties": { + "width": { + "type": "number" + }, + "height": { + "type": "number" + }, + "type": { + "type": "string" + }, + "browserLink": { + "type": "string" + } + } + }, + "parent": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "children": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, + "authors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "@context": { + "type": "string" + }, + "@type": { + "type": "string" + }, + "additionalType": { + "type": "string" + }, + "name": { + "type": "string" + }, + "email": { + "type": "string" + } + } + }, + "createdAt": { + "type": "string" + }, + "updatedBy": { + "type": "object", + "properties": { + "@context": { + "type": "string" + }, + "@type": { + "type": "string" + }, + "additionalType": { + "type": "string" + }, + "name": { + "type": "string" + }, + "email": { + "type": "string" + } + } + }, + "createdBy": { + "type": "object", + "properties": { + "@context": { + "type": "string" + }, + "@type": { + "type": "string" + }, + "additionalType": { + "type": "string" + }, + "name": { + "type": "string" + }, + "email": { + "type": "string" + } + } + } + } + } + } +} diff --git 
a/airbyte-integrations/connectors/source-coda/source_coda/schemas/permissions.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/permissions.json new file mode 100644 index 0000000000000..e440e36472a4d --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/permissions.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "access": { + "type": "string" + }, + "principal": { + "type": "object", + "properties": { + "type": { + "type": "string" + }, + "email": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-coda/source_coda/schemas/tables.json b/airbyte-integrations/connectors/source-coda/source_coda/schemas/tables.json new file mode 100644 index 0000000000000..94b46a4170dcf --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/schemas/tables.json @@ -0,0 +1,44 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "tableType": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + }, + "parent": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "href": { + "type": "string" + }, + "browserLink": { + "type": "string" + }, + "name": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-coda/source_coda/source.py b/airbyte-integrations/connectors/source-coda/source_coda/source.py new file mode 100755 index 0000000000000..8bd60251c9cfb --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/source.py @@ -0,0 +1,170 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator + +BASE_URL = "https://coda.io/apis/v1/" + + +# Basic full refresh stream +class CodaStream(HttpStream, ABC): + + url_base = BASE_URL + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.limit = 25 + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return response.json().get("nextPageToken", None) + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + if next_page_token: + return {"pageToken": next_page_token, "limit": self.limit} + else: + return {"limit": self.limit} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + return response.json()["items"] + + +class Docs(CodaStream): + + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "docs" + + +class CodaStreamDoc(CodaStream): + def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: + """ + self.authenticator (which should be used as the + authenticator for Users) is object of NoAuth() + + so self._session.auth is used instead + """ + docs_stream = Docs(**{"authenticator": self._authenticator}) + for doc in docs_stream.read_records(sync_mode=SyncMode.full_refresh): + yield {"doc_id": doc["id"]} + + +class Permissions(CodaStreamDoc): + + primary_key = "id" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + doc_id = stream_slice["doc_id"] + return f"docs/{doc_id}/acl/permissions" + + +class Categories(CodaStreamDoc): + + primary_key = "name" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "categories" + + +class Pages(CodaStreamDoc): + + primary_key = "id" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + doc_id = stream_slice["doc_id"] + return f"docs/{doc_id}/pages" + + +class Tables(CodaStreamDoc): + + primary_key = "id" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + doc_id = stream_slice["doc_id"] + return f"docs/{doc_id}/tables" + + +class Formulas(CodaStreamDoc): + + primary_key = "id" + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + doc_id = stream_slice["doc_id"] + return f"docs/{doc_id}/formulas" + + +class Controls(CodaStreamDoc): + + primary_key = "id" + + def __init__(self, **kwargs): 
+ super().__init__(**kwargs) + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + doc_id = stream_slice["doc_id"] + return f"docs/{doc_id}/controls" + + +# Source +class SourceCoda(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + try: + token = config.get("auth_token") + headers = {"Authorization": f"Bearer {token}"} + r = requests.get(f"{BASE_URL}whoami", headers=headers) + if r.status_code == 200: + return True, None + except Exception as e: + return False, e + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + stream_args = { + "authenticator": TokenAuthenticator(token=config.get("auth_token")), + } + + return [ + Docs(**stream_args), + Permissions(**stream_args), + Categories(**stream_args), + Pages(**stream_args), + Tables(**stream_args), + Formulas(**stream_args), + Controls(**stream_args), + ] diff --git a/airbyte-integrations/connectors/source-coda/source_coda/spec.yaml b/airbyte-integrations/connectors/source-coda/source_coda/spec.yaml new file mode 100755 index 0000000000000..138ab5bfc046e --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/source_coda/spec.yaml @@ -0,0 +1,15 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/coda +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Required attributes for hitting apis + type: object + required: + - auth_token + additionalProperties: true + properties: + auth_token: + title: Authentication token + type: string + description: Bearer token + airbyte_secret: true + order: 0 diff --git a/airbyte-integrations/connectors/source-coda/unit_tests/__init__.py b/airbyte-integrations/connectors/source-coda/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-coda/unit_tests/test_source.py b/airbyte-integrations/connectors/source-coda/unit_tests/test_source.py new file mode 100644 index 0000000000000..1e33b4e639018 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/unit_tests/test_source.py @@ -0,0 +1,52 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock, patch + +from source_coda.source import SourceCoda + + +class MockResponse: + def __init__(self, json_data, status_code): + self.json_data = json_data + self.status_code = status_code + + def json(self): + return self.json_data + + def raise_for_status(self): + if self.status_code != 200: + raise Exception("Bad things happened") + + +def mocked_requests_get(fail=False): + def wrapper(*args, **kwargs): + if fail: + return MockResponse(None, 404) + + return MockResponse( + {"name": "John", "loginId": "john@example.com", "type": "user", "href": "https://coda.io/apis/v1/whoami", "tokenName": "as", "scoped": False, "pictureLink": "https://images-coda.io", "workspace":{ + "id": "test-id", + "type": "workspace", + "browserLink": "https://coda.io/link", + "name": "title" + }}, 200 + ) + + return wrapper + + +@patch("requests.get", side_effect=mocked_requests_get()) +def test_check_connection(mocker): + source = SourceCoda() + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceCoda() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 7 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-coda/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-coda/unit_tests/test_streams.py new file mode 100644 index 0000000000000..7f1a96c57e050 --- /dev/null +++ b/airbyte-integrations/connectors/source-coda/unit_tests/test_streams.py @@ -0,0 +1,91 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import logging +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +from source_coda.source import CodaStream + +logger = logging.getLogger() +logger.level = logging.DEBUG + + +authenticator = TokenAuthenticator(token="test_token"), + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(CodaStream, "path", "v0/example_endpoint") + mocker.patch.object(CodaStream, "primary_key", "test_primary_key") + mocker.patch.object(CodaStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = CodaStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"limit": 25} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = CodaStream(authenticator=authenticator) + response = MagicMock() + response.json.return_value = { + "id": "1244fds", + "name": "Test doc" + } + inputs = {"response": response} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = CodaStream(authenticator=authenticator) + + response = MagicMock() + response.json = MagicMock(return_value={'items': [{"id": 101}]}) + + inputs = {"response": response} + expected_parsed_object = response.json()['items'][0] + assert next(iter(stream.parse_response(**inputs))) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = CodaStream(authenticator=authenticator) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def 
test_http_method(patch_base_class): + stream = CodaStream(authenticator=authenticator) + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = CodaStream(authenticator=authenticator) + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = CodaStream(authenticator=authenticator) + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-coin-api/source_coin_api/spec.yaml b/airbyte-integrations/connectors/source-coin-api/source_coin_api/spec.yaml index 638b6f452174f..6c16703fa042f 100644 --- a/airbyte-integrations/connectors/source-coin-api/source_coin_api/spec.yaml +++ b/airbyte-integrations/connectors/source-coin-api/source_coin_api/spec.yaml @@ -20,8 +20,8 @@ connectionSpecification: description: | The environment to use. Either sandbox or production. enum: - - sandbox - - production + - sandbox + - production default: sandbox order: 1 symbol_id: @@ -32,8 +32,7 @@ connectionSpecification: order: 2 period: type: string - description: - The period to use. See the documentation for a list. + description: The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get examples: - 5SEC diff --git a/airbyte-integrations/connectors/source-coinmarketcap/source_coinmarketcap/spec.yaml b/airbyte-integrations/connectors/source-coinmarketcap/source_coinmarketcap/spec.yaml index 9d0e5c6b91356..dfccbd4739b1b 100644 --- a/airbyte-integrations/connectors/source-coinmarketcap/source_coinmarketcap/spec.yaml +++ b/airbyte-integrations/connectors/source-coinmarketcap/source_coinmarketcap/spec.yaml @@ -29,11 +29,10 @@ connectionSpecification: symbols: title: Symbol type: array - items: + items: type: string description: Cryptocurrency symbols. (only used for quotes stream) minItems: 1 examples: - AVAX - BTC - diff --git a/airbyte-integrations/connectors/source-configcat/.dockerignore b/airbyte-integrations/connectors/source-configcat/.dockerignore new file mode 100644 index 0000000000000..355d3d537163e --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_configcat +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-configcat/Dockerfile b/airbyte-integrations/connectors/source-configcat/Dockerfile new file mode 100644 index 0000000000000..f4da267a674f9 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_configcat ./source_configcat + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-configcat diff --git a/airbyte-integrations/connectors/source-configcat/README.md b/airbyte-integrations/connectors/source-configcat/README.md new file mode 100644 index 0000000000000..7041446c7f99b --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/README.md @@ -0,0 +1,79 @@ +# Configcat Source + +This is the repository for the Configcat configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/configcat). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-configcat:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/configcat) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_configcat/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source configcat test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-configcat:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-configcat:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-configcat:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-configcat:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-configcat:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-configcat:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. 
+If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-configcat:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-configcat:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-configcat/__init__.py b/airbyte-integrations/connectors/source-configcat/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-configcat/acceptance-test-config.yml b/airbyte-integrations/connectors/source-configcat/acceptance-test-config.yml new file mode 100644 index 0000000000000..50576c0dd298a --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-configcat:dev +tests: + spec: + - spec_path: "source_configcat/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["organization_members"] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-configcat/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-configcat/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-configcat/build.gradle b/airbyte-integrations/connectors/source-configcat/build.gradle new file mode 100644 index 0000000000000..e7b33e0f3e27c --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_configcat' +} diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/__init__.py b/airbyte-integrations/connectors/source-configcat/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-configcat/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-configcat/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-configcat/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..589866b6a4fee --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "organizations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "organization_members", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "tags", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "environments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-configcat/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..7c98a32eeacb1 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "username": "this should be an incomplete config file, used in standard tests" +} diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-configcat/integration_tests/sample_config.json new file mode 100644 index 0000000000000..18c9245426b53 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "username": "08dab91a-3029-4e2a-82dd-dt5a9219c6e3", + "password": "pRsAOW5AC03Jcm9NqiKg8Pb4u1bFooq7O2bNcPRu3nI=" +} diff --git a/airbyte-integrations/connectors/source-configcat/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-configcat/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-configcat/main.py b/airbyte-integrations/connectors/source-configcat/main.py new file mode 100644 index 0000000000000..376b395e114d5 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_configcat import SourceConfigcat + +if __name__ == "__main__": + source = SourceConfigcat() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-configcat/requirements.txt b/airbyte-integrations/connectors/source-configcat/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-configcat/setup.py b/airbyte-integrations/connectors/source-configcat/setup.py new file mode 100644 index 0000000000000..ebc0530476885 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_configcat", + description="Source implementation for Configcat.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/__init__.py b/airbyte-integrations/connectors/source-configcat/source_configcat/__init__.py new file mode 100644 index 0000000000000..80fba4655ec54 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceConfigcat + +__all__ = ["SourceConfigcat"] diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/configcat.yaml b/airbyte-integrations/connectors/source-configcat/source_configcat/configcat.yaml new file mode 100644 index 0000000000000..bed9ea7cb4dd4 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/configcat.yaml @@ -0,0 +1,100 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.configcat.com/v1" + http_method: "GET" + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['username'] }}" + password: "{{ config['password'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + organizations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organizations" + primary_key: "organizationId" + path: "/organizations" + products_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "products" + primary_key: "productId" + path: "/products" + product_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.products_stream)" + parent_key: productId + stream_slice_field: productId + tags_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tags" + primary_key: "tagId" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/products/{{ stream_slice.productId }}/tags" + stream_slicer: + $ref: "*ref(definitions.product_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + environments_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "environments" + primary_key: "environmentId" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/products/{{ stream_slice.productId }}/environments" + stream_slicer: + $ref: "*ref(definitions.product_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + organization_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.organizations_stream)" + parent_key: organizationId + stream_slice_field: organizationId + organization_members_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organization_members" + primary_key: "userId" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/organizations/{{ stream_slice.organizationId }}/members" + stream_slicer: + $ref: "*ref(definitions.organization_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + +streams: + - "*ref(definitions.organizations_stream)" + - "*ref(definitions.organization_members_stream)" + - "*ref(definitions.products_stream)" + - "*ref(definitions.tags_stream)" + - "*ref(definitions.environments_stream)" + +check: + stream_names: + - "organizations" diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/environments.json b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/environments.json new file mode 100644 index 0000000000000..ca73ee9f1e4fd --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/environments.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", 
+ "type": "object", + "properties": { + "product": { + "type": "object" + }, + "environmentId": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "order": { + "type": ["null", "integer"] + }, + "reasonRequired": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/organization_members.json b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/organization_members.json new file mode 100644 index 0000000000000..cf5e62a9c565b --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/organization_members.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "userId": { + "type": "string" + }, + "fullName": { + "type": "string" + }, + "email": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/organizations.json b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/organizations.json new file mode 100644 index 0000000000000..36360da3c1beb --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/organizations.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "organizationId": { + "type": "string" + }, + "name": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/products.json b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/products.json new file mode 100644 index 0000000000000..dca01bcc5d799 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/products.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "organizationId": { + "type": "object" + }, + "name": { + "type": "string" + }, + "productId": { + "type": "string" + }, + "description": { + "type": "string" + }, + "order": { + "type": "integer" + }, + "reasonRequired": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/tags.json b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/tags.json new file mode 100644 index 0000000000000..a23529e9b5914 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/schemas/tags.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "product": { + "type": "object" + }, + "name": { + "type": "string" + }, + "tagId": { + "type": "integer" + }, + "color": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/source.py b/airbyte-integrations/connectors/source-configcat/source_configcat/source.py new file mode 100644 index 0000000000000..840d14c60b316 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. 
+ +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceConfigcat(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "configcat.yaml"}) diff --git a/airbyte-integrations/connectors/source-configcat/source_configcat/spec.yaml b/airbyte-integrations/connectors/source-configcat/source_configcat/spec.yaml new file mode 100644 index 0000000000000..f391aed901b70 --- /dev/null +++ b/airbyte-integrations/connectors/source-configcat/source_configcat/spec.yaml @@ -0,0 +1,23 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/configcat +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Configcat Spec + type: object + required: + - username + - password + additionalProperties: true + properties: + username: + title: Username + type: string + description: >- + Basic auth user name. See here. + password: + title: Password + type: string + description: >- + Basic auth password. See here. + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml b/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml index e3c90fbb0e808..9e157316fdc56 100644 --- a/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/convertkit.yaml @@ -2,7 +2,7 @@ version: "0.1.0" definitions: selector: extractor: - field_pointer: [ "{{ options['name'] }}" ] + field_pointer: ["{{ options['name'] }}"] requester: # API Docs: https://developers.convertkit.com/#overview url_base: "https://api.convertkit.com/v3" @@ -47,7 +47,7 @@ definitions: $ref: "*ref(definitions.retriever)" record_selector: extractor: - field_pointer: [ "courses" ] + field_pointer: ["courses"] $options: name: "sequences" primary_key: "id" diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json index 2d9c6ef73ebdb..b4177a6c49b5d 100644 --- a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/broadcasts.json @@ -5,19 +5,11 @@ "type": "integer" }, "created_at": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "subject": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } }, - "required": [ - "id" - ] -} \ No newline at end of file + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json index 95519975d6772..e898ccd307aa1 100644 --- a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/forms.json @@ -5,79 +5,41 @@ "type": "integer" }, "uid": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "name": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "created_at": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "archived": { - "type": [ - "boolean", - "null" - ] + "type": ["boolean", "null"] }, "type": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "url": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, 
"embed_js": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "embed_url": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "title": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "description": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "sign_up_button_text": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "success_message": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } }, - "required": [ - "id" - ] -} \ No newline at end of file + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json index 3767adac7c421..f1e4f67062ef8 100644 --- a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/sequences.json @@ -5,31 +5,17 @@ "type": "integer" }, "name": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "hold": { - "type": [ - "boolean", - "null" - ] + "type": ["boolean", "null"] }, "repeat": { - "type": [ - "boolean", - "null" - ] + "type": ["boolean", "null"] }, "created_at": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } }, - "required": [ - "id" - ] -} \ No newline at end of file + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json index 8e94599a7ed55..15ae518ac82ad 100644 --- a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/subscribers.json @@ -1,49 +1,29 @@ { - "type": "object", - "properties": { - "id": { - "type": "integer" - }, - "first_name": { - "type": [ - "string", - "null" - ] - }, - "email_address": { - "type": [ - "string", - "null" - ] - }, - "state": { - "type": [ - "string", - "null" - ] - }, - "created_at": { - "type": [ - "string", - "null" - ] - }, - "fields": { - "type": [ - "object", - "null" - ], - "properties": { - "last_name": { - "type": [ - "string", - "null" - ] - } - } - } + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "first_name": { + "type": ["string", "null"] + }, + "email_address": { + "type": ["string", "null"] }, - "required": [ - "id" - ] -} \ No newline at end of file + "state": { + "type": ["string", "null"] + }, + "created_at": { + "type": ["string", "null"] + }, + "fields": { + "type": ["object", "null"], + "properties": { + "last_name": { + "type": ["string", "null"] + } + } + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json index 66ccdc68f34ef..09a41f24c0ae2 100644 --- a/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json +++ b/airbyte-integrations/connectors/source-convertkit/source_convertkit/schemas/tags.json @@ -5,19 +5,11 @@ "type": "integer" }, "name": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] }, "created_at": { - "type": [ - "string", - "null" - ] + "type": ["string", "null"] } }, - "required": [ - "id" - ] -} \ No newline at end of file + 
"required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-convex/.dockerignore b/airbyte-integrations/connectors/source-convex/.dockerignore new file mode 100644 index 0000000000000..f5f1f3642250b --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_convex +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-convex/Dockerfile b/airbyte-integrations/connectors/source-convex/Dockerfile new file mode 100644 index 0000000000000..05c0b4727a567 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_convex ./source_convex + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-convex diff --git a/airbyte-integrations/connectors/source-convex/README.md b/airbyte-integrations/connectors/source-convex/README.md new file mode 100644 index 0000000000000..864e0a264a7f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/README.md @@ -0,0 +1,132 @@ +# Convex Source + +This is the repository for the Convex source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/convex). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-convex:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/convex) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_convex/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source convex test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-convex:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-convex:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-convex:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-convex:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-convex:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
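+For illustration only, a resource-managing fixture in `integration_tests/acceptance.py` could look like the sketch below; this connector does not currently need one, and the temporary directory stands in for whatever external resource your tests might require:
+```
+import tempfile
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+    # Illustrative sketch: create a scratch resource before the acceptance-test
+    # session starts and clean it up afterwards. Replace the temporary directory
+    # with whatever your connector actually needs.
+    with tempfile.TemporaryDirectory() as scratch_dir:
+        yield scratch_dir
+```
+Because the fixture is session-scoped and `autouse=True`, it runs once around the entire acceptance-test run.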
+To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-convex:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-convex:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-convex/acceptance-test-config.yml b/airbyte-integrations/connectors/source-convex/acceptance-test-config.yml new file mode 100644 index 0000000000000..b17a53e928f47 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/acceptance-test-config.yml @@ -0,0 +1,29 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-convex:dev +tests: + spec: + - spec_path: "source_convex/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: no + exact_order: yes + extra_records: yes + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-convex/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-convex/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-convex/bootstrap.md b/airbyte-integrations/connectors/source-convex/bootstrap.md new file mode 100644 index 0000000000000..952bdcbdf2692 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/bootstrap.md @@ -0,0 +1,18 @@ +# Convex + +## Overview + +Convex is the reactive backend-as-a-service for web developers. +As part of the backend, Convex stores developer-defined documents in tables. +Convex's HTTP API allows a developer to retrieve documents from their Convex tables. + +## Endpoints + +Convex defines three endpoints used for extracting data: + +1. `/json_schema` identifies the data format for each table. +2. `/list_snapshot` returns pages of a table's data at a snapshot timestamp, for initial sync. +3. `/document_deltas` returns pages of modifications to a table's data after a given timestamp. + +For more details, see the documentation for Convex Sync endpoints at +[https://docs.convex.dev/http-api/#sync](https://docs.convex.dev/http-api/#sync). diff --git a/airbyte-integrations/connectors/source-convex/build.gradle b/airbyte-integrations/connectors/source-convex/build.gradle new file mode 100644 index 0000000000000..19b2ea0ede6d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_convex' +} diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/__init__.py b/airbyte-integrations/connectors/source-convex/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-convex/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..38653dcad26fa --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/abnormal_state.json @@ -0,0 +1,12 @@ +{ + "posts": { + "snapshot_cursor": "hi", + "snapshot_has_more": false, + "delta_cursor": 2652635567679741986 + }, + "users": { + "snapshot_cursor": "hi", + "snapshot_has_more": false, + "delta_cursor": 2660025892355943945 + } +} diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-convex/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-convex/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..901b870079f6f --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/configured_catalog.json @@ -0,0 +1,56 @@ +{ + "streams": [ + { + "sync_mode": "incremental", + "destination_sync_mode": "append", + "stream": { + "name": "posts", + "json_schema": { + "type": "object", + "properties": { + "_creationTime": { "type": "number" }, + "_id": { + "type": "object", + "properties": { "$id": { "type": "string" } } + }, + "author": { + "type": "object", + "properties": { "$id": { "type": "string" } } + }, + "body": { "type": "string" }, + "time": { "type": "number" }, + "_ts": { "type": "number" } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["_ts"], + "source_defined_primary_key": [["_id"]] + } + }, + { + "sync_mode": "incremental", + "destination_sync_mode": "append", + "stream": { + "name": "users", + "json_schema": { + "type": "object", + "properties": { + "_creationTime": { "type": "number" }, + "_id": { + "type": "object", + "properties": { "$id": { "type": "string" } } + }, + "name": { "type": "string" }, + "tokenIdentifier": { "type": "string" }, + "_ts": { "type": "number" } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["_ts"], + "source_defined_primary_key": [["_id"]] + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-convex/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..2dbe5eab26af6 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/expected_records.txt @@ -0,0 +1,3 @@ +{"stream": "users", "data": {"_id": {"$id": "users|wnmxwZrHuQa8TWcCXl8faAW"}, "_creationTime": 1652593901795.4175, "name": "Lee Danilek", "tokenIdentifier": "https://dev-wgspahhl.us.auth0.com/|google-oauth2|116819734026499097324", "_ts": 1660025892365542190, "_ab_cdc_deleted_at": null, "_ab_cdc_lsn": 1660025892365542190, "_ab_cdc_updated_at": "2022-08-09T06:18:12.365542", "_deleted": false}, "emitted_at": 1665605909352} +{"stream": "posts", "data": {"_id": {"$id": "posts|UZs05arHuQa8TWcCXl8faAW"}, "_creationTime": 1652595311880.4985, "author": {"$id": "users|wnmxwZrHuQa8TWcCXl8faAW"}, "body": "first!", "time": 1652595311875.6301, "_ts": 1660025892355943945, "_ab_cdc_deleted_at": null, "_ab_cdc_lsn": 1660025892355943945, "_ab_cdc_updated_at": "2022-08-09T06:18:12.355944", "_deleted": false}, "emitted_at": 1665605909353} +{"stream": "posts", "data": {"_id": {"$id": "posts|iQKB5arHuQa8TWcCXl8faAW"}, "_creationTime": 1652595855409.799, "author": {"$id": "users|wnmxwZrHuQa8TWcCXl8faAW"}, "body": "second!", "time": 1652595855404.5964, "_ts": 1660025892355943945, "_ab_cdc_deleted_at": null, "_ab_cdc_lsn": 1660025892355943945, "_ab_cdc_updated_at": "2022-08-09T06:18:12.355944", "_deleted": false}, "emitted_at": 1665605909354} \ No newline at end of file diff --git 
a/airbyte-integrations/connectors/source-convex/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-convex/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d4841ab07cc35 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "deployment_url": "https://murky-swan-635.convex.cloud", + "access_key": "bad" +} diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-convex/integration_tests/sample_config.json new file mode 100644 index 0000000000000..6de26e5d188bc --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "deployment_url": "https://descriptive-vulture-260.convex.cloud", + "access_key": "Your access token" +} diff --git a/airbyte-integrations/connectors/source-convex/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-convex/integration_tests/sample_state.json new file mode 100644 index 0000000000000..00b4b8dba4449 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/integration_tests/sample_state.json @@ -0,0 +1,12 @@ +{ + "posts": { + "snapshot_cursor": "hi", + "snapshot_has_more": false, + "delta_cursor": 1 + }, + "users": { + "snapshot_cursor": null, + "snapshot_has_more": true, + "delta_cursor": null + } +} diff --git a/airbyte-integrations/connectors/source-convex/main.py b/airbyte-integrations/connectors/source-convex/main.py new file mode 100644 index 0000000000000..d17bdd282a60f --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_convex import SourceConvex + +if __name__ == "__main__": + source = SourceConvex() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-convex/requirements.txt b/airbyte-integrations/connectors/source-convex/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-convex/setup.py b/airbyte-integrations/connectors/source-convex/setup.py new file mode 100644 index 0000000000000..8da8d18c95bd7 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/setup.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", + "responses~=0.13.3", +] + +setup( + name="source_convex", + description="Source implementation for Convex.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-convex/source_convex/__init__.py b/airbyte-integrations/connectors/source-convex/source_convex/__init__.py new file mode 100644 index 0000000000000..60255167b05f7 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/source_convex/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceConvex + +__all__ = ["SourceConvex"] diff --git a/airbyte-integrations/connectors/source-convex/source_convex/source.py b/airbyte-integrations/connectors/source-convex/source_convex/source.py new file mode 100644 index 0000000000000..b91ad4f4c6f36 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/source_convex/source.py @@ -0,0 +1,194 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from datetime import datetime +from typing import Any, Dict, Iterable, Iterator, List, Mapping, MutableMapping, Optional, Tuple, TypedDict + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import IncrementalMixin, Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.requests_native_auth.token import TokenAuthenticator + +ConvexConfig = TypedDict( + "ConvexConfig", + { + "deployment_url": str, + "access_key": str, + }, +) + +ConvexState = TypedDict( + "ConvexState", + { + "snapshot_cursor": Optional[str], + "snapshot_has_more": bool, + "delta_cursor": Optional[int], + }, +) + + +# Source +class SourceConvex(AbstractSource): + def _json_schemas(self, config: ConvexConfig) -> requests.Response: + deployment_url = config["deployment_url"] + access_key = config["access_key"] + url = f"{deployment_url}/api/json_schemas?deltaSchema=true&format=convex_json" + headers = {"Authorization": f"Convex {access_key}"} + return requests.get(url, headers=headers) + + def check_connection(self, logger: Any, config: ConvexConfig) -> Tuple[bool, Any]: + """ + Connection check to validate that the user-provided config can be used to connect to the underlying API + + :param config: the user-input config object conforming to the connector's spec.yaml + :param logger: logger object + :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. + """ + resp = self._json_schemas(config) + if resp.status_code == 200: + return True, None + else: + return False, resp.text + + def streams(self, config: ConvexConfig) -> List[Stream]: + """ + :param config: A Mapping of the user input configuration as defined in the connector spec. 
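+        :return: A list of ConvexStream instances, one per table reported by the deployment's /api/json_schemas endpoint.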
+ """ + resp = self._json_schemas(config) + assert resp.status_code == 200 + json_schemas = resp.json() + table_names = list(json_schemas.keys()) + return [ + ConvexStream( + config["deployment_url"], + config["access_key"], + table_name, + json_schemas[table_name], + ) + for table_name in table_names + ] + + +class ConvexStream(HttpStream, IncrementalMixin): + def __init__(self, deployment_url: str, access_key: str, table_name: str, json_schema: Mapping[str, Any]): + self.deployment_url = deployment_url + self.table_name = table_name + if json_schema: + json_schema["properties"]["_ab_cdc_lsn"] = {"type": "number"} + json_schema["properties"]["_ab_cdc_updated_at"] = {"type": "string"} + json_schema["properties"]["_ab_cdc_deleted_at"] = {"anyOf": [{"type": "string"}, {"type": "null"}]} + else: + json_schema = {} + self.json_schema = json_schema + self._snapshot_cursor_value: Optional[str] = None + self._snapshot_has_more = True + self._delta_cursor_value: Optional[int] = None + self._delta_has_more = True + super().__init__(TokenAuthenticator(access_key, "Convex")) + + @property + def name(self) -> str: + return self.table_name + + @property + def url_base(self) -> str: + return self.deployment_url + + def get_json_schema(self) -> Mapping[str, Any]: + return self.json_schema + + primary_key = "_id" + cursor_field = "_ts" + + # Checkpoint stream reads after this many records. This prevents re-reading of data if the stream fails for any reason. + state_checkpoint_interval = 128 + + @property + def state(self) -> ConvexState: + return { + "snapshot_cursor": self._snapshot_cursor_value, + "snapshot_has_more": self._snapshot_has_more, + "delta_cursor": self._delta_cursor_value, + } + + @state.setter + def state(self, value: ConvexState) -> None: + self._snapshot_cursor_value = value["snapshot_cursor"] + self._snapshot_has_more = value["snapshot_has_more"] + self._delta_cursor_value = value["delta_cursor"] + + def next_page_token(self, response: requests.Response) -> Optional[ConvexState]: + # Inner level of pagination shares the same state as outer, + # and returns None to indicate that we're done. 
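+        # The sync protocol has two phases: /api/list_snapshot is paged until the
+        # snapshot is exhausted, then /api/document_deltas is paged from the snapshot
+        # timestamp onwards (see path() and parse_response() below).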
+ return self.state if self._delta_has_more else None + + def path( + self, + stream_state: Optional[ConvexState] = None, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[ConvexState] = None, + ) -> str: + # https://docs.convex.dev/http-api/#sync + if self._snapshot_has_more: + return "/api/list_snapshot" + else: + return "/api/document_deltas" + + def parse_response( + self, + response: requests.Response, + stream_state: ConvexState, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[ConvexState] = None, + ) -> Iterable[Any]: + resp_json = response.json() + if self._snapshot_has_more: + self._snapshot_cursor_value = resp_json["cursor"] + self._snapshot_has_more = resp_json["hasMore"] + self._delta_cursor_value = resp_json["snapshot"] + else: + self._delta_cursor_value = resp_json["cursor"] + self._delta_has_more = resp_json["hasMore"] + return list(resp_json["values"]) + + def request_params( + self, + stream_state: ConvexState, + stream_slice: Optional[Mapping[str, Any]] = None, + next_page_token: Optional[ConvexState] = None, + ) -> MutableMapping[str, Any]: + params: Dict[str, Any] = {"tableName": self.table_name, "format": "convex_json"} + if self._snapshot_has_more: + if self._snapshot_cursor_value: + params["cursor"] = self._snapshot_cursor_value + if self._delta_cursor_value: + params["snapshot"] = self._delta_cursor_value + else: + if self._delta_cursor_value: + params["cursor"] = self._delta_cursor_value + return params + + def get_updated_state(self, current_stream_state: ConvexState, latest_record: Mapping[str, Any]) -> ConvexState: + """ + This (deprecated) method is still used by AbstractSource to update state between calls to `read_records`. + """ + return self.state + + def read_records(self, *args: Any, **kwargs: Any) -> Iterator[Any]: + for record in super().read_records(*args, **kwargs): + ts_ns = record["_ts"] + ts_seconds = ts_ns / 1e9 # convert from nanoseconds. + # equivalent of java's `new Timestamp(transactionMillis).toInstant().toString()` + ts_datetime = datetime.utcfromtimestamp(ts_seconds) + ts = ts_datetime.isoformat() + # DebeziumEventUtils.CDC_LSN + record["_ab_cdc_lsn"] = ts_ns + # DebeziumEventUtils.CDC_DELETED_AT + record["_ab_cdc_updated_at"] = ts + record["_deleted"] = "_deleted" in record and record["_deleted"] + # DebeziumEventUtils.CDC_DELETED_AT + record["_ab_cdc_deleted_at"] = ts if record["_deleted"] else None + yield record diff --git a/airbyte-integrations/connectors/source-convex/source_convex/spec.yaml b/airbyte-integrations/connectors/source-convex/source_convex/spec.yaml new file mode 100644 index 0000000000000..27ba86a6c876a --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/source_convex/spec.yaml @@ -0,0 +1,20 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/convex +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Convex Source Spec + type: object + required: + - deployment_url + - access_key + properties: + deployment_url: + type: string + title: Deployment Url + examples: + - https://murky-swan-635.convex.cloud + - https://cluttered-owl-337.convex.cloud + access_key: + type: string + title: Access Key + description: API access key used to retrieve data from Convex. 
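+      # airbyte_secret marks this field as sensitive so Airbyte masks it in the UI.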
+ airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-convex/unit_tests/__init__.py b/airbyte-integrations/connectors/source-convex/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-convex/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-convex/unit_tests/test_incremental_streams.py new file mode 100644 index 0000000000000..7a89df6e71848 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/unit_tests/test_incremental_streams.py @@ -0,0 +1,83 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from unittest.mock import MagicMock + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_convex.source import ConvexStream + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(ConvexStream, "path", "v0/example_endpoint") + mocker.patch.object(ConvexStream, "primary_key", "test_primary_key") + mocker.patch.object(ConvexStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + expected_cursor_field = "_ts" + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + resp = MagicMock() + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 123}], "cursor": 1234, "snapshot": 3000, "hasMore": True} + stream.parse_response(resp, {}) + assert stream.get_updated_state(None, None) == { + "snapshot_cursor": 1234, + "snapshot_has_more": True, + "delta_cursor": 3000, + } + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 1235, "snapshot": 3000, "hasMore": False} + stream.parse_response(resp, {}) + assert stream.get_updated_state(None, None) == { + "snapshot_cursor": 1235, + "snapshot_has_more": False, + "delta_cursor": 3000, + } + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 8000, "hasMore": True} + stream.parse_response(resp, {}) + assert stream.get_updated_state(None, None) == { + "snapshot_cursor": 1235, + "snapshot_has_more": False, + "delta_cursor": 8000, + } + assert stream._delta_has_more is True + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 9000, "hasMore": False} + stream.parse_response(resp, {}) + assert stream.get_updated_state(None, None) == { + "snapshot_cursor": 1235, + "snapshot_has_more": False, + "delta_cursor": 9000, + } + assert stream._delta_has_more is False + + +def test_stream_slices(patch_incremental_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + mocker.patch.object(ConvexStream, "cursor_field", "dummy_field") + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + assert stream.supports_incremental + + +def 
test_source_defined_cursor(patch_incremental_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + expected_checkpoint_interval = 128 + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-convex/unit_tests/test_source.py b/airbyte-integrations/connectors/source-convex/unit_tests/test_source.py new file mode 100644 index 0000000000000..9490d1f31e561 --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/unit_tests/test_source.py @@ -0,0 +1,95 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +import responses +from source_convex.source import SourceConvex + + +def setup_responses(): + sample_shapes_resp = { + "posts": { + "type": "object", + "properties": { + "_creationTime": {"type": "number"}, + "_id": {"$description": "Id(posts)", "type": "object", "properties": {"$id": {"type": "string"}}}, + "author": {"$description": "Id(users)", "type": "object", "properties": {"$id": {"type": "string"}}}, + "body": {"type": "string"}, + "_ts": {"type": "integer"}, + "_deleted": {"type": "boolean"}, + }, + "$schema": "http://json-schema.org/draft-07/schema#", + }, + "users": { + "type": "object", + "properties": { + "_creationTime": {"type": "number"}, + "_id": {"$description": "Id(users)", "type": "object", "properties": {"$id": {"type": "string"}}}, + "name": {"type": "string"}, + "tokenIdentifier": {"type": "string"}, + "_ts": {"type": "integer"}, + "_deleted": {"type": "boolean"}, + }, + "$schema": "http://json-schema.org/draft-07/schema#", + }, + } + responses.add( + responses.GET, + "https://murky-swan-635.convex.cloud/api/json_schemas?deltaSchema=true&format=convex_json", + json=sample_shapes_resp, + ) + + +@responses.activate +def test_check_connection(mocker): + setup_responses() + source = SourceConvex() + logger_mock = MagicMock() + assert source.check_connection( + logger_mock, + { + "deployment_url": "https://murky-swan-635.convex.cloud", + "access_key": "test_api_key", + }, + ) == (True, None) + + +@responses.activate +def test_streams(mocker): + setup_responses() + source = SourceConvex() + streams = source.streams( + { + "deployment_url": "https://murky-swan-635.convex.cloud", + "access_key": "test_api_key", + } + ) + assert len(streams) == 2 + streams.sort(key=lambda stream: stream.table_name) + assert streams[0].table_name == "posts" + assert streams[1].table_name == "users" + assert all(stream.deployment_url == "https://murky-swan-635.convex.cloud" for stream in streams) + assert all(stream._session.auth.get_auth_header() == {"Authorization": "Convex test_api_key"} for stream in streams) + shapes = [stream.get_json_schema() for stream in streams] + assert all(shape["type"] == "object" for shape in shapes) + properties = [shape["properties"] for shape in shapes] + assert [ + props["_id"] + == { + "type": "object", + "properties": { + "$id": {"type": "string"}, + }, + } + for props in properties + ] + assert [props["_ts"] == {"type": "number"} for props in properties] + assert [props["_creationTime"] == {"type": "number"} for props in properties] + assert set(properties[0].keys()) == set( + ["_id", "_ts", "_deleted", "_creationTime", "author", "body", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at"] + ) + assert 
set(properties[1].keys()) == set( + ["_id", "_ts", "_deleted", "_creationTime", "name", "tokenIdentifier", "_ab_cdc_lsn", "_ab_cdc_updated_at", "_ab_cdc_deleted_at"] + ) diff --git a/airbyte-integrations/connectors/source-convex/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-convex/unit_tests/test_streams.py new file mode 100644 index 0000000000000..8ca43a6d1190b --- /dev/null +++ b/airbyte-integrations/connectors/source-convex/unit_tests/test_streams.py @@ -0,0 +1,106 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_convex.source import ConvexStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(ConvexStream, "path", "v0/example_endpoint") + mocker.patch.object(ConvexStream, "primary_key", "test_primary_key") + mocker.patch.object(ConvexStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"tableName": "messages", "format": "convex_json"} + assert stream.request_params(**inputs) == expected_params + stream._snapshot_cursor_value = 1234 + expected_params = {"tableName": "messages", "format": "convex_json", "cursor": 1234} + assert stream.request_params(**inputs) == expected_params + stream._snapshot_has_more = False + stream._delta_cursor_value = 2345 + expected_params = {"tableName": "messages", "format": "convex_json", "cursor": 2345} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + resp = MagicMock() + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 123}], "cursor": 1234, "snapshot": 5000, "hasMore": True} + stream.parse_response(resp, {}) + assert stream.next_page_token(resp) == { + "snapshot_cursor": 1234, + "snapshot_has_more": True, + "delta_cursor": 5000, + } + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 1235, "snapshot": 5000, "hasMore": False} + stream.parse_response(resp, {}) + assert stream.next_page_token(resp) == { + "snapshot_cursor": 1235, + "snapshot_has_more": False, + "delta_cursor": 5000, + } + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 6000, "hasMore": True} + stream.parse_response(resp, {}) + assert stream.next_page_token(resp) == { + "snapshot_cursor": 1235, + "snapshot_has_more": False, + "delta_cursor": 6000, + } + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1235}], "cursor": 7000, "hasMore": False} + stream.parse_response(resp, {}) + assert stream.next_page_token(resp) is None + assert stream.state == {"snapshot_cursor": 1235, "snapshot_has_more": False, "delta_cursor": 7000} + + +def test_parse_response(patch_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + resp = MagicMock() + resp.json = lambda: {"values": [{"_id": "my_id", "field": "f", "_ts": 1234}], "cursor": 1234, "snapshot": 2000, "hasMore": True} + inputs = {"response": resp, "stream_state": {}} + expected_parsed_objects = [{"_id": "my_id", "field": "f", "_ts": 1234}] + assert stream.parse_response(**inputs) == expected_parsed_objects + assert stream.state == 
{"snapshot_cursor": 1234, "snapshot_has_more": True, "delta_cursor": 2000} + + +def test_request_headers(patch_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + assert stream.request_headers(**inputs) == {} + + +def test_http_method(patch_base_class): + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = ConvexStream("murky-swan-635", "accesskey", "messages", None) + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-copper/.dockerignore b/airbyte-integrations/connectors/source-copper/.dockerignore new file mode 100644 index 0000000000000..4be14b79b0034 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_copper +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-copper/Dockerfile b/airbyte-integrations/connectors/source-copper/Dockerfile new file mode 100644 index 0000000000000..77aa36b577a0d --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_copper ./source_copper + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-copper diff --git a/airbyte-integrations/connectors/source-copper/README.md b/airbyte-integrations/connectors/source-copper/README.md new file mode 100644 index 0000000000000..3cc52f79034b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/README.md @@ -0,0 +1,132 @@ +# Copper Source + +This is the repository for the Copper source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/copper). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete the prerequisites in this section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it: just put your deps in `setup.py`, install using `pip install -r requirements.txt`, and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector with Gradle. This is typically used in CI and is not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-copper:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/copper) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_copper/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials stored in Lastpass under the secret name `source copper test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-copper:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-copper:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-copper:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-copper:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-copper:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-copper:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside the `integration_tests/` folder (a minimal sketch is included at the end of this README), then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure the tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. +To run your integration tests together with the acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with Docker, run `./acceptance-test-docker.sh` from the connector root. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-copper:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-copper:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
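For the "Custom Integration tests" section above, the following is a minimal sketch of what such a test could look like. The file name `integration_tests/test_people_stream.py` and the assertion are hypothetical illustrations and are not part of this connector; the sketch only reuses the `People` stream from `source_copper/source.py` and the `secrets/config.json` file described earlier in this README.
```
# integration_tests/test_people_stream.py -- hypothetical example, not shipped with the connector
import json

from airbyte_cdk.models import SyncMode
from source_copper.source import People


def test_people_stream_returns_records():
    # Use the same secrets/config.json referenced by the other commands in this README.
    with open("secrets/config.json") as config_file:
        config = json.load(config_file)
    stream = People(**config)
    # Read the stream in full-refresh mode and check that each record carries the primary key.
    records = list(stream.read_records(sync_mode=SyncMode.full_refresh))
    assert all("id" in record for record in records)
```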
diff --git a/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml b/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml new file mode 100644 index 0000000000000..821585f27b8a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-copper:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_copper/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-copper/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-copper/acceptance-test-docker.sh new file mode 100755 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-copper/build.gradle b/airbyte-integrations/connectors/source-copper/build.gradle new file mode 100644 index 0000000000000..4f761c74cd660 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_copper' +} diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/__init__.py b/airbyte-integrations/connectors/source-copper/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-copper/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-copper/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-copper/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..e5cb099eb6dfe --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "people", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "projects", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "companies", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-copper/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..69def70c1df3c --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "invalid api key", + "user_email": "invalid email" +} diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-copper/integration_tests/sample_config.json new file mode 100644 index 0000000000000..249df5edfccbc --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "1234ABCD", + "user_email": "xxx@email.com" +} diff --git a/airbyte-integrations/connectors/source-copper/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-copper/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-copper/main.py b/airbyte-integrations/connectors/source-copper/main.py new 
file mode 100644 index 0000000000000..555e1210c8dd5 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_copper import SourceCopper + +if __name__ == "__main__": + source = SourceCopper() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-copper/requirements.txt b/airbyte-integrations/connectors/source-copper/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-copper/setup.py b/airbyte-integrations/connectors/source-copper/setup.py new file mode 100644 index 0000000000000..aa1c8291ca2aa --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/setup.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "responses~=0.21.0", + "source-acceptance-test", +] + +setup( + name="source_copper", + description="Source implementation for Copper.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-copper/source_copper/__init__.py b/airbyte-integrations/connectors/source-copper/source_copper/__init__.py new file mode 100644 index 0000000000000..1ae3654426552 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceCopper + +__all__ = ["SourceCopper"] diff --git a/airbyte-integrations/connectors/source-copper/source_copper/schemas/TODO.md b/airbyte-integrations/connectors/source-copper/source_copper/schemas/TODO.md new file mode 100644 index 0000000000000..cf1efadb3c9c9 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/schemas/TODO.md @@ -0,0 +1,25 @@ +# TODO: Define your stream schemas +Your connector must describe the schema of each stream it can output using [JSONSchema](https://json-schema.org). + +The simplest way to do this is to describe the schema of your streams using one `.json` file per stream. You can also dynamically generate the schema of your stream in code, or you can combine both approaches: start with a `.json` file and dynamically add properties to it. + +The schema of a stream is the return value of `Stream.get_json_schema`. + +## Static schemas +By default, `Stream.get_json_schema` reads a `.json` file in the `schemas/` directory whose name is equal to the value of the `Stream.name` property. In turn `Stream.name` by default returns the name of the class in snake case. Therefore, if you have a class `class EmployeeBenefits(HttpStream)` the default behavior will look for a file called `schemas/employee_benefits.json`. You can override any of these behaviors as you need. + +Important note: any objects referenced via `$ref` should be placed in the `shared/` directory in their own `.json` files. 
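As an illustration of the default lookup described above, here is a minimal sketch; the `EmployeeBenefits` class (the example name used in the paragraph above), its endpoint path, and `https://api.example.com/` are hypothetical and only show how the class name maps to `schemas/employee_benefits.json`.
```
from typing import Any, Iterable, Mapping, Optional

import requests
from airbyte_cdk.sources.streams.http import HttpStream


class EmployeeBenefits(HttpStream):
    # Stream.name defaults to the snake_case class name, i.e. "employee_benefits",
    # so the default get_json_schema() will read schemas/employee_benefits.json.
    url_base = "https://api.example.com/"
    primary_key = "id"

    def path(self, **kwargs) -> str:
        return "employee_benefits"

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        return None  # single-page endpoint in this sketch

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
        yield from response.json()
```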
+ +## Dynamic schemas +If you'd rather define your schema in code, override `Stream.get_json_schema` in your stream class to return a `dict` describing the schema using [JSONSchema](https://json-schema.org). + +## Dynamically modifying static schemas +Override `Stream.get_json_schema` to run the default behavior, edit the returned value, then return the edited value: +``` +def get_json_schema(self): + schema = super().get_json_schema() + schema['dynamically_determined_property'] = "property" + return schema +``` + +Delete this file once you're done. Or don't. Up to you :) diff --git a/airbyte-integrations/connectors/source-copper/source_copper/schemas/companies.json b/airbyte-integrations/connectors/source-copper/source_copper/schemas/companies.json new file mode 100644 index 0000000000000..81e9ce0abf497 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/schemas/companies.json @@ -0,0 +1,85 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "address": { + "type": "object", + "properties": { + "street": { + "type": "string" + }, + "city": { + "type": "string" + }, + "state": { + "type": "null" + }, + "postal_code": { + "type": "string" + }, + "country": { + "type": "null" + } + } + }, + "assignee_id": { + "type": "integer" + }, + "contact_type_id": { + "type": "integer" + }, + "details": { + "type": "string" + }, + "email_domain": { + "type": "string" + }, + "socials": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": "string" + }, + "category": { + "type": "string" + } + } + } + }, + "tags": { + "type": "array", + "items": {} + }, + "websites": { + "type": "array", + "items": { + "type": ["object", "null"], + "properties": { + "url": { + "type": "string" + }, + "category": { + "type": "string" + } + } + } + }, + "interaction_count": { + "type": "integer" + }, + "date_created": { + "type": "integer" + }, + "date_modified": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-copper/source_copper/schemas/people.json b/airbyte-integrations/connectors/source-copper/source_copper/schemas/people.json new file mode 100644 index 0000000000000..45f6d33a5eeed --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/schemas/people.json @@ -0,0 +1,123 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "prefix": { + "type": "null" + }, + "first_name": { + "type": "string" + }, + "middle_name": { + "type": "null" + }, + "last_name": { + "type": "string" + }, + "suffix": { + "type": "null" + }, + "address": { + "type": "object", + "properties": { + "street": { + "type": "string" + }, + "city": { + "type": ["string", "null"] + }, + "state": { + "type": ["string", "null"] + }, + "postal_code": { + "type": ["string", "null"] + }, + "country": { + "type": ["string", "null"] + } + } + }, + "assignee_id": { + "type": "integer" + }, + "company_id": { + "type": "integer" + }, + "company_name": { + "type": "string" + }, + "contact_type_id": { + "type": "integer" + }, + "details": { + "type": "null" + }, + "emails": { + "type": "array", + "items": { + "type": "object", + "properties": { + "category": { + "type": "string" + }, + "email": { + "type": "string" + } + } + } + }, + "phone_numbers": { + "type": "array", + "items": { + "type": "object", + 
"properties": { + "number": { + "type": "string" + }, + "category": { + "type": "string" + } + } + } + }, + "title": { + "type": "null" + }, + "websites": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": "string" + }, + "category": { + "type": "string" + } + } + } + }, + "date_created": { + "type": "integer" + }, + "date_modified": { + "type": ["integer", "null"] + }, + "date_last_contacted": { + "type": ["integer", "null"] + }, + "interaction_count": { + "type": "integer" + }, + "date_lead_created": { + "type": ["integer", "null"] + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-copper/source_copper/schemas/projects.json b/airbyte-integrations/connectors/source-copper/source_copper/schemas/projects.json new file mode 100644 index 0000000000000..0cc15febf620b --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/schemas/projects.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "related_resource": { + "type": ["string", "null"] + }, + "assignee_id": { + "type": "integer" + }, + "status": { + "type": "string" + }, + "details": { + "type": "string" + }, + "date_created": { + "type": "integer" + }, + "date_modified": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-copper/source_copper/source.py b/airbyte-integrations/connectors/source-copper/source_copper/source.py new file mode 100644 index 0000000000000..047a772a0f66f --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/source.py @@ -0,0 +1,105 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import json +from abc import ABC +from typing import Any, Iterable, List, Mapping, Optional, Tuple + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream + + +# Basic full refresh stream +class CopperStream(HttpStream, ABC): + def __init__(self, *args, api_key: str = None, user_email: str = None, **kwargs): + super().__init__(*args, **kwargs) + self._user_email = user_email + self._api_key = api_key + + url_base = "https://api.copper.com/developer_api/v1/" + + @property + def http_method(self) -> str: + return "POST" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # Copper search endpoints are paginated via page_number/page_size in the POST body; + # advance the page while the API keeps returning results, otherwise stop paginating. + body = json.loads(response.request.body) + result = response.json() + if body and result: + page_number = body.get("page_number") + return {"page_number": page_number + 1, "page_size": 200} + else: + return None + + def request_body_json( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Optional[Mapping]: + + if next_page_token: + return next_page_token + + return {"page_number": 1, "page_size": 200} + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return { + "X-PW-AccessToken": self._api_key, + "X-PW-UserEmail": self._user_email, + "X-PW-Application": "developer_api", + "Content-type": "application/json", + } + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_result = response.json() + if response_result: + yield from response_result + return + + +class People(CopperStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "people/search" + + +class Projects(CopperStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "projects/search" + + +class Companies(CopperStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "companies/search" + + +# Source +class SourceCopper(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, Any]: + try: + records = People(**config).read_records(sync_mode=SyncMode.full_refresh) + next(records, None) + return True, None + except Exception as error: + return False, f"Unable to connect to Copper API with the provided credentials - {repr(error)}" + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + return [People(**config), Companies(**config), Projects(**config)] diff --git a/airbyte-integrations/connectors/source-copper/source_copper/spec.yaml b/airbyte-integrations/connectors/source-copper/source_copper/spec.yaml new file mode 100644 index 0000000000000..751c441aad825 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/source_copper/spec.yaml @@ -0,0 +1,18 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/copper +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Copper Spec + type: object + required: + - api_key + - 
user_email + properties: + api_key: + type: string + title: API Key + description: Copper API key + airbyte_secret: true + user_email: + type: string + title: User email + description: User email used to log in to Copper diff --git a/airbyte-integrations/connectors/source-copper/unit_tests/__init__.py b/airbyte-integrations/connectors/source-copper/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-copper/unit_tests/test_source.py b/airbyte-integrations/connectors/source-copper/unit_tests/test_source.py new file mode 100644 index 0000000000000..ab9674286b56a --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/unit_tests/test_source.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +import responses +from source_copper.source import SourceCopper + + +@responses.activate +def test_check_connection(mocker): + source = SourceCopper() + logger_mock, config_mock = MagicMock(), MagicMock() + url = "https://api.copper.com/developer_api/v1/people/search" + responses.add(responses.POST, url, json={}) + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceCopper() + config_mock = MagicMock() + streams = source.streams(config_mock) + # TODO: replace this with your streams number + expected_streams_number = 3 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-copper/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-copper/unit_tests/test_streams.py new file mode 100644 index 0000000000000..b234725f0b9a0 --- /dev/null +++ b/airbyte-integrations/connectors/source-copper/unit_tests/test_streams.py @@ -0,0 +1,70 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_copper.source import CopperStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(CopperStream, "path", "v0/example_endpoint") + mocker.patch.object(CopperStream, "primary_key", "test_primary_key") + mocker.patch.object(CopperStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = CopperStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request parameters + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_request_headers(patch_base_class): + stream = CopperStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request headers + expected_headers = { + "Content-type": "application/json", + "X-PW-AccessToken": None, + "X-PW-Application": "developer_api", + "X-PW-UserEmail": None, + } + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = CopperStream() + # TODO: replace this with your expected http request method + expected_method = "POST" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = CopperStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = CopperStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml b/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml index 0bd32195927f5..c19214c478427 100644 --- a/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-courier/acceptance-test-config.yml @@ -15,6 +15,8 @@ tests: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" timeout_seconds: 3600 + expect_records: + path: "integration_tests/expected_records.txt" full_refresh: - config_path: "secrets/config.json" configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json index 192e8acc52654..316b0fe1754db 100644 --- a/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-courier/integration_tests/configured_catalog.json @@ -8,33 +8,6 @@ }, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "message_info", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - 
}, - { - "stream": { - "name": "message_history", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" - }, - { - "stream": { - "name": "message_output", - "json_schema": {}, - "supported_sync_modes": ["full_refresh"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" } ] } diff --git a/airbyte-integrations/connectors/source-courier/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-courier/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..0d88ebb0b1e06 --- /dev/null +++ b/airbyte-integrations/connectors/source-courier/integration_tests/expected_records.txt @@ -0,0 +1,160 @@ +{"stream":"messages","data":{"enqueued":1666121735936,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634f0008-bec7b59a56bca8de101130db","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_IjXLXfhjax5bN0OH6u7a5","sent":1666121737211,"status":"SENT"},"emitted_at":1667947171536} +{"stream":"messages","data":{"enqueued":1666121725434,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffd-c0dba647b6f2b35f9f3bcb33","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_olEjEpmTQpF4aF6ZQsxLX","sent":1666121726852,"status":"SENT"},"emitted_at":1667947171536} +{"stream":"messages","data":{"enqueued":1666121724248,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffc-e532825f9425164998e14dc4","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_02ZUW5cK9GlX9I168W5XO","sent":1666121725788,"status":"SENT"},"emitted_at":1667947171537} +{"stream":"messages","data":{"enqueued":1666121690699,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effda-77b4ad7bf93c58f0a9700e0c","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_l-qjdf3fR99wdkY-EIi26","sent":1666121692149,"status":"SENT"},"emitted_at":1667947171537} +{"stream":"messages","data":{"enqueued":1666121685251,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd5-cce28d39137f340e58ac1aff","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_rlH5Ftz575KDsgPbAvW1G","sent":1666121688519,"status":"SENT"},"emitted_at":1667947171538} +{"stream":"messages","data":{"enqueued":1666121683846,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd4-1172e40fd51beec78ee3173d","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_st0uBid48Owwr5MUpuYWE","sent":1666121685697,"status":"SENT"},"emitted_at":1667947171538} +{"stream":"messages","data":{"enqueued":1666121682503,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd2-75f7518b8633e5dd765b7484","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_4sLvpFy1g3ngksqOwuaMh","sent":1666121683805,"status":"SENT"},"emitted_at":1667947171538} +{"stream":"messages","data":{"enqueued":1666121680370,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd0-5765da642b7eadb4f534953b","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcos@airbyte.io","recipientId":"anon_qskL2Vf7-m6pVKM7F1Xe8","sent":1666121682269,"status":"SENT"},"emitted_at":1667947171539} 
+{"stream":"messages","data":{"enqueued":1666118182536,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef226-a611b43243f7cdbe2a5115b3","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119196057,"recipient":"marcosmarxm@gmail.com","recipientId":"anon_b4KEGwKFKwKCPRf7DHoux","sent":1666118184073,"status":"OPENED"},"emitted_at":1667947171539} +{"stream":"messages","data":{"enqueued":1666118180791,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef225-8b222f3539887a52ac69cc26","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119193320,"recipient":"marcosmarxm@gmail.com","recipientId":"anon_eNQjQGZSTf0LU0Z_uCMSM","sent":1666118182302,"status":"OPENED"},"emitted_at":1667947171540} +{"stream":"messages","data":{"enqueued":1666118150572,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef206-8620ab19a54eeab221126dad","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","recipient":"marcosmarxm@gmail.com","recipientId":"anon_OqdvsDBUNBR-SKWiBqzrO","sent":1666118151976,"status":"SENT"},"emitted_at":1667947173106} +{"stream":"messages","data":{"enqueued":1666117975292,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef157-ebfbd09d59d116c4ce45c89e","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1667501919456,"recipient":"integration-test@airbyte.io","recipientId":"anon_fzpKb-gaY14Sg7D72yXFX","sent":1666117976893,"status":"OPENED"},"emitted_at":1667947173107} +{"stream":"messages","data":{"enqueued":1666117146240,"id":"1-634eee1a-9130394a2b4ada99aa21567b","opened":1667501920954,"recipient":"integration-test@airbyte.io","recipientId":"anon_39GMHrCYlLlW6vOucqnRl","sent":1666117147345,"status":"OPENED"},"emitted_at":1667947173108} +{"stream":"message_info","data":{"enqueued":1666121735936,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634f0008-bec7b59a56bca8de101130db","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec98228ea52fa","threadId":"183ec98228ea52fa","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:35:37 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121737211,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_IjXLXfhjax5bN0OH6u7a5","sent":1666121737211,"status":"SENT"},"emitted_at":1667947367464} +{"stream":"message_info","data":{"enqueued":1666121725434,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffd-c0dba647b6f2b35f9f3bcb33","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec97f813b7d89","threadId":"183ec97f813b7d89","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:35:26 
GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121726852,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_olEjEpmTQpF4aF6ZQsxLX","sent":1666121726852,"status":"SENT"},"emitted_at":1667947367837} +{"stream":"message_info","data":{"enqueued":1666121724248,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634efffc-e532825f9425164998e14dc4","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec97f7fb4fad1","threadId":"183ec97f7fb4fad1","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:35:25 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121725788,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_02ZUW5cK9GlX9I168W5XO","sent":1666121725788,"status":"SENT"},"emitted_at":1667947368375} +{"stream":"message_info","data":{"enqueued":1666121690699,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effda-77b4ad7bf93c58f0a9700e0c","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec9772444bd1c","threadId":"183ec9772444bd1c","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:52 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121692149,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_l-qjdf3fR99wdkY-EIi26","sent":1666121692149,"status":"SENT"},"emitted_at":1667947368778} +{"stream":"message_info","data":{"enqueued":1666121685251,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd5-cce28d39137f340e58ac1aff","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec976476c31cd","threadId":"183ec976476c31cd","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:48 
GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121688519,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_rlH5Ftz575KDsgPbAvW1G","sent":1666121688519,"status":"SENT"},"emitted_at":1667947369153} +{"stream":"message_info","data":{"enqueued":1666121683846,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd4-1172e40fd51beec78ee3173d","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec975a05099d8","threadId":"183ec975a05099d8","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:45 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121685697,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_st0uBid48Owwr5MUpuYWE","sent":1666121685697,"status":"SENT"},"emitted_at":1667947369610} +{"stream":"message_info","data":{"enqueued":1666121682503,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd2-75f7518b8633e5dd765b7484","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec975386d6917","threadId":"183ec975386d6917","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:43 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121683805,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_4sLvpFy1g3ngksqOwuaMh","sent":1666121683805,"status":"SENT"},"emitted_at":1667947370017} +{"stream":"message_info","data":{"enqueued":1666121680370,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634effd0-5765da642b7eadb4f534953b","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec974d3e4ab1a","threadId":"183ec974d3e4ab1a","labelIds":["UNREAD","SENT","INBOX"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 19:34:42 
GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666121682269,"status":"SENT"}],"recipient":"marcos@airbyte.io","recipientId":"anon_qskL2Vf7-m6pVKM7F1Xe8","sent":1666121682269,"status":"SENT"},"emitted_at":1667947370431} +{"stream":"message_info","data":{"enqueued":1666118182536,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef226-a611b43243f7cdbe2a5115b3","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119196057,"providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"opened":1666119196057,"provider":"gmail","sent":1666118184073,"status":"OPENED"}],"recipient":"marcosmarxm@gmail.com","recipientId":"anon_b4KEGwKFKwKCPRf7DHoux","sent":1666118184073,"status":"OPENED"},"emitted_at":1667947370798} +{"stream":"message_info","data":{"enqueued":1666118180791,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef225-8b222f3539887a52ac69cc26","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1666119193320,"providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"opened":1666119193320,"provider":"gmail","sent":1666118182302,"status":"OPENED"}],"recipient":"marcosmarxm@gmail.com","recipientId":"anon_eNQjQGZSTf0LU0Z_uCMSM","sent":1666118182302,"status":"OPENED"},"emitted_at":1667947371019} +{"stream":"message_info","data":{"enqueued":1666118150572,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef206-8620ab19a54eeab221126dad","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"provider":"gmail","providerResponse":{"status":200,"statusText":"OK","data":{"id":"183ec616dd6676bd","threadId":"183ec616dd6676bd","labelIds":["SENT"]},"headers":{"content-type":"application/json; charset=UTF-8","vary":"X-Origin, Referer, Origin,Accept-Encoding","date":"Tue, 18 Oct 2022 18:35:51 GMT","server":"ESF","cache-control":"private","x-xss-protection":"0","x-frame-options":"SAMEORIGIN","x-content-type-options":"nosniff","alt-svc":"h3=\":443\"; ma=2592000,h3-29=\":443\"; ma=2592000,h3-Q050=\":443\"; ma=2592000,h3-Q046=\":443\"; ma=2592000,h3-Q043=\":443\"; ma=2592000,quic=\":443\"; ma=2592000; v=\"46,43\"","accept-ranges":"none","connection":"close","transfer-encoding":"chunked"}},"sent":1666118151976,"status":"SENT"}],"recipient":"marcosmarxm@gmail.com","recipientId":"anon_OqdvsDBUNBR-SKWiBqzrO","sent":1666118151976,"status":"SENT"},"emitted_at":1667947372536} +{"stream":"message_info","data":{"enqueued":1666117975292,"event":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","id":"1-634ef157-ebfbd09d59d116c4ce45c89e","notification":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","opened":1667501919456,"providers":[{"channel":{"name":"","template":"99cd000d-1492-45ce-aa58-1a735478ec65"},"opened":1667501919456,"provider":"gmail","sent":1666117976893,"status":"OPENED"}],"recipient":"integration-test@airbyte.io","recipientId":"anon_fzpKb-gaY14Sg7D72yXFX","sent":1666117976893,"status":"OPENED"},"emitted_at":1667947373077} 
+{"stream":"message_info","data":{"enqueued":1666117146240,"id":"1-634eee1a-9130394a2b4ada99aa21567b","opened":1667501920954,"providers":[{"channel":{"key":"gmail"},"opened":1667501920954,"provider":"gmail","sent":1666117147345,"status":"OPENED"}],"recipient":"integration-test@airbyte.io","recipientId":"anon_39GMHrCYlLlW6vOucqnRl","sent":1666117147345,"status":"OPENED"},"emitted_at":1667947373418} +{"stream":"message_history","data":{"ts":1666121735945,"type":"ENQUEUED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495264} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121736184,"type":"PROFILE_LOADED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495266} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121736300,"type":"MAPPED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495268} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634f0008-bec7b59a56bca8de101130db/output/83a0c138-2328-4d3d-93f9-7467a7e4395c/text","subject":"/messages/1-634f0008-bec7b59a56bca8de101130db/output/83a0c138-2328-4d3d-93f9-7467a7e4395c/subject","html":"/messages/1-634f0008-bec7b59a56bca8de101130db/output/83a0c138-2328-4d3d-93f9-7467a7e4395c/html"},"ts":1666121736733,"type":"RENDERED","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495269} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121737211,"type":"SENT","message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947495270} +{"stream":"message_history","data":{"ts":1666121725442,"type":"ENQUEUED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495870} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121725618,"type":"PROFILE_LOADED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495871} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121725667,"type":"MAPPED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495872} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634efffd-c0dba647b6f2b35f9f3bcb33/output/d8d7bb5c-f74f-4dc6-b40e-4e560b8d0e40/text","subject":"/messages/1-634efffd-c0dba647b6f2b35f9f3bcb33/output/d8d7bb5c-f74f-4dc6-b40e-4e560b8d0e40/subject","html":"/messages/1-634efffd-c0dba647b6f2b35f9f3bcb33/output/d8d7bb5c-f74f-4dc6-b40e-4e560b8d0e40/html"},"ts":1666121726110,"type":"RENDERED","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495873} 
+{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121726852,"type":"SENT","message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947495875} +{"stream":"message_history","data":{"ts":1666121724260,"type":"ENQUEUED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496108} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121724484,"type":"PROFILE_LOADED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496108} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121724555,"type":"MAPPED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496109} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634efffc-e532825f9425164998e14dc4/output/b869a222-8301-45e2-a21b-e34db3952918/text","subject":"/messages/1-634efffc-e532825f9425164998e14dc4/output/b869a222-8301-45e2-a21b-e34db3952918/subject","html":"/messages/1-634efffc-e532825f9425164998e14dc4/output/b869a222-8301-45e2-a21b-e34db3952918/html"},"ts":1666121725015,"type":"RENDERED","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496110} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121725788,"type":"SENT","message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947496110} +{"stream":"message_history","data":{"ts":1666121690745,"type":"ENQUEUED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496380} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121690962,"type":"PROFILE_LOADED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496381} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121691007,"type":"MAPPED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496382} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effda-77b4ad7bf93c58f0a9700e0c/output/111c6056-156a-4cdc-bd66-372e9e521750/text","subject":"/messages/1-634effda-77b4ad7bf93c58f0a9700e0c/output/111c6056-156a-4cdc-bd66-372e9e521750/subject","html":"/messages/1-634effda-77b4ad7bf93c58f0a9700e0c/output/111c6056-156a-4cdc-bd66-372e9e521750/html"},"ts":1666121691654,"type":"RENDERED","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496384} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121692149,"type":"SENT","message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947496385} 
+{"stream":"message_history","data":{"ts":1666121685259,"type":"ENQUEUED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496687} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121685491,"type":"PROFILE_LOADED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496687} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121685537,"type":"MAPPED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496688} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd5-cce28d39137f340e58ac1aff/output/3fa6953a-5ef2-4b45-bd10-09c685768a27/text","subject":"/messages/1-634effd5-cce28d39137f340e58ac1aff/output/3fa6953a-5ef2-4b45-bd10-09c685768a27/subject","html":"/messages/1-634effd5-cce28d39137f340e58ac1aff/output/3fa6953a-5ef2-4b45-bd10-09c685768a27/html"},"ts":1666121686024,"type":"RENDERED","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496688} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121688519,"type":"SENT","message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947496689} +{"stream":"message_history","data":{"ts":1666121683855,"type":"ENQUEUED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496937} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121684334,"type":"PROFILE_LOADED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496940} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121684385,"type":"MAPPED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496941} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd4-1172e40fd51beec78ee3173d/output/5251854d-361d-44aa-944a-bd9898edb7bb/text","subject":"/messages/1-634effd4-1172e40fd51beec78ee3173d/output/5251854d-361d-44aa-944a-bd9898edb7bb/subject","html":"/messages/1-634effd4-1172e40fd51beec78ee3173d/output/5251854d-361d-44aa-944a-bd9898edb7bb/html"},"ts":1666121685065,"type":"RENDERED","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496941} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121685697,"type":"SENT","message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947496943} +{"stream":"message_history","data":{"ts":1666121682514,"type":"ENQUEUED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497173} 
+{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121682751,"type":"PROFILE_LOADED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497175} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121682875,"type":"MAPPED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497176} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd2-75f7518b8633e5dd765b7484/output/12b2f13c-6280-4fa7-9557-f6b838d93757/text","subject":"/messages/1-634effd2-75f7518b8633e5dd765b7484/output/12b2f13c-6280-4fa7-9557-f6b838d93757/subject","html":"/messages/1-634effd2-75f7518b8633e5dd765b7484/output/12b2f13c-6280-4fa7-9557-f6b838d93757/html"},"ts":1666121683336,"type":"RENDERED","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497178} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121683805,"type":"SENT","message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947497179} +{"stream":"message_history","data":{"ts":1666121680380,"type":"ENQUEUED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497397} +{"stream":"message_history","data":{"merged_profile":{"email":"marcos@airbyte.io"},"received_profile":{"email":"marcos@airbyte.io"},"ts":1666121680814,"type":"PROFILE_LOADED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497399} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666121680916,"type":"MAPPED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497401} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634effd0-5765da642b7eadb4f534953b/output/f315b4ad-e291-4958-beed-3a671cf6f247/text","subject":"/messages/1-634effd0-5765da642b7eadb4f534953b/output/f315b4ad-e291-4958-beed-3a671cf6f247/subject","html":"/messages/1-634effd0-5765da642b7eadb4f534953b/output/f315b4ad-e291-4958-beed-3a671cf6f247/html"},"ts":1666121681280,"type":"RENDERED","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497402} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121682269,"type":"SENT","message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947497403} +{"stream":"message_history","data":{"ts":1666118182542,"type":"ENQUEUED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498020} +{"stream":"message_history","data":{"merged_profile":{"email":"marcosmarxm@gmail.com"},"received_profile":{"email":"marcosmarxm@gmail.com"},"ts":1666118182749,"type":"PROFILE_LOADED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498022} 
+{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666118182789,"type":"MAPPED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498023} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef226-a611b43243f7cdbe2a5115b3/output/a925e133-dd34-4f77-96f3-789098a76b9a/text","subject":"/messages/1-634ef226-a611b43243f7cdbe2a5115b3/output/a925e133-dd34-4f77-96f3-789098a76b9a/subject","html":"/messages/1-634ef226-a611b43243f7cdbe2a5115b3/output/a925e133-dd34-4f77-96f3-789098a76b9a/html"},"ts":1666118183370,"type":"RENDERED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498024} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118184073,"type":"SENT","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498026} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119196057,"type":"OPENED","message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947498027} +{"stream":"message_history","data":{"ts":1666118180798,"type":"ENQUEUED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498637} +{"stream":"message_history","data":{"merged_profile":{"email":"marcosmarxm@gmail.com"},"received_profile":{"email":"marcosmarxm@gmail.com"},"ts":1666118181051,"type":"PROFILE_LOADED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498638} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666118181204,"type":"MAPPED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498638} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef225-8b222f3539887a52ac69cc26/output/f0593540-235f-447b-8526-b1ec321fb0a1/text","subject":"/messages/1-634ef225-8b222f3539887a52ac69cc26/output/f0593540-235f-447b-8526-b1ec321fb0a1/subject","html":"/messages/1-634ef225-8b222f3539887a52ac69cc26/output/f0593540-235f-447b-8526-b1ec321fb0a1/html"},"ts":1666118181718,"type":"RENDERED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498639} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118182302,"type":"SENT","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498639} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119193320,"type":"OPENED","message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947498640} +{"stream":"message_history","data":{"ts":1666118150579,"type":"ENQUEUED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500596} 
+{"stream":"message_history","data":{"merged_profile":{"email":"marcosmarxm@gmail.com"},"received_profile":{"email":"marcosmarxm@gmail.com"},"ts":1666118150805,"type":"PROFILE_LOADED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500597} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666118150939,"type":"MAPPED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500597} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef206-8620ab19a54eeab221126dad/output/6cd79f99-a0d4-429e-b66a-6c99047e0777/text","subject":"/messages/1-634ef206-8620ab19a54eeab221126dad/output/6cd79f99-a0d4-429e-b66a-6c99047e0777/subject","html":"/messages/1-634ef206-8620ab19a54eeab221126dad/output/6cd79f99-a0d4-429e-b66a-6c99047e0777/html"},"ts":1666118151367,"type":"RENDERED","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500598} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118151976,"type":"SENT","message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947500598} +{"stream":"message_history","data":{"ts":1666117975299,"type":"ENQUEUED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502116} +{"stream":"message_history","data":{"merged_profile":{"email":"integration-test@airbyte.io"},"received_profile":{"email":"integration-test@airbyte.io"},"ts":1666117975590,"type":"PROFILE_LOADED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502117} +{"stream":"message_history","data":{"event_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","notification_id":"ABTQN5NW3F4VA3H5EYTXHES0QXD8","ts":1666117975701,"type":"MAPPED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502118} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634ef157-ebfbd09d59d116c4ce45c89e/output/bcbffbd9-7daa-473c-ac11-7b49be7035e3/text","subject":"/messages/1-634ef157-ebfbd09d59d116c4ce45c89e/output/bcbffbd9-7daa-473c-ac11-7b49be7035e3/subject","html":"/messages/1-634ef157-ebfbd09d59d116c4ce45c89e/output/bcbffbd9-7daa-473c-ac11-7b49be7035e3/html"},"ts":1666117976071,"type":"RENDERED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502119} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117976893,"type":"SENT","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502119} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117986671,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502120} 
+{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117988580,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502121} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118081296,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502122} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118563186,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502122} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118758257,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502123} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118935356,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502124} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119128407,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502124} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119448182,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502125} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119725709,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502125} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666121775742,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502126} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128609009,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502127} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128611530,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502127} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128649996,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502128} 
+{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134970418,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502128} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134974510,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502129} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666148305097,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502130} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666160528537,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502130} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666161400073,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502131} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163704656,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502131} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163788243,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502132} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163931562,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502133} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666171803626,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502133} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666174474590,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502134} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666249893593,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502134} +{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666355531786,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502135} 
+{"stream":"message_history","data":{"channel":{"id":"99cd000d-1492-45ce-aa58-1a735478ec65"},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1667501919456,"type":"OPENED","message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947502136} +{"stream":"message_history","data":{"ts":1666117146246,"type":"ENQUEUED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503332} +{"stream":"message_history","data":{"merged_profile":{"email":"integration-test@airbyte.io"},"received_profile":{"email":"integration-test@airbyte.io"},"ts":1666117146354,"type":"PROFILE_LOADED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503333} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"output":{"text":"/messages/1-634eee1a-9130394a2b4ada99aa21567b/output/d8895731-341c-4c0f-ab5b-8959170de7d3/text","subject":"/messages/1-634eee1a-9130394a2b4ada99aa21567b/output/d8895731-341c-4c0f-ab5b-8959170de7d3/subject","html":"/messages/1-634eee1a-9130394a2b4ada99aa21567b/output/d8895731-341c-4c0f-ab5b-8959170de7d3/html"},"ts":1666117146764,"type":"RENDERED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503333} +{"stream":"message_history","data":{"channel":{"id":"","label":""},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117147345,"type":"SENT","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503334} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117158183,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503335} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117159506,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503335} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117161996,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503336} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117180676,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503336} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117181352,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503338} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117185871,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503338} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666117440248,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503339} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118058308,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503340} 
+{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118098458,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503340} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118565173,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503341} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666118934619,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503341} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119130253,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503342} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119252052,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503342} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666119725701,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503343} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128615007,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503343} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666128650024,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503344} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134970833,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503344} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666134972436,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503345} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666148314052,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503345} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666160528529,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503345} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666161412122,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503346} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163817661,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503346} 
+{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666163949680,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503347} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666171803621,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503347} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666174499363,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503348} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666249874814,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503348} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666355531518,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503349} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1666369358702,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503349} +{"stream":"message_history","data":{"channel":{},"integration":{"id":"bae9e7c4-2ac9-44d5-80b4-66458d00ae4e","provider":"gmail"},"ts":1667501920954,"type":"OPENED","message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947503350} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
[HTML email body omitted: markup was stripped during extraction; recoverable text: "Hey Airbyters, / This is an example of a email sent through Courier. Updated Message. / Cheers, / The Courier Team"]
\n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634f0008-bec7b59a56bca8de101130db"},"emitted_at":1667947633162} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
\n \n \n \n "},"message_id":"1-634efffd-c0dba647b6f2b35f9f3bcb33"},"emitted_at":1667947633430} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634efffc-e532825f9425164998e14dc4"},"emitted_at":1667947633814} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effda-77b4ad7bf93c58f0a9700e0c"},"emitted_at":1667947634210} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd5-cce28d39137f340e58ac1aff"},"emitted_at":1667947634616} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd4-1172e40fd51beec78ee3173d"},"emitted_at":1667947634800} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd2-75f7518b8633e5dd765b7484"},"emitted_at":1667947634989} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634effd0-5765da642b7eadb4f534953b"},"emitted_at":1667947635543} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef226-a611b43243f7cdbe2a5115b3"},"emitted_at":1667947635742} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. 
Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef225-8b222f3539887a52ac69cc26"},"emitted_at":1667947635927} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier. Updated Message.\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef206-8620ab19a54eeab221126dad"},"emitted_at":1667947637465} +{"stream":"message_output","data":{"channel":"email","channel_id":"99cd000d-1492-45ce-aa58-1a735478ec65","content":{"text":"Hey Airbyters,\n\nThis is an example of a email sent through Courier\n\nCheers,\nThe Courier Team","subject":"Hi, Airbyters","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634ef157-ebfbd09d59d116c4ce45c89e"},"emitted_at":1667947638315} +{"stream":"message_output","data":{"channel":"email","content":{"text":"Want to hear a joke? How did the T-Rex feel after a set of bicep curls? Dino-sore!\n","subject":"Welcome to Courier!","html":"\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n "},"message_id":"1-634eee1a-9130394a2b4ada99aa21567b"},"emitted_at":1667947639031} diff --git a/airbyte-integrations/connectors/source-datascope/.dockerignore b/airbyte-integrations/connectors/source-datascope/.dockerignore new file mode 100644 index 0000000000000..a4655d572bd7b --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_datascope +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-datascope/BOOTSTRAP.md b/airbyte-integrations/connectors/source-datascope/BOOTSTRAP.md new file mode 100644 index 0000000000000..68a89e9425a73 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/BOOTSTRAP.md @@ -0,0 +1,10 @@ +# DataScope +DataScope is a mobile solution that helps you collect data offline, manage field teams, and share business insights. Use the intuitive Form Builder to create your forms, and then analyze the data you've collected via powerful and personalized dashboards. + +The streams implemented allows you to pull data from the following DataScope objects: +- Locations +- Answers +- Lists +- Notifications + +For more information about the DataScope API, see the [DataScope API documentation](https://dscope.github.io/docs/). \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-datascope/Dockerfile b/airbyte-integrations/connectors/source-datascope/Dockerfile new file mode 100644 index 0000000000000..49a9f0b7178f7 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_datascope ./source_datascope + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-datascope diff --git a/airbyte-integrations/connectors/source-datascope/README.md b/airbyte-integrations/connectors/source-datascope/README.md new file mode 100644 index 0000000000000..425473f9f6452 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/README.md @@ -0,0 +1,79 @@ +# Datascope Source + +This is the repository for the Datascope configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/datascope). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-datascope:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/datascope) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_datascope/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source datascope test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-datascope:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-datascope:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-datascope:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-datascope:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-datascope:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-datascope:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. 
+If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-datascope:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-datascope:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-datascope/__init__.py b/airbyte-integrations/connectors/source-datascope/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-datascope/acceptance-test-config.yml b/airbyte-integrations/connectors/source-datascope/acceptance-test-config.yml new file mode 100644 index 0000000000000..5879b9065fcbb --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/acceptance-test-config.yml @@ -0,0 +1,24 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-datascope:dev +tests: + spec: + - spec_path: "source_datascope/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["notifications", "lists"] + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-datascope/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-datascope/acceptance-test-docker.sh new file mode 100755 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-datascope/build.gradle b/airbyte-integrations/connectors/source-datascope/build.gradle new file mode 100644 index 0000000000000..effe7c6fe23e6 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_datascope' +} diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/__init__.py b/airbyte-integrations/connectors/source-datascope/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-datascope/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..62350b41e2cd8 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "answers": { + "created_at": "01/01/9999 00:00" + } +} diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-datascope/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-datascope/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..14c47c910c6c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/configured_catalog.json @@ -0,0 +1,42 @@ +{ + "streams": [ + { + "stream": { + "name": "locations", + "json_schema": {}, + "source_defined_cursor": true, + "source_defined_primary_key": [["id"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "answers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "lists", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "notifications", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-datascope/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..4f40c8d766e65 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "abctestconfig", + "start_date": "2019-01-01T00:00:00Z" +} diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-datascope/integration_tests/sample_config.json new file mode 100644 index 0000000000000..5b5c2eeef90a2 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "test_key", + "start_date": "2022-10-30 00:00" +} diff --git a/airbyte-integrations/connectors/source-datascope/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-datascope/integration_tests/sample_state.json new file mode 100644 index 0000000000000..a2a2b17dacadf --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/integration_tests/sample_state.json @@ 
-0,0 +1,5 @@ +{ + "answers": { + "created_at": "01/01/2000 00:00" + } +} diff --git a/airbyte-integrations/connectors/source-datascope/main.py b/airbyte-integrations/connectors/source-datascope/main.py new file mode 100644 index 0000000000000..8d7ff4df304fa --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_datascope import SourceDatascope + +if __name__ == "__main__": + source = SourceDatascope() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-datascope/requirements.txt b/airbyte-integrations/connectors/source-datascope/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-datascope/setup.py b/airbyte-integrations/connectors/source-datascope/setup.py new file mode 100644 index 0000000000000..bf5f11481f354 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_datascope", + description="Source implementation for Datascope.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/__init__.py b/airbyte-integrations/connectors/source-datascope/source_datascope/__init__.py new file mode 100644 index 0000000000000..1b6e441cb5d41 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceDatascope + +__all__ = ["SourceDatascope"] diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/datascope.yaml b/airbyte-integrations/connectors/source-datascope/source_datascope/datascope.yaml new file mode 100644 index 0000000000000..2b86a3b984212 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/datascope.yaml @@ -0,0 +1,111 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://www.mydatascope.com/api/external/" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "Authorization" + api_token: "{{ config['api_key'] }}" + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%d/%m/%Y %H:%M" + end_datetime: + datetime: "{{ now_utc().strftime('%d/%m/%Y %H:%M') }}" + datetime_format: "%d/%m/%Y %H:%M" + step: "1d" + datetime_format: "%d/%m/%Y %H:%M" + cursor_field: "{{ options['stream_cursor_field'] }}" + start_time_option: + field_name: "start" + inject_into: "request_parameter" + end_time_option: + field_name: "end" + inject_into: "request_parameter" + + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + request_options_provider: + request_parameters: + start: "{{stream_slice['start_time'] or 'latest'}}" + paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: "OffsetIncrement" + page_size: 200 + page_token_option: + inject_into: "request_parameter" + field_name: "offset" + requester: + $ref: "*ref(definitions.requester)" + stream_slicer: + $ref: "*ref(definitions.stream_slicer)" + + retriever_non_incremental: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: "OffsetIncrement" + page_size: 200 + page_token_option: + inject_into: "request_parameter" + field_name: "offset" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + location_stream: + retriever: + $ref: "*ref(definitions.retriever_non_incremental)" + $options: + name: "locations" + primary_key: "id" + path: "/locations" + answers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "answers" + primary_key: "form_answer_id" + path: "/v2/answers" + stream_cursor_field: "created_at" + lists_stream: + retriever: + $ref: "*ref(definitions.retriever_non_incremental)" + $options: + name: "lists" + primary_key: "id" + path: "/metadata_objects" + notifications_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "notifications" + primary_key: "id" + path: "/notifications" + stream_cursor_field: "created_at" +streams: + - "*ref(definitions.location_stream)" + - "*ref(definitions.answers_stream)" + - "*ref(definitions.lists_stream)" + - "*ref(definitions.notifications_stream)" + +check: + stream_names: + - "locations" diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/answers.json b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/answers.json new file mode 100644 index 0000000000000..960bd2d789a19 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/answers.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "form_name": { + "type": "string" + }, + "form_state": { + "type": ["string", "null"] + }, + "user_name": { + "type": "string" + }, + "user_identifier": { + "type": "string" + }, + "code": { + "type": "string" + }, + "form_id": { + "type": "integer" + }, + "created_at": { + "type": "string" + }, + "form_answer_id": { + "type": "integer" + }, + "latitude": { + "type": ["number", "null"] + }, + "longitude": { + "type": ["number", "null"] + }, + "[question_name1]": { + "type": "string" + }, + "[question_name2]": { + "type": "string" + }, + "[question_name3]": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/lists.json b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/lists.json new file mode 100644 index 0000000000000..162819617e2f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/lists.json @@ -0,0 +1,36 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "attribute1": { + "type": "string" + }, + "attribute2": { + "type": "string" + }, + "list_id": { + "type": "integer" + }, + "account_id": { + "type": "integer" + }, + "code": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "updated_at": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/locations.json b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/locations.json new file mode 100644 index 0000000000000..6226b5676edc9 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/locations.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "description": { + "type": ["string", "null"] + }, + "code": { + "type": ["string", "null"] + }, + "address": { + "type": ["string", "null"] + }, + "city": { + "type": ["string", "null"] + }, + "country": { + "type": ["string", "null"] + }, + "latitude": { + "type": ["number", "null"] + }, + "longitude": { + "type": ["number", "null"] + }, + "region": { + "type": ["string", "null"] + }, + "phone": { + "type": ["string", "null"] + }, + "company_code": { + "type": ["string", "null"] + }, + "company_name": { + "type": ["string", "null"] + } + } +} diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/notifications.json b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/notifications.json new file mode 100644 index 0000000000000..d5d26f539c31a --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/schemas/notifications.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "type": { + "type": "string" + }, + "url": { + "type": "string" + }, + "form_name": { + "type": "string" + }, + "form_code": { + "type": "string" + }, + "user": { + "type": "string" + }, + "created_at": { + "type": "string" + } + } +} diff --git 
a/airbyte-integrations/connectors/source-datascope/source_datascope/source.py b/airbyte-integrations/connectors/source-datascope/source_datascope/source.py new file mode 100644 index 0000000000000..44a433d095533 --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +a source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceDatascope(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "datascope.yaml"}) diff --git a/airbyte-integrations/connectors/source-datascope/source_datascope/spec.yaml b/airbyte-integrations/connectors/source-datascope/source_datascope/spec.yaml new file mode 100644 index 0000000000000..18358d23ce35e --- /dev/null +++ b/airbyte-integrations/connectors/source-datascope/source_datascope/spec.yaml @@ -0,0 +1,22 @@ +documentationUrl: "https://docs.airbyte.com/integrations/sources/datascope" +connectionSpecification: + $schema: "http://json-schema.org/draft-07/schema#" + title: Datascope Spec + type: object + required: + - api_key + - start_date + additionalProperties: true + properties: + start_date: + title: Start Date + type: string + description: Start date for the data to be replicated + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}$" + api_key: + title: Authorization + type: string + description: API Key + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml b/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml index 5106bfe080cae..a0707f64ca70a 100644 --- a/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-db2/acceptance-test-config.yml @@ -3,4 +3,5 @@ connector_image: airbyte/source-db2:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-db2/build.gradle b/airbyte-integrations/connectors/source-db2/build.gradle index 34d5d340fdc2e..6798912d60f77 100644 --- a/airbyte-integrations/connectors/source-db2/build.gradle +++ b/airbyte-integrations/connectors/source-db2/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json index 45235a6e0cd68..092944df3c4a0 100644 --- a/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-db2/src/main/resources/spec.json @@ -56,9 +56,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "unencrypted", - "enum": ["unencrypted"], - "default": "unencrypted" + "const": "unencrypted" } } }, @@ -69,9 +67,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "encrypted_verify_certificate", - "enum": ["encrypted_verify_certificate"], - "default": "encrypted_verify_certificate" + "const": "encrypted_verify_certificate" }, "ssl_certificate": { "title": "SSL PEM file", diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..47e237f976048 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/dummy_config.json @@ -0,0 +1,11 @@ +{ + "host": "hhh", + "port": 8123, + "db": "ddd", + "username": "uuu", + "password": "ppp", + "encryption": { + "encryption_method": "encrypted_verify_certificate", + "ssl_certificate": "sss" + } +} diff --git a/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..a7a07e5303890 --- /dev/null +++ b/airbyte-integrations/connectors/source-db2/src/test-integration/resources/expected_spec.json @@ -0,0 +1,94 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/db2", + "connectionSpecification": { + "$schema": 
"http://json-schema.org/draft-07/schema#", + "title": "IBM Db2 Source Spec", + "type": "object", + "required": ["host", "port", "db", "username", "password", "encryption"], + "properties": { + "host": { + "description": "Host of the Db2.", + "type": "string", + "order": 0 + }, + "port": { + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 8123, + "examples": ["8123"], + "order": 1 + }, + "db": { + "description": "Name of the database.", + "type": "string", + "examples": ["default"], + "order": 2 + }, + "username": { + "description": "Username to use to access the database.", + "type": "string", + "order": 3 + }, + "password": { + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 5 + }, + "encryption": { + "title": "Encryption", + "type": "object", + "description": "Encryption method to use when communicating with the database", + "order": 6, + "oneOf": [ + { + "title": "Unencrypted", + "description": "Data transfer will not be encrypted.", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "type": "string", + "const": "unencrypted" + } + } + }, + { + "title": "TLS Encrypted (verify certificate)", + "description": "Verify and use the cert provided by the server.", + "required": ["encryption_method", "ssl_certificate"], + "properties": { + "encryption_method": { + "type": "string", + "const": "encrypted_verify_certificate" + }, + "ssl_certificate": { + "title": "SSL PEM file", + "description": "Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations", + "type": "string", + "airbyte_secret": true, + "multiline": true + }, + "key_store_password": { + "title": "Key Store Password. This field is optional. If you do not fill in this field, the password will be randomly generated.", + "description": "Key Store Password", + "type": "string", + "airbyte_secret": true + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-dynamodb/Dockerfile b/airbyte-integrations/connectors/source-dynamodb/Dockerfile new file mode 100644 index 0000000000000..d51a6715c52a5 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/Dockerfile @@ -0,0 +1,21 @@ +FROM airbyte/integration-base-java:dev AS build + +WORKDIR /airbyte + +ENV APPLICATION source-dynamodb + +COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar + +RUN tar xf ${APPLICATION}.tar --strip-components=1 && rm -rf ${APPLICATION}.tar + +FROM airbyte/integration-base-java:dev + +WORKDIR /airbyte + +ENV APPLICATION source-dynamodb + +COPY --from=build /airbyte /airbyte + +# Airbyte's build system uses these labels to know what to name and tag the docker images produced by this Dockerfile. 
+LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-dynamodb diff --git a/airbyte-integrations/connectors/source-dynamodb/README.md b/airbyte-integrations/connectors/source-dynamodb/README.md new file mode 100644 index 0000000000000..56b62b4d9a318 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/README.md @@ -0,0 +1,69 @@ +# Source Dynamodb + +This is the repository for the Dynamodb source connector in Java. +For information about how to use this connector within Airbyte, see [the User Documentation](https://docs.airbyte.io/integrations/sources/dynamodb). + +## Local development + +#### Building via Gradle +From the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-dynamodb:build +``` + +#### Create credentials +**If you are a community contributor**, generate the necessary credentials and place them in `secrets/config.json` conforming to the spec file in `src/main/resources/spec.json` (an illustrative config sketch is included at the end of this README). +Note that the `secrets` directory is git-ignored by default, so there is no danger of accidentally checking in sensitive information. + +**If you are an Airbyte core member**, follow the [instructions](https://docs.airbyte.io/connector-development#using-credentials-in-ci) to set up the credentials. + +### Locally running the connector docker image + +#### Build +Build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-dynamodb:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-dynamodb:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-dynamodb:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-dynamodb:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-dynamodb:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +## Testing +We use `JUnit` for Java tests. + +### Unit and Integration Tests +Place unit tests under `src/test/...` +Place integration tests in `src/test-integration/...` + +#### Acceptance Tests +Airbyte has a standard test suite that all source connectors must pass. Implement the `TODO`s in +`src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceAcceptanceTest.java`. + +### Using gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-dynamodb:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-dynamodb:integrationTest +``` + +## Dependency Management + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
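+## Example `secrets/config.json`
+As a reference for the "Create credentials" step above, a minimal `secrets/config.json` might look like the sketch below. This is an illustrative example only: the credential values are placeholders modeled on the `examples` given in `src/main/resources/spec.json`; `access_key_id` and `secret_access_key` are the only required fields, while `endpoint` and `region` are optional and may be left empty to fall back to the AWS SDK defaults.
+```
+{
+  "endpoint": "",
+  "region": "us-east-1",
+  "access_key_id": "A012345678910EXAMPLE",
+  "secret_access_key": "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"
+}
+```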
diff --git a/airbyte-integrations/connectors/source-dynamodb/build.gradle b/airbyte-integrations/connectors/source-dynamodb/build.gradle new file mode 100644 index 0000000000000..c6b41ba2e3bfa --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/build.gradle @@ -0,0 +1,43 @@ +plugins { + id 'application' + id 'airbyte-docker' + id 'airbyte-integration-test-java' +} + +application { + mainClass = 'io.airbyte.integrations.source.dynamodb.DynamodbSource' +} + +def testContainersVersion = '1.17.5' +def assertVersion = '3.23.1' + +dependencies { + implementation project(':airbyte-db:db-lib') + implementation project(':airbyte-integrations:bases:base-java') + implementation project(':airbyte-protocol:protocol-models') + implementation project(':airbyte-integrations:connectors:source-relational-db') + implementation project(':airbyte-config:config-models') + + implementation platform('software.amazon.awssdk:bom:2.18.1') + // https://mvnrepository.com/artifact/software.amazon.awssdk/dynamodb + implementation 'software.amazon.awssdk:dynamodb' + + testImplementation 'org.skyscreamer:jsonassert:1.5.1' + + + // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-databind + implementation 'com.fasterxml.jackson.core:jackson-databind:2.13.4.2' + // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core + implementation 'com.fasterxml.jackson.core:jackson-core:2.13.4' + + + testImplementation "org.assertj:assertj-core:${assertVersion}" + testImplementation "org.testcontainers:localstack:${testContainersVersion}" + + + integrationTestJavaImplementation project(':airbyte-integrations:connectors:source-dynamodb') + integrationTestJavaImplementation project(':airbyte-integrations:bases:standard-source-test') + + implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) + integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbAttributeSerializer.java b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbAttributeSerializer.java new file mode 100644 index 0000000000000..a470ec2016ec3 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbAttributeSerializer.java @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; +import java.io.IOException; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; + +public class DynamodbAttributeSerializer extends JsonSerializer { + + @Override + public void serialize(AttributeValue value, JsonGenerator gen, SerializerProvider serializers) throws IOException { + switch (value.type()) { + case S -> gen.writeString(value.s()); + case N -> { + try { + Long.parseLong(value.n()); + gen.writeNumber(Long.parseLong(value.n())); + } catch (NumberFormatException e) { + gen.writeNumber(Double.parseDouble(value.n())); + } + } + case B -> gen.writeBinary(value.b().asByteArray()); + case SS -> { + gen.writeStartArray(); + for (var str : value.ss()) { + gen.writeString(str); + } + gen.writeEndArray(); + } + case NS -> { + gen.writeStartArray(); + for (var str : value.ns()) { + gen.writeNumber(str); + } + gen.writeEndArray(); + } + case BS -> { + gen.writeStartArray(); + for (var sb : value.bs()) { + gen.writeBinary(sb.asByteArray()); + } + gen.writeEndArray(); + } + case M -> { + gen.writeStartObject(); + for (var attr : value.m().entrySet()) { + gen.writeFieldName(attr.getKey()); + serialize(attr.getValue(), gen, serializers); + } + gen.writeEndObject(); + } + case L -> { + gen.writeStartArray(); + for (var attr : value.l()) { + serialize(attr, gen, serializers); + } + gen.writeEndArray(); + } + case BOOL -> gen.writeBoolean(value.bool()); + case NUL -> gen.writeNull(); + case UNKNOWN_TO_SDK_VERSION -> { + // ignore unknown fields + } + } + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbConfig.java b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbConfig.java new file mode 100644 index 0000000000000..0011fabe3b0d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbConfig.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.databind.JsonNode; +import java.net.URI; +import software.amazon.awssdk.regions.Region; + +public record DynamodbConfig( + + URI endpoint, + + Region region, + + String accessKey, + + String secretKey + +) { + + public static DynamodbConfig createDynamodbConfig(JsonNode jsonNode) { + JsonNode endpoint = jsonNode.get("endpoint"); + JsonNode region = jsonNode.get("region"); + return new DynamodbConfig( + endpoint != null && !endpoint.asText().isBlank() ? URI.create(endpoint.asText()) : null, + region != null && !region.asText().isBlank() ? 
Region.of(region.asText()) : null, + jsonNode.get("access_key_id").asText(), + jsonNode.get("secret_access_key").asText()); + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbOperations.java b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbOperations.java new file mode 100644 index 0000000000000..e16d6a77c6834 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbOperations.java @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import io.airbyte.db.AbstractDatabase; +import java.io.Closeable; +import java.time.LocalDate; +import java.time.format.DateTimeParseException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; +import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.DescribeTableRequest; +import software.amazon.awssdk.services.dynamodb.model.ScanRequest; + +public class DynamodbOperations extends AbstractDatabase implements Closeable { + + private final DynamoDbClient dynamoDbClient; + + private ObjectMapper attributeObjectMapper; + + private ObjectMapper schemaObjectMapper; + + public DynamodbOperations(DynamodbConfig dynamodbConfig) { + this.dynamoDbClient = DynamodbUtils.createDynamoDbClient(dynamodbConfig); + initMappers(); + } + + public DynamodbOperations(DynamoDbClient dynamoDbClient) { + this.dynamoDbClient = dynamoDbClient; + initMappers(); + } + + private void initMappers() { + SimpleModule attributeModule = new SimpleModule(); + attributeModule.addSerializer(AttributeValue.class, new DynamodbAttributeSerializer()); + this.attributeObjectMapper = new ObjectMapper().registerModule(attributeModule); + + SimpleModule schemaModule = new SimpleModule(); + schemaModule.addSerializer(AttributeValue.class, new DynamodbSchemaSerializer()); + this.schemaObjectMapper = new ObjectMapper().registerModule(schemaModule); + } + + public List listTables() { + return dynamoDbClient.listTables() + // filter on table status? + .tableNames(); + } + + public List primaryKey(String tableName) { + DescribeTableRequest describeTableRequest = DescribeTableRequest.builder().tableName(tableName).build(); + return dynamoDbClient.describeTable(describeTableRequest).table().attributeDefinitions().stream() + .map(AttributeDefinition::attributeName) + .toList(); + } + + public JsonNode inferSchema(String tableName, int sampleSize) { + + List> items = new ArrayList<>(); + + ScanRequest scanRequest = ScanRequest.builder() + .limit(sampleSize) + .tableName(tableName) + .build(); + + var scanIterable = dynamoDbClient.scanPaginator(scanRequest); + int scannedItems = 0; + for (var scanResponse : scanIterable) { + + if (scannedItems >= sampleSize) { + break; + } + + // can scan a 'bit' more items than 'sampleSize' if response is > 1MB since every + // new page request on the iterator will return new 'sampleSize' amount of items. 
+ scannedItems += scanResponse.count(); + + items.addAll(scanResponse.items()); + + } + + /* + * schema inference with combining only the top level attributes of different items. for complete + * schema inference the implementation should do full traversal of each item object graph and merge + * different nested attributes at the same level + */ + Map mergedItems = items.stream() + .reduce(new HashMap<>(), (merged, current) -> { + merged.putAll(current); + return merged; + }); + + return schemaObjectMapper.convertValue(mergedItems, JsonNode.class); + } + + public List scanTable(String tableName, Set attributes, FilterAttribute filterAttribute) { + List items = new ArrayList<>(); + + var projectionAttributes = String.join(", ", attributes); + + ScanRequest.Builder scanRequestBuilder = ScanRequest.builder() + .tableName(tableName) + .projectionExpression(projectionAttributes); + + if (filterAttribute != null && filterAttribute.name() != null && + filterAttribute.value() != null && filterAttribute.type() != null) { + + var filterName = filterAttribute.name(); + var filterValue = filterAttribute.value(); + + // Dynamodb supports timestamp filtering based on ISO format as string and Epoch format as number + // type + AttributeValue attributeValue = switch (filterAttribute.type()) { + case S -> AttributeValue.builder().s(filterValue).build(); + case N -> AttributeValue.builder().n(filterValue).build(); + }; + + String comparator; + try { + // if date is of format 2016-02-15 we should use gr-eq in order to not skip records + // from the same date after first replication + LocalDate.parse(filterValue); + comparator = ">="; + } catch (DateTimeParseException e) { + comparator = ">"; + } + + scanRequestBuilder + .filterExpression(filterName + " " + comparator + " :timestamp") + .expressionAttributeValues(Map.of(":timestamp", attributeValue)); + + } + + var scanIterable = dynamoDbClient.scanPaginator(scanRequestBuilder.build()); + for (var scanResponse : scanIterable) { + + scanResponse.items().stream() + .map(attr -> attributeObjectMapper.convertValue(attr, JsonNode.class)) + .forEach(items::add); + + } + + return items; + } + + @Override + public void close() { + dynamoDbClient.close(); + } + + public record FilterAttribute(String name, String value, FilterType type) { + + public enum FilterType { + + S, + N + + } + + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbSchemaSerializer.java b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbSchemaSerializer.java new file mode 100644 index 0000000000000..c64fddae1efa0 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbSchemaSerializer.java @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; +import java.io.IOException; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; + +public class DynamodbSchemaSerializer extends JsonSerializer { + + @Override + public void serialize(AttributeValue value, JsonGenerator gen, SerializerProvider serializers) throws IOException { + switch (value.type()) { + case S -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "string"}, 0, 2); + + gen.writeEndObject(); + } + case N -> { + + gen.writeStartObject(); + + gen.writeFieldName("type"); + + try { + Long.parseLong(value.n()); + gen.writeArray(new String[] {"null", "integer"}, 0, 2); + } catch (NumberFormatException e) { + gen.writeArray(new String[] {"null", "number"}, 0, 2); + } + + gen.writeEndObject(); + + } + case B -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "string"}, 0, 2); + + gen.writeStringField("contentEncoding", "base64"); + + gen.writeEndObject(); + } + case SS -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "array"}, 0, 2); + + gen.writeObjectFieldStart("items"); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "string"}, 0, 2); + + gen.writeEndObject(); + + gen.writeEndObject(); + } + case NS -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "array"}, 0, 2); + + gen.writeObjectFieldStart("items"); + + gen.writeFieldName("type"); + // array can contain mixed integer and decimal values + gen.writeArray(new String[] {"null", "number"}, 0, 2); + + gen.writeEndObject(); + + gen.writeEndObject(); + } + case BS -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "array"}, 0, 2); + + gen.writeObjectFieldStart("items"); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "string"}, 0, 2); + + gen.writeStringField("contentEncoding", "base64"); + + gen.writeEndObject(); + + gen.writeEndObject(); + } + case M -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "object"}, 0, 2); + + gen.writeObjectFieldStart("properties"); + + for (var attr : value.m().entrySet()) { + gen.writeFieldName(attr.getKey()); + // recursively iterate over nested attributes and create json schema fields + serialize(attr.getValue(), gen, serializers); + } + + gen.writeEndObject(); + + gen.writeEndObject(); + } + case L -> { + // TODO (itaseski) perform deduplication on same type schema elements + + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "array"}, 0, 2); + + gen.writeObjectFieldStart("items"); + + gen.writeArrayFieldStart("anyOf"); + + // recursively iterate over nested attributes and create json schema fields + for (var attr : value.l()) { + serialize(attr, gen, serializers); + } + + gen.writeEndArray(); + + gen.writeEndObject(); + + gen.writeEndObject(); + } + case BOOL -> { + gen.writeStartObject(); + + gen.writeFieldName("type"); + gen.writeArray(new String[] {"null", "boolean"}, 0, 2); + + gen.writeEndObject(); + } + case NUL -> { + gen.writeStartObject(); + gen.writeStringField("type", "null"); + gen.writeEndObject(); + } + case UNKNOWN_TO_SDK_VERSION -> { + // ignore 
unknown fields + } + } + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbSource.java b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbSource.java new file mode 100644 index 0000000000000..9da150fda8c47 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbSource.java @@ -0,0 +1,192 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.features.EnvVariableFeatureFlags; +import io.airbyte.commons.features.FeatureFlags; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.util.AutoCloseableIterator; +import io.airbyte.commons.util.AutoCloseableIterators; +import io.airbyte.integrations.BaseConnector; +import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.integrations.base.IntegrationRunner; +import io.airbyte.integrations.base.Source; +import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.integrations.source.relationaldb.StateDecoratingIterator; +import io.airbyte.integrations.source.relationaldb.state.StateManager; +import io.airbyte.integrations.source.relationaldb.state.StateManagerFactory; +import io.airbyte.protocol.models.AirbyteCatalog; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.JsonSchemaPrimitive; +import io.airbyte.protocol.models.SyncMode; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DynamodbSource extends BaseConnector implements Source { + + private static final Logger LOGGER = LoggerFactory.getLogger(DynamodbSource.class); + + private final FeatureFlags featureFlags = new EnvVariableFeatureFlags(); + + private final ObjectMapper objectMapper = new ObjectMapper(); + + public static void main(String[] args) throws Exception { + Source source = new DynamodbSource(); + LOGGER.info("starting Source: {}", DynamodbSource.class); + new IntegrationRunner(source).run(args); + LOGGER.info("completed Source: {}", DynamodbSource.class); + } + + @Override + public AirbyteConnectionStatus check(JsonNode config) { + var dynamodbConfig = DynamodbConfig.createDynamodbConfig(config); + + try (var dynamodbOperations = new DynamodbOperations(dynamodbConfig)) { + dynamodbOperations.listTables(); + + return new AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.SUCCEEDED); + } catch (Exception e) { + LOGGER.error("Error while listing Dynamodb tables with reason: ", e); + return new AirbyteConnectionStatus() + .withStatus(AirbyteConnectionStatus.Status.FAILED); + } + + } + + @Override + public AirbyteCatalog discover(JsonNode config) { + + var dynamodbConfig = DynamodbConfig.createDynamodbConfig(config); + + try (var dynamodbOperations = new DynamodbOperations(dynamodbConfig)) { + + var airbyteStreams = 
dynamodbOperations.listTables().stream() + .map(tb -> new AirbyteStream() + .withName(tb) + .withJsonSchema(Jsons.jsonNode(ImmutableMap.builder() + .put("type", "object") + .put("properties", dynamodbOperations.inferSchema(tb, 1000)) + .build())) + .withSourceDefinedPrimaryKey(Collections.singletonList(dynamodbOperations.primaryKey(tb))) + .withSupportedSyncModes(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))) + .toList(); + + return new AirbyteCatalog().withStreams(airbyteStreams); + } + + } + + @Override + public AutoCloseableIterator read(JsonNode config, + ConfiguredAirbyteCatalog catalog, + JsonNode state) { + + var streamState = DynamodbUtils.deserializeStreamState(state, featureFlags.useStreamCapableState()); + + StateManager stateManager = StateManagerFactory + .createStateManager(streamState.airbyteStateType(), streamState.airbyteStateMessages(), catalog); + + var dynamodbConfig = DynamodbConfig.createDynamodbConfig(config); + + try (var dynamodbOperations = new DynamodbOperations(dynamodbConfig)) { + + var streamIterators = catalog.getStreams().stream() + .map(str -> switch (str.getSyncMode()) { + case INCREMENTAL -> scanIncremental(dynamodbOperations, str.getStream(), str.getCursorField().get(0), stateManager); + case FULL_REFRESH -> scanFullRefresh(dynamodbOperations, str.getStream()); + }) + .toList(); + + return AutoCloseableIterators.concatWithEagerClose(streamIterators); + + } + } + + private AutoCloseableIterator scanIncremental(DynamodbOperations dynamodbOperations, + AirbyteStream airbyteStream, + String cursorField, + StateManager stateManager) { + + var streamPair = new AirbyteStreamNameNamespacePair(airbyteStream.getName(), airbyteStream.getNamespace()); + + Optional cursorInfo = stateManager.getCursorInfo(streamPair); + + Map properties = objectMapper.convertValue(airbyteStream.getJsonSchema().get("properties"), new TypeReference<>() {}); + Set selectedAttributes = properties.keySet(); + + // cursor type will be retrieved from the json schema to save time on db schema crawling reading + // large amount of items + String cursorType = properties.get(cursorField).get("type").asText(); + + var messageStream = cursorInfo.map(cursor -> { + + var filterType = switch (cursorType) { + case "string" -> DynamodbOperations.FilterAttribute.FilterType.S; + case "integer" -> DynamodbOperations.FilterAttribute.FilterType.N; + case "number" -> { + JsonNode airbyteType = properties.get(cursorField).get("airbyte_type"); + if (airbyteType != null && airbyteType.asText().equals("integer")) { + yield DynamodbOperations.FilterAttribute.FilterType.N; + } else { + throw new UnsupportedOperationException("Unsupported attribute type for filtering"); + } + } + default -> throw new UnsupportedOperationException("Unsupported attribute type for filtering"); + }; + + DynamodbOperations.FilterAttribute filterAttribute = new DynamodbOperations.FilterAttribute( + cursor.getCursorField(), + cursor.getCursor(), + filterType); + + return dynamodbOperations.scanTable(airbyteStream.getName(), selectedAttributes, filterAttribute); + + }) + // perform full refresh if cursor is not present + .orElse(dynamodbOperations.scanTable(airbyteStream.getName(), selectedAttributes, null)) + .stream() + .map(jn -> DynamodbUtils.mapAirbyteMessage(airbyteStream.getName(), jn)); + + // wrap stream in state emission iterator + return AutoCloseableIterators.transform(autoCloseableIterator -> new StateDecoratingIterator( + autoCloseableIterator, + stateManager, + streamPair, + cursorField, + 
cursorInfo.map(CursorInfo::getCursor).orElse(null), + JsonSchemaPrimitive.valueOf(cursorType.toUpperCase()), + // emit state after full stream has been processed + 0), + AutoCloseableIterators.fromStream(messageStream)); + + } + + private AutoCloseableIterator scanFullRefresh(DynamodbOperations dynamodbOperations, + AirbyteStream airbyteStream) { + Map properties = objectMapper.convertValue(airbyteStream.getJsonSchema().get("properties"), new TypeReference<>() {}); + Set selectedAttributes = properties.keySet(); + + var messageStream = dynamodbOperations + .scanTable(airbyteStream.getName(), selectedAttributes, null) + .stream() + .map(jn -> DynamodbUtils.mapAirbyteMessage(airbyteStream.getName(), jn)); + + return AutoCloseableIterators.fromStream(messageStream); + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbUtils.java b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbUtils.java new file mode 100644 index 0000000000000..05741b6f648e4 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/java/io/airbyte/integrations/source/dynamodb/DynamodbUtils.java @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.StateWrapper; +import io.airbyte.config.helpers.StateMessageHelper; +import io.airbyte.integrations.source.relationaldb.models.DbState; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.AirbyteRecordMessage; +import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamState; +import java.time.Instant; +import java.util.List; +import java.util.Optional; +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; + +public class DynamodbUtils { + + private DynamodbUtils() { + + } + + public static DynamoDbClient createDynamoDbClient(DynamodbConfig dynamodbConfig) { + var dynamoDbClientBuilder = DynamoDbClient.builder(); + + // configure access credentials + dynamoDbClientBuilder.credentialsProvider(StaticCredentialsProvider.create( + AwsBasicCredentials.create(dynamodbConfig.accessKey(), dynamodbConfig.secretKey()))); + + if (dynamodbConfig.region() != null) { + dynamoDbClientBuilder.region(dynamodbConfig.region()); + } + + if (dynamodbConfig.endpoint() != null) { + dynamoDbClientBuilder.endpointOverride(dynamodbConfig.endpoint()); + } + + return dynamoDbClientBuilder.build(); + } + + public static AirbyteMessage mapAirbyteMessage(String stream, JsonNode data) { + return new AirbyteMessage() + .withType(AirbyteMessage.Type.RECORD) + .withRecord(new AirbyteRecordMessage() + .withStream(stream) + .withEmittedAt(Instant.now().toEpochMilli()) + .withData(data)); + } + + public static StreamState deserializeStreamState(JsonNode state, boolean useStreamCapableState) { + Optional typedState = + StateMessageHelper.getTypedState(state, useStreamCapableState); + return typedState.map(stateWrapper -> switch (stateWrapper.getStateType()) { + case STREAM: + yield new StreamState(AirbyteStateMessage.AirbyteStateType.STREAM, stateWrapper.getStateMessages()); + case LEGACY: + yield new 
StreamState(AirbyteStateMessage.AirbyteStateType.LEGACY, List.of( + new AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(stateWrapper.getLegacyState()))); + case GLOBAL: + throw new UnsupportedOperationException("Unsupported stream state"); + }).orElseGet(() -> { + // create empty initial state + if (useStreamCapableState) { + return new StreamState(AirbyteStateMessage.AirbyteStateType.STREAM, List.of( + new AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.STREAM) + .withStream(new AirbyteStreamState()))); + } else { + return new StreamState(AirbyteStateMessage.AirbyteStateType.LEGACY, List.of( + new AirbyteStateMessage().withType(AirbyteStateMessage.AirbyteStateType.LEGACY) + .withData(Jsons.jsonNode(new DbState())))); + } + }); + } + + record StreamState( + + AirbyteStateMessage.AirbyteStateType airbyteStateType, + + List airbyteStateMessages) { + + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/main/resources/spec.json b/airbyte-integrations/connectors/source-dynamodb/src/main/resources/spec.json new file mode 100644 index 0000000000000..c6e6d129c8780 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/main/resources/spec.json @@ -0,0 +1,67 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/dynamodb", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Dynamodb Source Spec", + "type": "object", + "required": ["access_key_id", "secret_access_key"], + "additionalProperties": false, + "properties": { + "endpoint": { + "title": "Dynamodb Endpoint", + "type": "string", + "default": "", + "description": "the URL of the Dynamodb database", + "examples": ["https://{aws_dynamo_db_url}.com"] + }, + "region": { + "title": "Dynamodb Region", + "type": "string", + "default": "", + "description": "The region of the Dynamodb database", + "enum": [ + "", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "af-south-1", + "ap-east-1", + "ap-south-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "cn-north-1", + "cn-northwest-1", + "eu-central-1", + "eu-north-1", + "eu-south-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "sa-east-1", + "me-south-1", + "us-gov-east-1", + "us-gov-west-1" + ] + }, + "access_key_id": { + "title": "Dynamodb Key Id", + "type": "string", + "description": "The access key id to access Dynamodb. Airbyte requires read permissions to the database", + "airbyte_secret": true, + "examples": ["A012345678910EXAMPLE"] + }, + "secret_access_key": { + "title": "Dynamodb Access Key", + "type": "string", + "description": "The corresponding secret to the access key id.", + "airbyte_secret": true, + "examples": ["a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY"] + } + } + } +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbContainer.java b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbContainer.java new file mode 100644 index 0000000000000..123491fdfd70b --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbContainer.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import java.net.URI; +import org.testcontainers.containers.localstack.LocalStackContainer; +import org.testcontainers.utility.DockerImageName; + +public class DynamodbContainer extends LocalStackContainer { + + public static DynamodbContainer createWithStart() { + var dynamodbContainer = (DynamodbContainer) new DynamodbContainer() + .withServices(Service.DYNAMODB); + dynamodbContainer.start(); + return dynamodbContainer; + } + + public static DynamodbContainer create() { + return (DynamodbContainer) new DynamodbContainer() + .withServices(Service.DYNAMODB); + } + + public DynamodbContainer() { + super(DockerImageName.parse("localstack/localstack:1.2.0")); + } + + public URI getEndpointOverride() { + return super.getEndpointOverride(Service.DYNAMODB); + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbDataFactory.java b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbDataFactory.java new file mode 100644 index 0000000000000..e02195f91a7b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbDataFactory.java @@ -0,0 +1,102 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.CatalogHelpers; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConfiguredAirbyteStream; +import io.airbyte.protocol.models.DestinationSyncMode; +import io.airbyte.protocol.models.Field; +import io.airbyte.protocol.models.JsonSchemaType; +import io.airbyte.protocol.models.SyncMode; +import java.util.List; +import java.util.Map; +import java.util.stream.IntStream; +import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.CreateTableRequest; +import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement; +import software.amazon.awssdk.services.dynamodb.model.KeyType; +import software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughput; +import software.amazon.awssdk.services.dynamodb.model.PutItemRequest; +import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType; +import software.amazon.awssdk.services.dynamodb.model.TableClass; + +public class DynamodbDataFactory { + + private DynamodbDataFactory() { + + } + + public static List createTables(String tablePrefix, int tables) { + return IntStream.range(0, tables).mapToObj(range -> CreateTableRequest + .builder() + .tableClass(TableClass.STANDARD) + .tableName(tablePrefix + (range + 1)) + .attributeDefinitions( + AttributeDefinition.builder() + .attributeName("attr_1") + .attributeType(ScalarAttributeType.S) + .build(), + AttributeDefinition.builder() + .attributeName("attr_2") + .attributeType(ScalarAttributeType.S) + .build()) + .keySchema( + KeySchemaElement.builder() + .attributeName("attr_1") + .keyType(KeyType.HASH) + .build(), + KeySchemaElement.builder() + .attributeName("attr_2") + .keyType(KeyType.RANGE) + .build()) + 
.provisionedThroughput(ProvisionedThroughput.builder() + .readCapacityUnits(10L) + .writeCapacityUnits(10L).build()) + .build()) + .toList(); + + } + + public static PutItemRequest putItemRequest(String tableName, Map item) { + return PutItemRequest + .builder() + .tableName(tableName) + .item(item) + .build(); + + } + + public static JsonNode createJsonConfig(DynamodbContainer dynamodbContainer) { + return Jsons.jsonNode(ImmutableMap.builder() + .put("endpoint", dynamodbContainer.getEndpointOverride().toString()) + .put("region", dynamodbContainer.getRegion()) + .put("access_key_id", dynamodbContainer.getAccessKey()) + .put("secret_access_key", dynamodbContainer.getSecretKey()) + .build()); + } + + public static ConfiguredAirbyteCatalog createConfiguredAirbyteCatalog(String streamName) { + return new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList( + new ConfiguredAirbyteStream() + .withSyncMode(SyncMode.INCREMENTAL) + .withCursorField(Lists.newArrayList("attr_timestamp")) + .withPrimaryKey(List.of(List.of("attr_1", "attr_2"))) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream( + streamName, + Field.of("attr_1", JsonSchemaType.STRING), + Field.of("attr_2", JsonSchemaType.STRING), + Field.of("attr_3", JsonSchemaType.NUMBER), + Field.of("attr_timestamp", JsonSchemaType.INTEGER)) + .withSupportedSyncModes(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))))); + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbOperationsTest.java b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbOperationsTest.java new file mode 100644 index 0000000000000..b6d22b3e8ab03 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbOperationsTest.java @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.json.JSONException; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.skyscreamer.jsonassert.JSONAssert; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.PutItemRequest; + +public class DynamodbOperationsTest { + + private static final String TABLE_NAME = "airbyte_table"; + + private DynamodbOperations dynamodbOperations; + + private DynamoDbClient dynamoDbClient; + + private DynamodbContainer dynamodbContainer; + + private ObjectMapper objectMapper; + + @BeforeEach + void setup() { + dynamodbContainer = DynamodbContainer.createWithStart(); + + var jsonConfig = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + + this.dynamodbOperations = new DynamodbOperations(DynamodbConfig.createDynamodbConfig(jsonConfig)); + this.dynamoDbClient = DynamodbUtils.createDynamoDbClient(DynamodbConfig.createDynamodbConfig(jsonConfig)); + + this.objectMapper = new ObjectMapper() + .configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true) + .configure(SerializationFeature.INDENT_OUTPUT, true); + + } + + @AfterEach + void shutdown() { + dynamoDbClient.close(); + dynamodbOperations.close(); + dynamodbContainer.stop(); + dynamodbContainer.close(); + } + + @Test + void testListTables() { + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 5); + createTableRequests.forEach(dynamoDbClient::createTable); + + List tables = dynamodbOperations.listTables(); + + assertThat(tables).hasSize(5) + .anyMatch(t -> t.equals(TABLE_NAME + 1)) + .anyMatch(t -> t.equals(TABLE_NAME + 2)) + .anyMatch(t -> t.equals(TABLE_NAME + 3)) + .anyMatch(t -> t.equals(TABLE_NAME + 4)) + .anyMatch(t -> t.equals(TABLE_NAME + 5)); + + } + + @Test + void testPrimaryKey() { + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 1); + var createTableResponse = dynamoDbClient.createTable(createTableRequests.get(0)); + + var primaryKey = dynamodbOperations.primaryKey(createTableResponse.tableDescription().tableName()); + + assertThat(primaryKey).hasSize(2) + .anyMatch(t -> t.equals("attr_1")) + .anyMatch(t -> t.equals("attr_2")); + + } + + @Test + void testInferSchema() throws JsonProcessingException, JSONException { + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 1); + var createTableResponse = dynamoDbClient.createTable(createTableRequests.get(0)); + String tableName = createTableResponse.tableDescription().tableName(); + + PutItemRequest putItemRequest1 = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_4").build(), + "attr_2", AttributeValue.builder().s("str_5").build(), + "attr_3", AttributeValue.builder().n("1234").build(), + "attr_4", AttributeValue.builder().ns("12.5", "74.5").build())); + + dynamoDbClient.putItem(putItemRequest1); + + PutItemRequest putItemRequest2 = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_6").build(), + "attr_2", 
AttributeValue.builder().s("str_7").build(), + "attr_5", AttributeValue.builder().bool(true).build(), + "attr_6", AttributeValue.builder().ss("str_1", "str_2").build())); + + dynamoDbClient.putItem(putItemRequest2); + + var schema = dynamodbOperations.inferSchema(tableName, 1000); + + JSONAssert.assertEquals(objectMapper.writeValueAsString(schema), """ + { + "attr_5": { + "type": ["null","boolean"] + }, + "attr_4": { + "type": ["null","array"], + "items": { + "type": ["null","number"] + } + }, + "attr_3": { + "type": ["null","integer"] + }, + "attr_2": { + "type": ["null","string"] + }, + "attr_1": { + "type": ["null","string"] + }, + "attr_6": { + "type": ["null","array"], + "items": { + "type": ["null","string"] + } + } + } + """, true); + + } + + @Test + void testScanTable() throws JsonProcessingException, JSONException { + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 1); + var createTableResponse = dynamoDbClient.createTable(createTableRequests.get(0)); + String tableName = createTableResponse.tableDescription().tableName(); + + PutItemRequest putItemRequest1 = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_4").build(), + "attr_2", AttributeValue.builder().s("str_5").build(), + "attr_3", AttributeValue.builder().s("2017-12-21T17:42:34Z").build(), + "attr_4", AttributeValue.builder().ns("12.5", "74.5").build())); + + dynamoDbClient.putItem(putItemRequest1); + + PutItemRequest putItemRequest2 = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_6").build(), + "attr_2", AttributeValue.builder().s("str_7").build(), + "attr_3", AttributeValue.builder().s("2019-12-21T17:42:34Z").build(), + "attr_6", AttributeValue.builder().ss("str_1", "str_2").build())); + + dynamoDbClient.putItem(putItemRequest2); + + var response = dynamodbOperations.scanTable(tableName, Set.of("attr_1", "attr_2", "attr_3"), + new DynamodbOperations.FilterAttribute("attr_3", "2018-12-21T17:42:34Z", + DynamodbOperations.FilterAttribute.FilterType.S)); + + assertThat(response) + .hasSize(1); + + JSONAssert.assertEquals(objectMapper.writeValueAsString(response.get(0)), """ + { + "attr_3": "2019-12-21T17:42:34Z", + "attr_2": "str_7", + "attr_1": "str_6" + } + """, true); + + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceAcceptanceTest.java new file mode 100644 index 0000000000000..3671f38fe2ef0 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceAcceptanceTest.java @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.resources.MoreResources; +import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; +import io.airbyte.integrations.standardtest.source.TestDestinationEnv; +import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; +import io.airbyte.protocol.models.ConnectorSpecification; +import java.util.HashMap; +import java.util.Map; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.PutItemRequest; + +public class DynamodbSourceAcceptanceTest extends SourceAcceptanceTest { + + private static final String TABLE_NAME = "airbyte_table"; + + private JsonNode config; + + private DynamodbContainer dynamodbContainer; + + private DynamoDbClient dynamoDbClient; + + @Override + protected void setupEnvironment(final TestDestinationEnv testEnv) { + dynamodbContainer = DynamodbContainer.createWithStart(); + + config = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + + dynamoDbClient = DynamodbUtils.createDynamoDbClient(DynamodbConfig.createDynamodbConfig(config)); + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 1); + var createTableResponse = dynamoDbClient.createTable(createTableRequests.get(0)); + String tableName = createTableResponse.tableDescription().tableName(); + + PutItemRequest putItemRequest = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_4").build(), + "attr_2", AttributeValue.builder().s("str_5").build(), + "attr_3", AttributeValue.builder().n("1234.25").build(), + "attr_timestamp", AttributeValue.builder().n("1572268323").build())); + + dynamoDbClient.putItem(putItemRequest); + + } + + @Override + protected void tearDown(final TestDestinationEnv testEnv) { + dynamoDbClient.close(); + dynamodbContainer.stop(); + dynamodbContainer.close(); + } + + @Override + protected String getImageName() { + return "airbyte/source-dynamodb:dev"; + } + + @Override + protected ConnectorSpecification getSpec() throws Exception { + return Jsons.deserialize(MoreResources.readResource("spec.json"), ConnectorSpecification.class); + } + + @Override + protected JsonNode getConfig() { + return config; + } + + @Override + protected ConfiguredAirbyteCatalog getConfiguredCatalog() { + return DynamodbDataFactory.createConfiguredAirbyteCatalog(TABLE_NAME + 1); + } + + @Override + protected JsonNode getState() { + return Jsons.jsonNode(new HashMap<>()); + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceTest.java b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceTest.java new file mode 100644 index 0000000000000..c0ff366bc6fc4 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test-integration/java/io/airbyte/integrations/source/dynamodb/DynamodbSourceTest.java @@ -0,0 +1,160 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import io.airbyte.protocol.models.AirbyteMessage; +import io.airbyte.protocol.models.SyncMode; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import software.amazon.awssdk.services.dynamodb.DynamoDbClient; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.PutItemRequest; + +public class DynamodbSourceTest { + + private static final String TABLE_NAME = "airbyte_table"; + + private DynamodbSource dynamodbSource; + + private DynamoDbClient dynamoDbClient; + + private DynamodbContainer dynamodbContainer; + + @BeforeEach + void setup() { + dynamodbContainer = DynamodbContainer.createWithStart(); + + var jsonConfig = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + + this.dynamodbSource = new DynamodbSource(); + this.dynamoDbClient = DynamodbUtils.createDynamoDbClient(DynamodbConfig.createDynamodbConfig(jsonConfig)); + + } + + @AfterEach + void shutdown() { + dynamoDbClient.close(); + dynamodbContainer.stop(); + dynamodbContainer.close(); + } + + @Test + void testCheckWithSucceeded() { + + var jsonConfig = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + + DynamodbDataFactory.createTables(TABLE_NAME, 1).forEach(dynamoDbClient::createTable); + + var connectionStatus = dynamodbSource.check(jsonConfig); + + assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.SUCCEEDED); + + } + + @Test + void testCheckWithFailed() { + + var jsonConfig = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + ((ObjectNode) jsonConfig).replace("endpoint", Jsons.jsonNode("localhost:8080")); + + DynamodbDataFactory.createTables(TABLE_NAME, 1).forEach(dynamoDbClient::createTable); + + var connectionStatus = dynamodbSource.check(jsonConfig); + + assertThat(connectionStatus.getStatus()).isEqualTo(AirbyteConnectionStatus.Status.FAILED); + + } + + @Test + void testDiscover() { + + var jsonConfig = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 2); + + var createTableResponses = createTableRequests.stream().map(dynamoDbClient::createTable).toList(); + + DynamodbDataFactory.putItemRequest(createTableResponses.get(0).tableDescription().tableName(), Map.of( + "attr_1", AttributeValue.builder().s("str_4").build(), + "attr_2", AttributeValue.builder().s("str_5").build(), + "attr_3", AttributeValue.builder().s("2017-12-21T17:42:34Z").build(), + "attr_4", AttributeValue.builder().ns("12.5", "74.5").build())); + + DynamodbDataFactory.putItemRequest(createTableResponses.get(1).tableDescription().tableName(), Map.of( + "attr_1", AttributeValue.builder().s("str_4").build(), + "attr_2", AttributeValue.builder().s("str_5").build(), + "attr_4", AttributeValue.builder().s("2017-12-21T17:42:34Z").build(), + "attr_5", AttributeValue.builder().ns("12.5", "74.5").build())); + + var airbyteCatalog = dynamodbSource.discover(jsonConfig); + + assertThat(airbyteCatalog.getStreams()) + .anyMatch(as -> as.getName().equals(createTableResponses.get(0).tableDescription().tableName()) && + 
as.getJsonSchema().isObject() && + as.getSourceDefinedPrimaryKey().get(0).containsAll(List.of("attr_1", "attr_2")) && + as.getSupportedSyncModes().containsAll(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))) + .anyMatch(as -> as.getName().equals(createTableResponses.get(1).tableDescription().tableName()) && + as.getJsonSchema().isObject() && + as.getSourceDefinedPrimaryKey().get(0).containsAll(List.of("attr_1", "attr_2")) && + as.getSupportedSyncModes().containsAll(List.of(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL))); + + } + + @Test + void testRead() { + + var jsonConfig = DynamodbDataFactory.createJsonConfig(dynamodbContainer); + + var createTableRequests = DynamodbDataFactory.createTables(TABLE_NAME, 1); + var createTableResponses = createTableRequests.stream().map(dynamoDbClient::createTable).toList(); + String tableName = createTableResponses.get(0).tableDescription().tableName(); + var configuredCatalog = DynamodbDataFactory.createConfiguredAirbyteCatalog(tableName); + + PutItemRequest putItemRequest1 = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_4").build(), + "attr_2", AttributeValue.builder().s("str_5").build(), + "attr_3", AttributeValue.builder().n("1234.25").build(), + "attr_timestamp", AttributeValue.builder().n("1572268323").build())); + + dynamoDbClient.putItem(putItemRequest1); + + PutItemRequest putItemRequest2 = DynamodbDataFactory.putItemRequest(tableName, Map.of( + "attr_1", AttributeValue.builder().s("str_6").build(), + "attr_2", AttributeValue.builder().s("str_7").build(), + "attr_3", AttributeValue.builder().n("1234.25").build(), + "attr_timestamp", AttributeValue.builder().n("1672228343").build())); + + dynamoDbClient.putItem(putItemRequest2); + + Iterator iterator = dynamodbSource.read(jsonConfig, configuredCatalog, Jsons.emptyObject()); + + var airbyteRecordMessages = Stream.generate(() -> null) + .takeWhile(x -> iterator.hasNext()) + .map(n -> iterator.next()) + .filter(am -> am.getType() == AirbyteMessage.Type.RECORD) + .map(AirbyteMessage::getRecord) + .toList(); + + assertThat(airbyteRecordMessages) + .anyMatch(arm -> arm.getStream().equals(tableName) && + Jsons.serialize(arm.getData()).equals( + "{\"attr_timestamp\":1572268323,\"attr_3\":1234.25,\"attr_2\":\"str_5\",\"attr_1\":\"str_4\"}")) + .anyMatch(arm -> arm.getStream().equals(tableName) && + Jsons.serialize(arm.getData()).equals( + "{\"attr_timestamp\":1672228343,\"attr_3\":1234.25,\"attr_2\":\"str_7\",\"attr_1\":\"str_6\"}")); + + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbAttributeSerializerTest.java b/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbAttributeSerializerTest.java new file mode 100644 index 0000000000000..379dc5ab81d35 --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbAttributeSerializerTest.java @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.module.SimpleModule; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import org.json.JSONException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.skyscreamer.jsonassert.JSONAssert; +import software.amazon.awssdk.core.SdkBytes; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; + +class DynamodbAttributeSerializerTest { + + private ObjectMapper attributeObjectMapper; + + @BeforeEach + void setup() { + SimpleModule module = new SimpleModule(); + module.addSerializer(AttributeValue.class, new DynamodbAttributeSerializer()); + this.attributeObjectMapper = new ObjectMapper() + .registerModule(module) + .configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true) + .configure(SerializationFeature.INDENT_OUTPUT, true); + } + + @Test + void serializeAttributeValueToJson() throws JSONException, JsonProcessingException { + + Map items = Map.of( + "sAttribute", AttributeValue.builder().s("string").build(), + "nAttribute", AttributeValue.builder().n("123").build(), + "bAttribute", + AttributeValue.builder().b(SdkBytes.fromByteArray("byteArray".getBytes(StandardCharsets.UTF_8))).build(), + "ssAttribute", AttributeValue.builder().ss("string1", "string2").build(), + "nsAttribute", AttributeValue.builder().ns("12.5", "25.5").build(), + "bsAttribute", AttributeValue.builder().bs( + SdkBytes.fromByteArray("byteArray1".getBytes(StandardCharsets.UTF_8)), + SdkBytes.fromByteArray("byteArray2".getBytes(StandardCharsets.UTF_8))).build(), + "lAttribute", AttributeValue.builder().l( + AttributeValue.builder().s("string3").build(), + AttributeValue.builder().n("125").build()).build(), + "mAttribute", AttributeValue.builder().m(Map.of( + "attr1", AttributeValue.builder().s("string4").build(), + "attr2", AttributeValue.builder().s("string5").build())).build(), + "boolAttribute", AttributeValue.builder().bool(false).build(), + "nulAttribute", AttributeValue.builder().nul(true).build()); + + var jsonNode = attributeObjectMapper.writeValueAsString(items); + + JSONAssert.assertEquals(jsonNode, """ + { + "bAttribute": "Ynl0ZUFycmF5", + "boolAttribute": false, + "bsAttribute": ["Ynl0ZUFycmF5MQ==", "Ynl0ZUFycmF5Mg=="], + "lAttribute": ["string3", 125], + "mAttribute": { + "attr1": "string4", + "attr2": "string5" + }, + "nAttribute": 123, + "nsAttribute": [12.5, 25.5], + "nulAttribute": null, + "sAttribute": "string", + "ssAttribute": ["string1", "string2"] + } + """, true); + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbConfigTest.java b/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbConfigTest.java new file mode 100644 index 0000000000000..5913d706ca24c --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbConfigTest.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.dynamodb; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.airbyte.commons.json.Jsons; +import java.net.URI; +import java.util.Map; +import org.junit.jupiter.api.Test; +import software.amazon.awssdk.regions.Region; + +class DynamodbConfigTest { + + @Test + void testDynamodbConfig() { + + var jsonConfig = Jsons.jsonNode(Map.of( + "endpoint", "http://localhost:8080", + "region", "us-east-1", + "access_key_id", "A012345678910EXAMPLE", + "secret_access_key", "a012345678910ABCDEFGH/AbCdEfGhLEKEY")); + + var dynamodbConfig = DynamodbConfig.createDynamodbConfig(jsonConfig); + + assertThat(dynamodbConfig) + .hasFieldOrPropertyWithValue("endpoint", URI.create("http://localhost:8080")) + .hasFieldOrPropertyWithValue("region", Region.of("us-east-1")) + .hasFieldOrPropertyWithValue("accessKey", "A012345678910EXAMPLE") + .hasFieldOrPropertyWithValue("secretKey", "a012345678910ABCDEFGH/AbCdEfGhLEKEY"); + + } + +} diff --git a/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbSchemaSerializerTest.java b/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbSchemaSerializerTest.java new file mode 100644 index 0000000000000..601aa932875af --- /dev/null +++ b/airbyte-integrations/connectors/source-dynamodb/src/test/java/io/airbyte/integrations/source/dynamodb/DynamodbSchemaSerializerTest.java @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.dynamodb; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.module.SimpleModule; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import org.json.JSONException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.skyscreamer.jsonassert.JSONAssert; +import software.amazon.awssdk.core.SdkBytes; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; + +class DynamodbSchemaSerializerTest { + + private ObjectMapper schemaObjectMapper; + + @BeforeEach + void setup() { + SimpleModule module = new SimpleModule(); + module.addSerializer(AttributeValue.class, new DynamodbSchemaSerializer()); + this.schemaObjectMapper = new ObjectMapper() + .registerModule(module) + .configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true) + .configure(SerializationFeature.INDENT_OUTPUT, true); + } + + @Test + void serializeAttributeValueToJsonSchema() throws JsonProcessingException, JSONException { + + Map items = Map.of( + "sAttribute", AttributeValue.builder().s("string").build(), + "nAttribute", AttributeValue.builder().n("123").build(), + "bAttribute", + AttributeValue.builder().b(SdkBytes.fromByteArray("byteArray".getBytes(StandardCharsets.UTF_8))).build(), + "ssAttribute", AttributeValue.builder().ss("string1", "string2").build(), + "nsAttribute", AttributeValue.builder().ns("125", "126").build(), + "bsAttribute", AttributeValue.builder().bs( + SdkBytes.fromByteArray("byteArray1".getBytes(StandardCharsets.UTF_8)), + SdkBytes.fromByteArray("byteArray2".getBytes(StandardCharsets.UTF_8))).build(), + "lAttribute", AttributeValue.builder().l( + AttributeValue.builder().s("string3").build(), + AttributeValue.builder().n("12.5").build()).build(), + "mAttribute", AttributeValue.builder().m(Map.of( 
+ "attr1", AttributeValue.builder().s("string4").build(), + "attr2", AttributeValue.builder().s("number4").build())).build(), + "boolAttribute", AttributeValue.builder().bool(false).build(), + "nulAttribute", AttributeValue.builder().nul(true).build() + + ); + + var jsonSchema = schemaObjectMapper.writeValueAsString(items); + + JSONAssert.assertEquals(jsonSchema, """ + { + "bAttribute": { + "type": ["null", "string"], + "contentEncoding": "base64" + }, + "boolAttribute": { + "type": ["null", "boolean"] + }, + "bsAttribute": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"], + "contentEncoding": "base64" + } + }, + "lAttribute": { + "type": ["null", "array"], + "items": { + "anyOf": [{ + "type": ["null", "string"] + }, { + "type": ["null", "number"] + }] + } + }, + "mAttribute": { + "type": ["null", "object"], + "properties": { + "attr2": { + "type": ["null", "string"] + }, + "attr1": { + "type": ["null", "string"] + } + } + }, + "nAttribute": { + "type": ["null", "integer"] + }, + "nsAttribute": { + "type": ["null", "array"], + "items": { + "type": ["null", "number"] + } + }, + "nulAttribute": { + "type": "null" + }, + "sAttribute": { + "type": ["null", "string"] + }, + "ssAttribute": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } + """, true); + + } + +} diff --git a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml index 7f499fdefc405..73ee3ce32d37a 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-config.yml @@ -1,6 +1,7 @@ # See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests -connector_image: airbyte/source-elasticsearch +connector_image: airbyte/source-elasticsearch:dev tests: spec: - - spec_path: "src/test/resources/expected_spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-elasticsearch/build.gradle b/airbyte-integrations/connectors/source-elasticsearch/build.gradle index dbd9910362973..fff8a6d3fe05a 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/build.gradle +++ b/airbyte-integrations/connectors/source-elasticsearch/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json index fba7486011334..71ad637a0c4f5 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-elasticsearch/src/main/resources/spec.json @@ -5,7 +5,7 @@ "title": "Elasticsearch Connection Configuration", "type": "object", "required": ["endpoint"], - "additionalProperties": false, + "additionalProperties": true, "properties": { "endpoint": { "title": "Server Endpoint", @@ -19,7 +19,7 @@ "oneOf": [ { "title": "None", - "additionalProperties": false, + "additionalProperties": true, "description": "No authentication will be used", "required": ["method"], "properties": { @@ -31,7 +31,7 @@ }, { "title": "Api Key/Secret", - "additionalProperties": false, + "additionalProperties": true, "description": "Use a api key and secret combination to authenticate", "required": ["method", "apiKeyId", "apiKeySecret"], "properties": { @@ -54,7 +54,7 @@ }, { "title": "Username/Password", - "additionalProperties": false, + "additionalProperties": true, "description": "Basic auth header with a username and password", "required": ["method", "username", "password"], "properties": { diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..c945791cea371 --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/dummy_config.json @@ -0,0 +1,3 @@ +{ + "endpoint": "default" +} diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json 
b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..a6245dc39f1cf --- /dev/null +++ b/airbyte-integrations/connectors/source-elasticsearch/src/test-integration/resources/expected_spec.json @@ -0,0 +1,85 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/source/elasticsearch", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Elasticsearch Connection Configuration", + "type": "object", + "required": ["endpoint"], + "additionalProperties": true, + "properties": { + "endpoint": { + "title": "Server Endpoint", + "type": "string", + "description": "The full url of the Elasticsearch server" + }, + "authenticationMethod": { + "title": "Authentication Method", + "type": "object", + "description": "The type of authentication to be used", + "oneOf": [ + { + "title": "None", + "additionalProperties": true, + "description": "No authentication will be used", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "none" + } + } + }, + { + "title": "Api Key/Secret", + "additionalProperties": true, + "description": "Use a api key and secret combination to authenticate", + "required": ["method", "apiKeyId", "apiKeySecret"], + "properties": { + "method": { + "type": "string", + "const": "secret" + }, + "apiKeyId": { + "title": "API Key ID", + "description": "The Key ID to used when accessing an enterprise Elasticsearch instance.", + "type": "string" + }, + "apiKeySecret": { + "title": "API Key Secret", + "description": "The secret associated with the API Key ID.", + "type": "string", + "airbyte_secret": true + } + } + }, + { + "title": "Username/Password", + "additionalProperties": true, + "description": "Basic auth header with a username and password", + "required": ["method", "username", "password"], + "properties": { + "method": { + "type": "string", + "const": "basic" + }, + "username": { + "title": "Username", + "description": "Basic auth username to access a secure Elasticsearch server", + "type": "string" + }, + "password": { + "title": "Password", + "description": "Basic auth password to access a secure Elasticsearch server", + "type": "string", + "airbyte_secret": true + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json index fba7486011334..71ad637a0c4f5 100644 --- a/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-elasticsearch/src/test/resources/expected_spec.json @@ -5,7 +5,7 @@ "title": "Elasticsearch Connection Configuration", "type": "object", "required": ["endpoint"], - "additionalProperties": false, + "additionalProperties": true, "properties": { "endpoint": { "title": "Server Endpoint", @@ -19,7 +19,7 @@ "oneOf": [ { "title": "None", - "additionalProperties": false, + "additionalProperties": true, "description": "No authentication will be used", "required": ["method"], "properties": { @@ -31,7 +31,7 @@ }, { "title": "Api Key/Secret", - "additionalProperties": false, + "additionalProperties": true, "description": "Use a api key and secret combination to authenticate", "required": ["method", "apiKeyId", "apiKeySecret"], "properties": { @@ -54,7 
+54,7 @@ }, { "title": "Username/Password", - "additionalProperties": false, + "additionalProperties": true, "description": "Basic auth header with a username and password", "required": ["method", "username", "password"], "properties": { diff --git a/airbyte-integrations/connectors/source-emailoctopus/.dockerignore b/airbyte-integrations/connectors/source-emailoctopus/.dockerignore new file mode 100644 index 0000000000000..bcd24bdbb33b9 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_emailoctopus +!setup.py +!secrets \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/BOOTSTRAP.md b/airbyte-integrations/connectors/source-emailoctopus/BOOTSTRAP.md new file mode 100644 index 0000000000000..235e73bf8af08 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/BOOTSTRAP.md @@ -0,0 +1,8 @@ +# EmailOctopus + +EmailOctopus is an email marketing tool. Link to API [here](https://emailoctopus.com/api-documentation). + +## How to get an API key +- [Sign up for EmailOctopus](https://emailoctopus.com/account/sign-up). I recall there is a verification process that involves speaking with support staff. +- Pricing is volume-based, so a sandbox account should be free: see [Pricing](https://emailoctopus.com/pricing). +- Once signed in, generate an API key from the [API documentation page](https://emailoctopus.com/api-documentation). \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/Dockerfile b/airbyte-integrations/connectors/source-emailoctopus/Dockerfile new file mode 100644 index 0000000000000..3e727c3ebb2ba --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_emailoctopus ./source_emailoctopus + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-emailoctopus \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/README.md b/airbyte-integrations/connectors/source-emailoctopus/README.md new file mode 100644 index 0000000000000..0c850c27fa3c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/README.md @@ -0,0 +1,79 @@ +# EmailOctopus Source + +This is the repository for the EmailOctopus configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/emailoctopus). 
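Before the local development notes below, here is a minimal, illustrative sketch of the API interaction this configuration-based connector describes. It is not part of the connector code: it assumes the `requests` library and simply mirrors the `url_base`, the `api_key` request parameter, the `data` field pointer, and the `limit`/`page` page-increment pagination defined in `source_emailoctopus/emailoctopus.yaml` further down in this diff.

```
# Illustrative only: a hand-rolled version of the paged GET requests that the
# declarative YAML configuration (url_base, api_key parameter, "data" pointer,
# limit/page pagination) expresses. Assumes the `requests` library is installed.
import requests

API_BASE = "https://emailoctopus.com/api/1.6"


def fetch_all(path, api_key, page_size=50):
    """Yield every record from a collection endpoint such as /campaigns or /lists."""
    page = 1  # assumption: pages are 1-indexed
    while True:
        response = requests.get(
            f"{API_BASE}{path}",
            params={"api_key": api_key, "limit": page_size, "page": page},
        )
        response.raise_for_status()
        records = response.json().get("data", [])
        if not records:  # stop once a page comes back empty
            return
        yield from records
        page += 1


# Usage sketch (replace with a real key):
# for campaign in fetch_all("/campaigns", "your_api_key"):
#     print(campaign["id"], campaign.get("name"))
```

In the connector itself these mechanics are generated by the low-code CDK from the YAML file, so no hand-written request code is needed.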
+ +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-emailoctopus:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/emailoctopus) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_emailoctopus/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source emailoctopus test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-emailoctopus:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-emailoctopus:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-emailoctopus:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-emailoctopus:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-emailoctopus:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-emailoctopus:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your integration tests with Docker, run the `acceptance-test-docker.sh` script in this directory. + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-emailoctopus:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-emailoctopus:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1.
Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/__init__.py b/airbyte-integrations/connectors/source-emailoctopus/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-emailoctopus/acceptance-test-config.yml b/airbyte-integrations/connectors/source-emailoctopus/acceptance-test-config.yml new file mode 100644 index 0000000000000..fa2b66fee5df8 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/acceptance-test-config.yml @@ -0,0 +1,27 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-emailoctopus:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_emailoctopus/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-emailoctopus/acceptance-test-docker.sh new file mode 100755 index 0000000000000..fa680528f222c --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/build.gradle b/airbyte-integrations/connectors/source-emailoctopus/build.gradle new file mode 100644 index 0000000000000..e9b31b83826ac --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_emailoctopus' +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/integration_tests/__init__.py b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-emailoctopus/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/acceptance.py new file mode 100644 index 0000000000000..e2a8f1a4cb74d --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + pass diff --git a/airbyte-integrations/connectors/source-emailoctopus/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..c874cae945bb2 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/configured_catalog.json @@ -0,0 +1,34 @@ +{ + "streams": [ + { + "cursor_field": null, + "destination_sync_mode": "overwrite", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "campaigns", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": [["id"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh" + }, + { + "cursor_field": null, + "destination_sync_mode": "overwrite", + "primary_key": null, + "stream": { + "default_cursor_field": null, + "json_schema": {}, + "name": "lists", + "namespace": null, + "source_defined_cursor": null, + "source_defined_primary_key": [["id"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh" + } + ] +} diff --git a/airbyte-integrations/connectors/source-emailoctopus/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..dc521ade7acf9 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git 
a/airbyte-integrations/connectors/source-emailoctopus/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/sample_config.json new file mode 100644 index 0000000000000..013e22132796e --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "your_api_key" +} diff --git a/airbyte-integrations/connectors/source-emailoctopus/main.py b/airbyte-integrations/connectors/source-emailoctopus/main.py new file mode 100644 index 0000000000000..4a250c9ca57cc --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_emailoctopus import SourceEmailoctopus + +if __name__ == "__main__": + source = SourceEmailoctopus() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-emailoctopus/requirements.txt b/airbyte-integrations/connectors/source-emailoctopus/requirements.txt new file mode 100644 index 0000000000000..78140e52009f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-emailoctopus/setup.py b/airbyte-integrations/connectors/source-emailoctopus/setup.py new file mode 100644 index 0000000000000..8d763ab4793da --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_emailoctopus", + description="Source implementation for Emailoctopus.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/__init__.py b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/__init__.py new file mode 100644 index 0000000000000..a548e3428f31c --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceEmailoctopus + +__all__ = ["SourceEmailoctopus"] diff --git a/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/emailoctopus.yaml b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/emailoctopus.yaml new file mode 100644 index 0000000000000..a613bb76c4c71 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/emailoctopus.yaml @@ -0,0 +1,76 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data"] + requester: + url_base: "https://emailoctopus.com/api/1.6" + http_method: "GET" + request_options_provider: + request_parameters: + api_key: "{{ config['api_key'] }}" + increment_paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: PageIncrement + page_size: 50 + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + page_token_option: + inject_into: "request_parameter" + field_name: "page" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + # API Docs: https://emailoctopus.com/api-documentation/campaigns/get-all + campaigns_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "campaigns" + primary_key: "id" + path: "/campaigns" + retriever: + $ref: "*ref(definitions.retriever)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + # Fields are large and may affect performance + transformations: + - type: RemoveFields + field_pointers: + - ["content", "html"] + - ["content", "plain_text"] + # API Docs: https://emailoctopus.com/api-documentation/lists/get-all + lists_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "lists" + primary_key: "id" + path: "/lists" + retriever: + $ref: "*ref(definitions.retriever)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + # 'tags' array not yet documented (2022-10-29) + transformations: + - type: RemoveFields + field_pointers: + - ["tags"] + +streams: + - "*ref(definitions.campaigns_stream)" + - "*ref(definitions.lists_stream)" + +check: + stream_names: + - "campaigns" + - "lists" diff --git a/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/schemas/campaigns.json b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/schemas/campaigns.json new file mode 100644 index 0000000000000..f2da8ae7e58e1 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/schemas/campaigns.json @@ -0,0 +1,62 @@ +{ + "type": "object", + "title": "Campaigns", + "description": "Details of all campaigns.", + "properties": { + "id": { + "type": "string", + "title": "Campaign ID", + "description": "The identifier of the campaign." + }, + "status": { + "type": "string", + "title": "Status", + "description": "The status of the campaign (DRAFT/SENDING/SENT/ERROR)." + }, + "name": { + "type": ["string", "null"], + "title": "Campaign Name", + "description": "The name of the campaign." + }, + "subject": { + "type": ["string", "null"], + "title": "Subject", + "description": "The subject of the campaign." 
+ }, + "to": { + "type": "array", + "title": "To", + "description": "The ids of the lists the campaign was sent to.", + "items": { + "type": "string" + } + }, + "from": { + "type": "object", + "title": "From", + "description": "The sender of the campaign.", + "properties": { + "name": { + "type": "string", + "title": "Sender Name", + "description": "The name the campaign was sent from." + }, + "email_address": { + "type": "string", + "title": "Sender Email Address", + "description": "The email address the campaign was sent from." + } + } + }, + "created_at": { + "type": "string", + "title": "Campaign Created At", + "description": "When the campaign was created, in ISO 8601 format." + }, + "sent_at": { + "type": "string", + "title": "Campaign Sent At", + "description": "When the campaign was sent, in ISO 8601 format." + } + } +} diff --git a/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/schemas/lists.json b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/schemas/lists.json new file mode 100644 index 0000000000000..d3ce7ad20cba7 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/schemas/lists.json @@ -0,0 +1,76 @@ +{ + "type": "object", + "title": "Lists", + "description": "Details of all lists.", + "properties": { + "id": { + "type": "string", + "title": "List ID", + "description": "The identifier of the list." + }, + "name": { + "type": "string", + "title": "List Name", + "description": "The name of the list." + }, + "double_opt_in": { + "type": "boolean", + "title": "Double Opt-in", + "description": "If double opt-in has been enabled on the list." + }, + "fields": { + "type": "array", + "title": "Subscriber Fields", + "description": "Stored information on subscribers.", + "properties": { + "tag": { + "type": "string", + "title": "Tag", + "description": "The identifier used to reference the field in your emails." + }, + "type": { + "type": "string", + "title": "Type", + "description": "The type of the field - can be NUMBER, TEXT or DATE." + }, + "label": { + "type": "string", + "title": "Label", + "description": "A human readable label for the field." + }, + "fallback": { + "type": "null", + "title": "Fallback", + "description": "A default value for the field, used when there is no other value available." + } + } + }, + "counts": { + "type": "object", + "title": "Counts", + "description": "The summary counts of the list.", + "properties": { + "pending": { + "type": "integer", + "title": "Pending", + "description": "The number of pending contacts in the list." + }, + "subscribed": { + "type": "integer", + "title": "Subscribed", + "description": "The number of subscribed contacts in the list." + }, + "unsubscribed": { + "type": "integer", + "title": "Unsubscribed", + "description": "The number of unsubscribed contacts in the list." + } + } + }, + "created_at": { + "type": "string", + "title": "List Created At", + "description": "When the list was created, in ISO 8601 format." + } + } +} diff --git a/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/source.py b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/source.py new file mode 100644 index 0000000000000..738ef2970b88b --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceEmailoctopus(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "emailoctopus.yaml"}) diff --git a/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/spec.yaml b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/spec.yaml new file mode 100644 index 0000000000000..75da7a9bbd392 --- /dev/null +++ b/airbyte-integrations/connectors/source-emailoctopus/source_emailoctopus/spec.yaml @@ -0,0 +1,17 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/emailoctopus +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: EmailOctopus Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + type: string + title: EmailOctopus API key + description: >- + EmailOctopus API Key. See the docs + for information on how to generate this key. + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-faker/Dockerfile b/airbyte-integrations/connectors/source-faker/Dockerfile index 86f7f29a80a0c..67498507c5a7d 100644 --- a/airbyte-integrations/connectors/source-faker/Dockerfile +++ b/airbyte-integrations/connectors/source-faker/Dockerfile @@ -34,5 +34,5 @@ COPY source_faker ./source_faker ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.0 +LABEL io.airbyte.version=0.2.1 LABEL io.airbyte.name=airbyte/source-faker diff --git a/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml b/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml index 8e505bb363ad1..ca177ab8d1a68 100644 --- a/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-faker/acceptance-test-config.yml @@ -8,7 +8,7 @@ tests: - config_path: "secrets/config.json" status: "succeed" - config_path: "integration_tests/invalid_config.json" - status: "exception" + status: "failed" discovery: - config_path: "secrets/config.json" basic_read: diff --git a/airbyte-integrations/connectors/source-faker/setup.py b/airbyte-integrations/connectors/source-faker/setup.py index ab62499037f54..e6fe864c12655 100644 --- a/airbyte-integrations/connectors/source-faker/setup.py +++ b/airbyte-integrations/connectors/source-faker/setup.py @@ -5,10 +5,10 @@ from setuptools import find_packages, setup -MAIN_REQUIREMENTS = ["airbyte-cdk~=0.1", "mimesis==6.1.1"] +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.2", "mimesis==6.1.1"] TEST_REQUIREMENTS = [ - "pytest~=6.1", + "pytest~=7.0", "source-acceptance-test", ] diff --git a/airbyte-integrations/connectors/source-faker/source_faker/source.py b/airbyte-integrations/connectors/source-faker/source_faker/source.py index 6e664751df249..4a81b6337cb45 100644 --- a/airbyte-integrations/connectors/source-faker/source_faker/source.py +++ b/airbyte-integrations/connectors/source-faker/source_faker/source.py @@ -13,13 +13,17 @@ from airbyte_cdk.models import ( AirbyteCatalog, AirbyteConnectionStatus, + AirbyteEstimateTraceMessage, AirbyteLogMessage, AirbyteMessage, AirbyteRecordMessage, AirbyteStateMessage, AirbyteStream, + AirbyteTraceMessage, 
ConfiguredAirbyteCatalog, + EstimateType, Status, + TraceType, Type, ) from airbyte_cdk.sources import Source @@ -42,7 +46,10 @@ def check(self, logger: AirbyteLogger, config: Dict[str, any]) -> AirbyteConnect """ # As this is an in-memory source, it always succeeds - return AirbyteConnectionStatus(status=Status.SUCCEEDED) + if type(config["count"]) == int or type(config["count"]) == float: + return AirbyteConnectionStatus(status=Status.SUCCEEDED) + else: + return AirbyteConnectionStatus(status=Status.FAILED) def discover(self, logger: AirbyteLogger, config: Dict[str, any]) -> AirbyteCatalog: """ @@ -136,6 +143,10 @@ def read( records_in_sync = 0 records_in_page = 0 + users_estimate = count - cursor + yield generate_estimate(stream.stream.name, users_estimate, 450) + yield generate_estimate("Purchases", users_estimate * 1.5, 230) # a fuzzy guess, some users have purchases, some don't + for i in range(cursor, count): user = generate_user(person, dt, i) yield generate_record(stream, user) @@ -162,6 +173,7 @@ def read( elif stream.stream.name == "Products": products = generate_products() + yield generate_estimate(stream.stream.name, len(products), 180) for p in products: yield generate_record(stream, p) yield generate_state(state, stream, {"product_count": len(products)}) @@ -204,6 +216,14 @@ def log_stream(stream_name: str): ) +def generate_estimate(stream_name: str, total: int, bytes_per_row: int): + emitted_at = int(datetime.datetime.now().timestamp() * 1000) + estimate = AirbyteEstimateTraceMessage( + type=EstimateType.STREAM, name=stream_name, row_estimate=round(total), byte_estimate=round(total * bytes_per_row) + ) + return AirbyteMessage(type=Type.TRACE, trace=AirbyteTraceMessage(type=TraceType.ESTIMATE, emitted_at=emitted_at, estimate=estimate)) + + def generate_state(state: Dict[str, any], stream: any, data: any): state[ stream.stream.name diff --git a/airbyte-integrations/connectors/source-faker/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-faker/unit_tests/unit_test.py index 0db54325bffaa..68a4351ba2b5c 100644 --- a/airbyte-integrations/connectors/source-faker/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/source-faker/unit_tests/unit_test.py @@ -44,7 +44,13 @@ def test_read_small_random_data(): logger = None config = {"count": 10} catalog = ConfiguredAirbyteCatalog( - streams=[{"stream": {"name": "Users", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}] + streams=[ + { + "stream": {"name": "Users", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + } + ] ) state = {} iterator = source.read(logger, config, catalog, state) @@ -70,8 +76,16 @@ def test_read_big_random_data(): config = {"count": 1000, "records_per_slice": 100, "records_per_sync": 1000} catalog = ConfiguredAirbyteCatalog( streams=[ - {"stream": {"name": "Users", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}, - {"stream": {"name": "Products", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}, + { + "stream": {"name": "Users", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + }, + { + "stream": {"name": "Products", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + }, ] ) state = {} @@ -98,9 +112,21 @@ def 
test_with_purchases(): config = {"count": 1000, "records_per_sync": 1000} catalog = ConfiguredAirbyteCatalog( streams=[ - {"stream": {"name": "Users", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}, - {"stream": {"name": "Products", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}, - {"stream": {"name": "Purchases", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}, + { + "stream": {"name": "Users", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + }, + { + "stream": {"name": "Products", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + }, + { + "stream": {"name": "Purchases", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + }, ] ) state = {} @@ -128,7 +154,13 @@ def test_sync_ends_with_limit(): logger = None config = {"count": 100, "records_per_sync": 5} catalog = ConfiguredAirbyteCatalog( - streams=[{"stream": {"name": "Users", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}] + streams=[ + { + "stream": {"name": "Users", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + } + ] ) state = {} iterator = source.read(logger, config, catalog, state) @@ -157,7 +189,13 @@ def test_read_with_seed(): logger = None config = {"count": 1, "seed": 100} catalog = ConfiguredAirbyteCatalog( - streams=[{"stream": {"name": "Users", "json_schema": {}}, "sync_mode": "full_refresh", "destination_sync_mode": "overwrite"}] + streams=[ + { + "stream": {"name": "Users", "json_schema": {}, "supported_sync_modes": ["full_refresh"]}, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite", + } + ] ) state = {} iterator = source.read(logger, config, catalog, state) diff --git a/airbyte-integrations/connectors/source-fastbill/.dockerignore b/airbyte-integrations/connectors/source-fastbill/.dockerignore new file mode 100644 index 0000000000000..501945bc2f715 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_fastbill +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-fastbill/Dockerfile b/airbyte-integrations/connectors/source-fastbill/Dockerfile new file mode 100644 index 0000000000000..796bd1df115e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_fastbill ./source_fastbill + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-fastbill diff --git a/airbyte-integrations/connectors/source-fastbill/README.md b/airbyte-integrations/connectors/source-fastbill/README.md new file mode 100644 index 0000000000000..6f41c83e8f717 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/README.md @@ -0,0 +1,132 @@ +# Fastbill Source + +This is the repository for the Fastbill source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/fastbill). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-fastbill:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/fastbill) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_fastbill/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source fastbill test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-fastbill:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-fastbill:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-fastbill:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-fastbill:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-fastbill:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-fastbill:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-fastbill:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-fastbill:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. 
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-fastbill/acceptance-test-config.yml b/airbyte-integrations/connectors/source-fastbill/acceptance-test-config.yml new file mode 100644 index 0000000000000..f49f10d2c8c5c --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-fastbill:dev +tests: + spec: + - spec_path: "source_fastbill/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["products","recurring_invoices"] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-fastbill/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-fastbill/acceptance-test-docker.sh new file mode 100755 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-fastbill/build.gradle b/airbyte-integrations/connectors/source-fastbill/build.gradle new file mode 100644 index 0000000000000..a23711c3afee8 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_fastbill' +} diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/__init__.py b/airbyte-integrations/connectors/source-fastbill/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-fastbill/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..8890ca08332df --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "customers": { + "CUSTOMER_TYPE": 12 + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-fastbill/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-fastbill/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..3e36914dbc8f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "recurring_invoices", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "revenues", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-fastbill/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..27bdf1f2dc26c --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "badkeeeey", + "username": "wrong_username" +} diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-fastbill/integration_tests/sample_config.json new file mode 100644 index 0000000000000..ae6a028d17145 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "username": "", + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-fastbill/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-fastbill/integration_tests/sample_state.json new file mode 100644 index 0000000000000..7a9ca5814ba87 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-fastbill/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "customers": { + "CUSTOMER_TYPE": "strings" + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/main.py b/airbyte-integrations/connectors/source-fastbill/main.py new file mode 100644 index 0000000000000..9b1acb2e2026e --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_fastbill import SourceFastbill + +if __name__ == "__main__": + source = SourceFastbill() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-fastbill/requirements.txt b/airbyte-integrations/connectors/source-fastbill/requirements.txt new file mode 100644 index 0000000000000..78140e52009f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-fastbill/setup.py b/airbyte-integrations/connectors/source-fastbill/setup.py new file mode 100644 index 0000000000000..15c8044979eac --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/setup.py @@ -0,0 +1,25 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.4", +] + +TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock~=3.6.1", "source-acceptance-test", "responses~=0.21.0"] + +setup( + name="source_fastbill", + description="Source implementation for Fastbill.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/__init__.py b/airbyte-integrations/connectors/source-fastbill/source_fastbill/__init__.py new file mode 100644 index 0000000000000..d1caf1ad30261 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceFastbill + +__all__ = ["SourceFastbill"] diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/helpers.py b/airbyte-integrations/connectors/source-fastbill/source_fastbill/helpers.py new file mode 100644 index 0000000000000..b3527c0e254e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/helpers.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
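+#
+# Pagination helpers for the Fastbill API. Every request is a POST whose body
+# carries SERVICE ("<endpoint>.get"), FILTER and OFFSET. get_next_page_token
+# inspects the previous response: when a full page of API_OFFSET_LIMIT records
+# came back, it returns a new request body with OFFSET advanced by that limit;
+# otherwise it returns None and pagination stops.
+#
+# Illustration with hypothetical values: a customers page holding exactly 100
+# records and REQUEST.OFFSET == 0 yields the next request body
+#   {"SERVICE": "customer.get", "FILTER": {}, "OFFSET": 100}
+# while a shorter page ends the stream.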
+# + + +def req_body(offset, endpoint: str): + return {"SERVICE": f"{endpoint}.get", "FILTER": {}, "OFFSET": offset} + + +def get_next_page_token(response, response_key: str, API_OFFSET_LIMIT: int, endpoint: str): + response = response.json() + offset = response["REQUEST"]["OFFSET"] if response["REQUEST"]["OFFSET"] >= 0 else None + if offset is None: + response_request = response["REQUEST"]["OFFSET"] + raise Exception(f"No valid offset value found:{response_request}") + + if len(response["RESPONSE"][response_key]) == API_OFFSET_LIMIT: + return req_body(offset + API_OFFSET_LIMIT, endpoint) + return None + + +def get_request_body_json(next_page_token, endpoint): + if next_page_token: + return next_page_token + else: + return {"SERVICE": f"{endpoint}.get", "FILTER": {}, "OFFSET": 0} diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/customers.json b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/customers.json new file mode 100644 index 0000000000000..c3c846c5a517a --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/customers.json @@ -0,0 +1,160 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "CUSTOMER_ID": { + "type": "string" + }, + "CUSTOMER_NUMBER": { + "type": "string" + }, + "DAYS_FOR_PAYMENT": { + "type": "string", + "empty": true + }, + "CREATED": { + "type": "string", + "format": "date-time" + }, + "PAYMENT_TYPE": { + "type": "string", + "empty": true + }, + "BANK_NAME": { + "type": "string", + "empty": true + }, + "BANK_ACCOUNT_NUMBER": { + "type": "string", + "empty": true + }, + "BANK_CODE": { + "type": "string", + "empty": true + }, + "BANK_ACCOUNT_OWNER": { + "type": "string", + "empty": true + }, + "BANK_IBAN": { + "type": "string", + "empty": true + }, + "BANK_BIC": { + "type": "string", + "empty": true + }, + "BANK_ACCOUNT_MANDATE_REFERENCE": { + "type": "string", + "empty": true + }, + "SHOW_PAYMENT_NOTICE": { + "type": "string", + "empty": true + }, + "CUSTOMER_ACCOUNT": { + "type": "string", + "empty": true + }, + "CUSTOMER_TYPE": { + "type": "string", + "empty": true + }, + "TOP": { + "type": "string", + "empty": true + }, + "NEWSLETTER_OPTIN": { + "type": "string", + "empty": true + }, + "ORGANIZATION": { + "type": "string", + "empty": true + }, + "POSITION": { + "type": "string", + "empty": true + }, + "ACADEMIC_DEGREE": { + "type": "string", + "empty": true + }, + "SALUTATION": { + "type": "string", + "empty": true + }, + "FIRST_NAME": { + "type": "string", + "empty": true + }, + "LAST_NAME": { + "type": "string", + "empty": true + }, + "ADDRESS": { + "type": "string", + "empty": true + }, + "ADDRESS_2": { + "type": "string", + "empty": true + }, + "ZIPCODE": { + "type": "string", + "empty": true + }, + "CITY": { + "type": "string", + "empty": true + }, + "COUNTRY_CODE": { + "type": "string" + }, + "SECONDARY_ADDRESS": { + "type": "string", + "empty": true + }, + "PHONE": { + "type": "string", + "empty": true + }, + "PHONE_2": { + "type": "string", + "empty": true + }, + "FAX": { + "type": "string", + "empty": true + }, + "MOBILE": { + "type": "string", + "empty": true + }, + "EMAIL": { + "type": "string", + "empty": true + }, + "WEBSITE": { + "type": "string", + "empty": true + }, + "VAT_ID": { + "type": "string" + }, + "CURRENCY_CODE": { + "type": "string" + }, + "LASTUPDATE": { + "type": "string" + }, + "TAGS": { + "type": "string", + "empty": true + }, + "DOCUMENT_HISTORY_URL": { + "type": "string", + "empty": true 
+ } + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/invoices.json b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/invoices.json new file mode 100644 index 0000000000000..4ff47f38dc9e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/invoices.json @@ -0,0 +1,231 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "INVOICE_ID": { + "type": "string" + }, + "TYPE": { + "type": "string" + }, + "CUSTOMER_ID": { + "type": "string" + }, + "CUSTOMER_NUMBER": { + "type": "string" + }, + "CUSTOMER_COSTCENTER_ID": { + "type": "string" + }, + "CONTACT_ID": { + "type": "string" + }, + "PROJECT_ID": { + "type": "string" + }, + "CURRENCY_CODE": { + "type": "string" + }, + "DELIVERY_DATE": { + "type": "string" + }, + "INVOICE_TITLE": { + "type": "string", + "empty": true + }, + "CASH_DISCOUNT_PERCENT": { + "type": "string" + }, + "CASH_DISCOUNT_DAYS": { + "type": "string" + }, + "SUB_TOTAL": { + "type": "number" + }, + "VAT_TOTAL": { + "type": "number" + }, + "VAT_CASE": { + "type": "string" + }, + "VAT_ITEMS": { + "type": ["null", "array"], + "items": { + "type": "object", + "properties": { + "VAT_PERCENT": { + "type": "integer" + }, + "COMPLETE_NET": { + "type": "number" + }, + "VAT_VALUE": { + "type": "number" + } + } + } + }, + "ITEMS": { + "type": ["null", "array"], + "items": { + "type": "object", + "properties": { + "INVOICE_ITEM_ID": { + "type": "integer" + }, + "ARTICLE_NUMBER": { + "type": "string" + }, + "DESCRIPTION": { + "type": "string", + "empty": true + }, + "QUANTITY": { + "type": "integer" + }, + "UNIT_PRICE": { + "type": "number" + }, + "VAT_PERCENT": { + "type": "integer" + }, + "VAT_VALUE": { + "type": "number" + }, + "COMPLETE_NET": { + "type": "number" + }, + "COMPLETE_GROSS": { + "type": "number" + }, + "CATEGORY": { + "type": ["null", "array"], + "items": {} + }, + "CATEGORY_ID": { + "type": ["null", "array"], + "items": {} + }, + "SORT_ORDER": { + "type": "integer" + } + } + } + }, + "TOTAL": { + "type": "number" + }, + "ORGANIZATION": { + "type": "string", + "empty": true + }, + "NOTE": { + "type": "string", + "empty": true + }, + "SALUTATION": { + "type": "string", + "empty": true + }, + "FIRST_NAME": { + "type": "string", + "empty": true + }, + "LAST_NAME": { + "type": "string", + "empty": true + }, + "ADDRESS": { + "type": "string", + "empty": true + }, + "ADDRESS_2": { + "type": "string", + "empty": true + }, + "ZIPCODE": { + "type": "string", + "empty": true + }, + "CITY": { + "type": "string", + "empty": true + }, + "SERVICE_PERIOD_START": { + "type": "string" + }, + "SERVICE_PERIOD_END": { + "type": "string" + }, + "PAYMENT_TYPE": { + "type": "string", + "empty": true + }, + "BANK_NAME": { + "type": "string", + "empty": true + }, + "BANK_ACCOUNT_NUMBER": { + "type": "string", + "empty": true + }, + "BANK_CODE": { + "type": "string", + "empty": true + }, + "BANK_ACCOUNT_OWNER": { + "type": "string", + "empty": true + }, + "BANK_IBAN": { + "type": "string", + "empty": true + }, + "BANK_BIC": { + "type": "string", + "empty": true + }, + "COUNTRY_CODE": { + "type": "string" + }, + "VAT_ID": { + "type": "string" + }, + "TEMPLATE_ID": { + "type": "string", + "empty": true + }, + "INVOICE_NUMBER": { + "type": "string" + }, + "INTROTEXT": { + "type": "string", + "empty": true + }, + "PAID_DATE": { + "type": "string" + }, + "IS_CANCELED": { + "type": "string" + }, + "INVOICE_DATE": { + "type": "string" + }, + "DUE_DATE": 
{ + "type": "string" + }, + "PAYMENT_INFO": { + "type": "string" + }, + "PAYMENTS": { + "type": ["null", "array"], + "items": {} + }, + "LASTUPDATE": { + "type": "string" + }, + "DOCUMENT_URL": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/products.json b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/products.json new file mode 100644 index 0000000000000..41103e22ab2bc --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/products.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ARTICLE_ID": { + "type": "string" + }, + "ARTICLE_NUMBER": { + "type": "string" + }, + "TITLE": { + "type": "string" + }, + "DESCRIPTION": { + "type": "string", + "empty": true + }, + "UNIT": { + "type": "string" + }, + "UNIT_PRICE": { + "type": "string" + }, + "CURRENCY_CODE": { + "type": "string" + }, + "VAT_PERCENT": { + "type": "string" + }, + "IS_GROSS": { + "type": "number" + }, + "TAGS": { + "type": ["string", "null"], + "empty": true + } + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/recurring_invoices.json b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/recurring_invoices.json new file mode 100644 index 0000000000000..36bdc11c09e71 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/recurring_invoices.json @@ -0,0 +1,210 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": ["object"], + "properties": { + "INVOICE_ID": { + "type": ["string"] + }, + "TYPE": { + "type": ["null", "string"] + }, + "CUSTOMER_ID": { + "type": ["null", "string"] + }, + "CUSTOMER_NUMBER": { + "type": ["null", "string"] + }, + "CUSTOMER_COSTCENTER_ID": { + "type": ["null", "string"] + }, + "CONTACT_ID": { + "type": ["null", "string"] + }, + "PROJECT_ID": { + "type": ["null", "string"] + }, + "CURRENCY_CODE": { + "type": ["null", "string"] + }, + "DELIVERY_DATE": { + "type": ["null", "string"] + }, + "INVOICE_TITLE": { + "type": ["null", "string"] + }, + "CASH_DISCOUNT_PERCENT": { + "type": ["null", "string"] + }, + "CASH_DISCOUNT_DAYS": { + "type": ["null", "string"] + }, + "SUB_TOTAL": { + "type": ["null", "number"] + }, + "VAT_TOTAL": { + "type": ["null", "number"] + }, + "VAT_CASE": { + "type": ["null", "string"] + }, + "VAT_ITEMS": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "VAT_PERCENT": { + "type": ["null", "number"] + }, + "COMPLETE_NET": { + "type": ["null", "number"] + }, + "VAT_VALUE": { + "type": ["null", "number"] + } + } + } + }, + "ITEMS": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "INVOICE_ITEM_ID": { + "type": ["null", "number"] + }, + "ARTICLE_NUMBER": { + "type": ["null", "string"] + }, + "DESCRIPTION": { + "type": ["null", "string"] + }, + "QUANTITY": { + "type": ["null", "number"] + }, + "UNIT_PRICE": { + "type": ["null", "number"] + }, + "VAT_PERCENT": { + "type": ["null", "number"] + }, + "VAT_VALUE": { + "type": ["null", "number"] + }, + "COMPLETE_NET": { + "type": ["null", "number"] + }, + "COMPLETE_GROSS": { + "type": ["null", "number"] + }, + "CATEGORY": { + "type": ["null", "string", "array"], + "empty": true, + "items": {} + }, + "CATEGORY_ID": { + "type": ["null", "integer", "array"], + "items": {} + }, + "SORT_ORDER": { + "type": ["null", "number"] + } + } + } + }, + 
"TOTAL": { + "type": ["null", "number"] + }, + "ORGANIZATION": { + "type": ["null", "string"], + "empty": true + }, + "NOTE": { + "type": ["null", "string"], + "empty": true + }, + "SALUTATION": { + "type": ["null", "string"], + "empty": true + }, + "FIRST_NAME": { + "type": ["null", "string"], + "empty": true + }, + "LAST_NAME": { + "type": ["null", "string"], + "empty": true + }, + "ADDRESS": { + "type": ["null", "string"], + "empty": true + }, + "ADDRESS_2": { + "type": ["null", "string"], + "empty": true + }, + "ZIPCODE": { + "type": ["null", "string"], + "empty": true + }, + "CITY": { + "type": ["null", "string"], + "empty": true + }, + "SERVICE_PERIOD_START": { + "type": ["null", "string"] + }, + "SERVICE_PERIOD_END": { + "type": ["null", "string"] + }, + "PAYMENT_TYPE": { + "type": ["null", "string"] + }, + "BANK_NAME": { + "type": ["null", "string"], + "empty": true + }, + "BANK_ACCOUNT_NUMBER": { + "type": ["null", "string"], + "empty": true + }, + "BANK_CODE": { + "type": ["null", "string"], + "empty": true + }, + "BANK_ACCOUNT_OWNER": { + "type": ["null", "string"], + "empty": true + }, + "BANK_IBAN": { + "type": ["null", "string"], + "empty": true + }, + "BANK_BIC": { + "type": ["null", "string"], + "empty": true + }, + "TEMPLATE_ID": { + "type": ["null", "string"], + "empty": true + }, + "OCCURENCES": { + "type": ["null", "string"] + }, + "FREQUENCY": { + "type": ["null", "string"] + }, + "START_DATE": { + "type": ["null", "string"] + }, + "EMAIL_NOTIFY": { + "type": ["null", "string"] + }, + "OUTPUT_TYPE": { + "type": ["null", "string"] + }, + "INTROTEXT": { + "type": ["null", "string"], + "empty": true + } + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/revenues.json b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/revenues.json new file mode 100644 index 0000000000000..b3c9bbae07f71 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/schemas/revenues.json @@ -0,0 +1,251 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "INVOICE_ID": { + "type": ["string"] + }, + "TYPE": { + "type": ["null", "string"] + }, + "CUSTOMER_ID": { + "type": ["null", "string"] + }, + "CUSTOMER_NUMBER": { + "type": ["null", "string"] + }, + "CUSTOMER_COSTCENTER_ID": { + "type": ["null", "string"] + }, + "CONTACT_ID": { + "type": ["null", "string"] + }, + "PROJECT_ID": { + "type": ["null", "string"] + }, + "CURRENCY_CODE": { + "type": ["null", "string"] + }, + "DELIVERY_DATE": { + "type": ["null", "string"] + }, + "INVOICE_TITLE": { + "type": ["null", "string"], + "empty": true + }, + "CASH_DISCOUNT_PERCENT": { + "type": ["null", "string"] + }, + "CASH_DISCOUNT_DAYS": { + "type": ["null", "string"] + }, + "SUB_TOTAL": { + "type": ["null", "integer"] + }, + "VAT_TOTAL": { + "type": ["null", "number"] + }, + "VAT_CASE": { + "type": ["null", "string"] + }, + "VAT_ITEMS": { + "type": ["array", "null"], + "items": { + "type": "object", + "properties": { + "VAT_PERCENT": { + "type": ["null", "integer"] + }, + "COMPLETE_NET": { + "type": ["null", "integer"] + }, + "VAT_VALUE": { + "type": ["null", "number"] + } + } + } + }, + "ITEMS": { + "type": ["array", "null"], + "items": { + "type": "object", + "properties": { + "INVOICE_ITEM_ID": { + "type": ["null", "integer"] + }, + "ARTICLE_NUMBER": { + "type": ["null", "string"] + }, + "DESCRIPTION": { + "type": ["null", "string"] + }, + "QUANTITY": { + "type": ["null", "integer"] + }, + "UNIT_PRICE": { + "type": 
["null", "integer"] + }, + "VAT_PERCENT": { + "type": ["null", "integer"] + }, + "VAT_VALUE": { + "type": ["null", "number"] + }, + "COMPLETE_NET": { + "type": ["null", "integer"] + }, + "COMPLETE_GROSS": { + "type": ["null", "number"] + }, + "CATEGORY": { + "type": ["null", "array", "string"], + "empty": true + }, + "CATEGORY_ID": { + "type": ["null", "array", "integer"] + }, + "SORT_ORDER": { + "type": ["null", "integer"] + } + } + } + }, + "TOTAL": { + "type": ["number", "null"] + }, + "ORGANIZATION": { + "type": ["null", "string"], + "empty": true + }, + "NOTE": { + "type": ["null", "string"], + "empty": true + }, + "SALUTATION": { + "type": ["null", "string"], + "empty": true + }, + "FIRST_NAME": { + "type": ["null", "string"], + "empty": true + }, + "LAST_NAME": { + "type": ["null", "string"], + "empty": true + }, + "ADDRESS": { + "type": ["null", "string"], + "empty": true + }, + "ADDRESS_2": { + "type": ["null", "string"], + "empty": true + }, + "ZIPCODE": { + "type": ["null", "string"], + "empty": true + }, + "CITY": { + "type": ["null", "string"], + "empty": true + }, + "SERVICE_PERIOD_START": { + "type": ["null", "string"], + "empty": true + }, + "SERVICE_PERIOD_END": { + "type": ["null", "string"], + "empty": true + }, + "PAYMENT_TYPE": { + "type": ["null", "string"], + "empty": true + }, + "BANK_NAME": { + "type": ["null", "string"], + "empty": true + }, + "BANK_ACCOUNT_NUMBER": { + "type": ["null", "string"], + "empty": true + }, + "BANK_CODE": { + "type": ["null", "string"], + "empty": true + }, + "BANK_ACCOUNT_OWNER": { + "type": ["null", "string"], + "empty": true + }, + "BANK_IBAN": { + "type": ["null", "string"], + "empty": true + }, + "BANK_BIC": { + "type": ["null", "string"], + "empty": true + }, + "COUNTRY_CODE": { + "type": ["null", "string"] + }, + "VAT_ID": { + "type": ["null", "string"] + }, + "TEMPLATE_ID": { + "type": ["null", "string"], + "empty": true + }, + "INVOICE_NUMBER": { + "type": ["null", "string"] + }, + "INTROTEXT": { + "type": ["null", "string"], + "empty": true + }, + "PAID_DATE": { + "type": ["null", "string"] + }, + "IS_CANCELED": { + "type": ["null", "string"] + }, + "INVOICE_DATE": { + "type": ["null", "string"] + }, + "DUE_DATE": { + "type": ["null", "string"] + }, + "PAYMENT_INFO": { + "type": ["null", "string"] + }, + "PAYMENTS": { + "type": ["null", "array"], + "items": { + "PAYMENT_ID": { + "type": ["string", "null"] + }, + "DATE": { + "type": ["string", "null"] + }, + "AMOUNT": { + "type": ["string", "null"] + }, + "CURRENCY_CODE": { + "type": ["string", "null"] + }, + "NOTE": { + "type": ["string", "null"], + "empty": true + }, + "TYPE": { + "type": ["string", "null"] + } + } + }, + "LASTUPDATE": { + "type": ["null", "string"] + }, + "DOCUMENT_URL": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-fastbill/source_fastbill/source.py b/airbyte-integrations/connectors/source-fastbill/source_fastbill/source.py new file mode 100644 index 0000000000000..d1ca03432506f --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/source.py @@ -0,0 +1,121 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple, Union + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from source_fastbill.helpers import get_next_page_token, get_request_body_json + + +class FastbillStream(HttpStream, ABC): + url_base = " https://my.fastbill.com/api/1.0/api.php" + API_OFFSET_LIMIT = 100 + + def __init__(self, *args, username: str = None, api_key: str = None, **kwargs): + super().__init__(*args, **kwargs) + # self.endpoint = None + self._username = username + self._api_key = api_key + # self.data = None + + @property + def http_method(self) -> str: + return "POST" + + def path( + self, + *, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> str: + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + return None + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return {"Content-type": "application/json"} + + def request_body_json( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Optional[Union[Mapping, str]]: + return get_request_body_json(next_page_token, endpoint=self.endpoint) + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return get_next_page_token( + response=response, response_key=self.data, API_OFFSET_LIMIT=self.API_OFFSET_LIMIT, endpoint=self.endpoint + ) + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + yield from response.json().get("RESPONSE", {}).get(self.data, []) + + +class Invoices(FastbillStream): + primary_key = "INVOICE_ID" + data = "INVOICES" + endpoint = "invoice" + + +class RecurringInvoices(FastbillStream): + primary_key = "INVOICE_ID" + data = "INVOICES" + endpoint = "recurring" + + +class Products(FastbillStream): + primary_key = "ARTICLE_ID" + data = "ARTICLES" + endpoint = "article" + + +class Revenues(FastbillStream): + primary_key = "INVOICE_ID" + data = "REVENUES" + endpoint = "revenue" + + +class Customers(FastbillStream): + primary_key = "CUSTOMER_ID" + data = "CUSTOMERS" + endpoint = "customer" + + +# Source +class SourceFastbill(AbstractSource): + def get_basic_auth(self, config: Mapping[str, Any]) -> requests.auth.HTTPBasicAuth: + return requests.auth.HTTPBasicAuth(config["username"], config["api_key"]) + + def check_connection(self, logger, config) -> Tuple[bool, any]: + try: + auth = self.get_basic_auth(config) + records = Customers(auth, **config).read_records(sync_mode=SyncMode.full_refresh) + next(records, None) + return True, None + except Exception as error: + return False, f"Unable to connect to Fastbill API with the provided credentials - {repr(error)}" + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + + auth = self.get_basic_auth(config) + return [ + Customers(auth, **config), + Invoices(auth, **config), + RecurringInvoices(auth, **config), + Products(auth, **config), + Revenues(auth, **config), + ] diff --git 
a/airbyte-integrations/connectors/source-fastbill/source_fastbill/spec.yaml b/airbyte-integrations/connectors/source-fastbill/source_fastbill/spec.yaml new file mode 100644 index 0000000000000..7352591fdcf46 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/source_fastbill/spec.yaml @@ -0,0 +1,18 @@ +documentationUrl: "https://docs.airbyte.com/integrations/sources/fastbill" +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Fastbill Spec + type: object + required: + - username + - api_key + properties: + username: + title: Username + type: string + description: Username for Fastbill account + api_key: + title: API Key + type: string + description: Fastbill API key + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-fastbill/unit_tests/__init__.py b/airbyte-integrations/connectors/source-fastbill/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-fastbill/unit_tests/test_source.py b/airbyte-integrations/connectors/source-fastbill/unit_tests/test_source.py new file mode 100644 index 0000000000000..1c71b6b390551 --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/unit_tests/test_source.py @@ -0,0 +1,39 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +import responses +from source_fastbill.source import SourceFastbill + + +@responses.activate +def test_check_connection(mocker): + url = "https://my.fastbill.com/api/1.0/api.php" + source = SourceFastbill() + logger_mock, config_mock = MagicMock(), MagicMock() + responses.add( + responses.POST, + url, + json={ + "REQUEST": { + "OFFSET": 0, + "FILTER": [], + "LIMIT": 0, + }, + "RESPONSE": { + "CUSTOMERS": "", + }, + }, + ) + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceFastbill() + config_mock = MagicMock() + streams = source.streams(config_mock) + # TODO: replace this with your streams number + expected_streams_number = 5 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-fastbill/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-fastbill/unit_tests/test_streams.py new file mode 100644 index 0000000000000..c992d8f11167e --- /dev/null +++ b/airbyte-integrations/connectors/source-fastbill/unit_tests/test_streams.py @@ -0,0 +1,73 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
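+#
+# These tests exercise FastbillStream directly: the patch_base_class fixture
+# clears __abstractmethods__ and stubs path/primary_key so the abstract base
+# class can be instantiated without a concrete stream.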
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_fastbill.source import FastbillStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(FastbillStream, "path", "v0/example_endpoint") + mocker.patch.object(FastbillStream, "primary_key", "test_primary_key") + mocker.patch.object(FastbillStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = FastbillStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request parameters + expected_params = None + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = FastbillStream() + stream.endpoint = "test_endpoint" + stream.data = "test_data" + inputs = {"response": MagicMock()} + + inputs["response"].json.return_value = {"REQUEST": {"OFFSET": 0}, "RESPONSE": {"test_data": []}} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_request_headers(patch_base_class): + stream = FastbillStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {"Content-type": "application/json"} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = FastbillStream() + expected_method = "POST" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = FastbillStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = FastbillStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-fauna/Dockerfile b/airbyte-integrations/connectors/source-fauna/Dockerfile index e200d1ebb1d7f..3762d70736b4b 100644 --- a/airbyte-integrations/connectors/source-fauna/Dockerfile +++ b/airbyte-integrations/connectors/source-fauna/Dockerfile @@ -34,5 +34,5 @@ COPY source_fauna ./source_fauna ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=dev +LABEL io.airbyte.version=0.1.0 LABEL io.airbyte.name=airbyte/source-fauna diff --git a/airbyte-integrations/connectors/source-fauna/unit_tests/database_test.py b/airbyte-integrations/connectors/source-fauna/unit_tests/database_test.py index 0db45365f874e..8ebaf61d675fa 100644 --- a/airbyte-integrations/connectors/source-fauna/unit_tests/database_test.py +++ b/airbyte-integrations/connectors/source-fauna/unit_tests/database_test.py @@ -228,7 +228,7 @@ def run_removes_order_test(source: SourceFauna, logger, stream: ConfiguredAirbyt def run_general_remove_test(source: SourceFauna, logger): stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="deletions_test", json_schema={}), + stream=AirbyteStream(name="deletions_test", json_schema={}, 
supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh]), sync_mode=SyncMode.incremental, destination_sync_mode=DestinationSyncMode.append_dedup, ) @@ -478,7 +478,7 @@ def run_updates_test(db_data, source: SourceFauna, logger, catalog: ConfiguredAi def run_test(db_data, source: SourceFauna): logger = mock_logger() stream = ConfiguredAirbyteStream( - stream=AirbyteStream(name="foo", json_schema={}), + stream=AirbyteStream(name="foo", json_schema={}, supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh]), sync_mode=SyncMode.incremental, destination_sync_mode=DestinationSyncMode.append_dedup, ) diff --git a/airbyte-integrations/connectors/source-fauna/unit_tests/incremental_test.py b/airbyte-integrations/connectors/source-fauna/unit_tests/incremental_test.py index 16747e438de01..6f976bd5b6676 100644 --- a/airbyte-integrations/connectors/source-fauna/unit_tests/incremental_test.py +++ b/airbyte-integrations/connectors/source-fauna/unit_tests/incremental_test.py @@ -115,6 +115,7 @@ def read_removes_hardcoded( stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ) ] @@ -226,6 +227,7 @@ def read_removes_hardcoded( stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ) ] @@ -683,6 +685,7 @@ def query_hardcoded(expr): stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ), CollectionConfig(page_size=PAGE_SIZE), @@ -719,6 +722,7 @@ def query_hardcoded(expr): stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ), CollectionConfig(page_size=PAGE_SIZE), @@ -741,6 +745,7 @@ def query_hardcoded(expr): stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ), CollectionConfig(page_size=PAGE_SIZE), @@ -854,6 +859,7 @@ def query_hardcoded(expr): stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ), CollectionConfig(page_size=PAGE_SIZE), @@ -885,6 +891,7 @@ def query_hardcoded(expr): stream=AirbyteStream( name="my_stream_name", json_schema={}, + supported_sync_modes=[SyncMode.incremental, SyncMode.full_refresh] ), ), CollectionConfig(page_size=PAGE_SIZE), diff --git a/airbyte-integrations/connectors/source-file-secure/Dockerfile b/airbyte-integrations/connectors/source-file-secure/Dockerfile index 0bfb5bbdbe95c..afac736853b60 100644 --- a/airbyte-integrations/connectors/source-file-secure/Dockerfile +++ b/airbyte-integrations/connectors/source-file-secure/Dockerfile @@ -1,4 +1,4 @@ -FROM airbyte/source-file:0.2.28 +FROM airbyte/source-file:0.2.31 WORKDIR /airbyte/integration_code COPY source_file_secure ./source_file_secure @@ -9,5 +9,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.28 +LABEL io.airbyte.version=0.2.31 LABEL io.airbyte.name=airbyte/source-file-secure diff --git a/airbyte-integrations/connectors/source-file-secure/acceptance-test-config.yml b/airbyte-integrations/connectors/source-file-secure/acceptance-test-config.yml index ad5aeb0e006e1..97ed8432dc5f9 100644 --- a/airbyte-integrations/connectors/source-file-secure/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-file-secure/acceptance-test-config.yml @@ -15,7 +15,7 @@ tests: status: "succeed" # for local should be failed - config_path: "integration_tests/local_config.json" - status: "exception" + status: "failed" discovery: # for https diff --git a/airbyte-integrations/connectors/source-file/Dockerfile b/airbyte-integrations/connectors/source-file/Dockerfile index 6a54c7114f161..d959925310afb 100644 --- a/airbyte-integrations/connectors/source-file/Dockerfile +++ b/airbyte-integrations/connectors/source-file/Dockerfile @@ -17,5 +17,5 @@ COPY source_file ./source_file ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.28 +LABEL io.airbyte.version=0.2.31 LABEL io.airbyte.name=airbyte/source-file diff --git a/airbyte-integrations/connectors/source-file/source_file/source.py b/airbyte-integrations/connectors/source-file/source_file/source.py index a9c51c0e86543..03449f5e75456 100644 --- a/airbyte-integrations/connectors/source-file/source_file/source.py +++ b/airbyte-integrations/connectors/source-file/source_file/source.py @@ -8,6 +8,7 @@ import traceback from datetime import datetime from typing import Any, Iterable, Iterator, Mapping, MutableMapping +from urllib.parse import urlparse from airbyte_cdk import AirbyteLogger from airbyte_cdk.models import ( @@ -83,10 +84,14 @@ def _validate_and_transform(self, config: Mapping[str, Any]): try: config["reader_options"] = json.loads(config["reader_options"]) except ValueError: - raise Exception("reader_options is not valid JSON") + raise ConfigurationError("reader_options is not valid JSON") else: config["reader_options"] = {} config["url"] = dropbox_force_download(config["url"]) + + parse_result = urlparse(config["url"]) + if parse_result.netloc == "docs.google.com" and parse_result.path.lower().startswith("/spreadsheets/"): + raise ConfigurationError(f'Failed to load {config["url"]}: please use the Official Google Sheets Source connector') return config def check(self, logger, config: Mapping) -> AirbyteConnectionStatus: @@ -94,14 +99,14 @@ def check(self, logger, config: Mapping) -> AirbyteConnectionStatus: Check involves verifying that the specified file is reachable with our credentials. 
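+        Configuration problems (e.g. invalid reader_options JSON or a Google Sheets URL)
+        are caught and reported as a failed connection status.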
""" - config = self._validate_and_transform(config) + try: + config = self._validate_and_transform(config) + except ConfigurationError as e: + logger.error(str(e)) + return AirbyteConnectionStatus(status=Status.FAILED, message=str(e)) + client = self._get_client(config) source_url = client.reader.full_url - logger.info(f"Checking access to {source_url}...") - if "docs.google.com/spreadsheets" in source_url: - reason = f"Failed to load {source_url}: please use the Official Google Sheets Source connector" - logger.error(reason) - return AirbyteConnectionStatus(status=Status.FAILED, message=reason) try: with client.reader.open(): list(client.streams) diff --git a/airbyte-integrations/connectors/source-file/unit_tests/test_source.py b/airbyte-integrations/connectors/source-file/unit_tests/test_source.py index ea71245a48456..56a5a030465b2 100644 --- a/airbyte-integrations/connectors/source-file/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-file/unit_tests/test_source.py @@ -149,5 +149,14 @@ def test_discover(source, config, client): def test_check_wrong_reader_options(source, config): config["reader_options"] = '{encoding":"utf_16"}' - with pytest.raises(Exception): - source.check(logger=logger, config=config) + assert source.check(logger=logger, config=config) == AirbyteConnectionStatus( + status=Status.FAILED, message="reader_options is not valid JSON" + ) + + +def test_check_google_spreadsheets_url(source, config): + config["url"] = "https://docs.google.com/spreadsheets/d/" + assert source.check(logger=logger, config=config) == AirbyteConnectionStatus( + status=Status.FAILED, + message="Failed to load https://docs.google.com/spreadsheets/d/: please use the Official Google Sheets Source connector", + ) diff --git a/airbyte-integrations/connectors/source-freshdesk/Dockerfile b/airbyte-integrations/connectors/source-freshdesk/Dockerfile index 8f5537ca25ad4..167dbbdd26681 100644 --- a/airbyte-integrations/connectors/source-freshdesk/Dockerfile +++ b/airbyte-integrations/connectors/source-freshdesk/Dockerfile @@ -34,5 +34,5 @@ COPY source_freshdesk ./source_freshdesk ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.3.6 +LABEL io.airbyte.version=0.3.8 LABEL io.airbyte.name=airbyte/source-freshdesk diff --git a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py index ad58f56ce61bb..83877e24ab654 100644 --- a/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py +++ b/airbyte-integrations/connectors/source-freshdesk/source_freshdesk/streams.py @@ -44,20 +44,17 @@ def __init__(self, authenticator: AuthBase, config: Mapping[str, Any], *args, ** @property def url_base(self) -> str: - return parse.urljoin(f"https://{self.domain.rstrip('/')}", "/api/v2") + return parse.urljoin(f"https://{self.domain.rstrip('/')}", "/api/v2/") def backoff_time(self, response: requests.Response) -> Optional[float]: if response.status_code == requests.codes.too_many_requests: return float(response.headers.get("Retry-After", 0)) def should_retry(self, response: requests.Response) -> bool: - if isinstance(response.json(), dict): - if response.status_code == requests.codes.FORBIDDEN and response.json().get("code") == "require_feature": - self.forbidden_stream = True - setattr(self, "raise_on_http_errors", False) - self.logger.warn(f"Stream `{self.name}` is not available. 
{response.json().get('message')}") - else: - return super().should_retry(response) + if response.status_code == requests.codes.FORBIDDEN: + self.forbidden_stream = True + setattr(self, "raise_on_http_errors", False) + self.logger.warn(f"Stream `{self.name}` is not available. {response.text}") return super().should_retry(response) def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: @@ -98,8 +95,9 @@ def read_records( ) def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - data = response.json() - return {} if self.forbidden_stream else data if data else [] + if self.forbidden_stream: + return [] + return response.json() or [] class IncrementalFreshdeskStream(FreshdeskStream, IncrementalMixin): diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py index 2adde708e81d9..7e0a98057fe07 100644 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_300_page.py @@ -11,70 +11,70 @@ def responses_fixtures(): return [ { - "url": "/api/tickets?per_page=1&updated_since=2002-02-10T22%3A21%3A44Z", + "url": "/api/v2/tickets?per_page=1&updated_since=2002-02-10T22%3A21%3A44Z", "json": [{"id": 1, "updated_at": "2018-01-02T00:00:00Z"}], "headers": { - "Link": '; rel="next"' + "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&page=2&updated_since=2002-02-10T22%3A21%3A44Z", + "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2002-02-10T22%3A21%3A44Z", "json": [{"id": 2, "updated_at": "2018-02-02T00:00:00Z"}], "headers": { - "Link": '; rel="next"' + "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&updated_since=2018-02-02T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&updated_since=2018-02-02T00%3A00%3A00Z", "json": [{"id": 2, "updated_at": "2018-02-02T00:00:00Z"}], "headers": { - "Link": '; rel="next"' + "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&page=2&updated_since=2018-02-02T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2018-02-02T00%3A00%3A00Z", "json": [{"id": 3, "updated_at": "2018-03-02T00:00:00Z"}], "headers": { - "Link": '; rel="next"' + "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&updated_since=2018-03-02T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&updated_since=2018-03-02T00%3A00%3A00Z", "json": [{"id": 3, "updated_at": "2018-03-02T00:00:00Z"}], "headers": { - "Link": '; rel="next"' + "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&page=2&updated_since=2018-03-02T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2018-03-02T00%3A00%3A00Z", "json": [{"id": 4, "updated_at": "2019-01-03T00:00:00Z"}], "headers": { - "Link": '; rel="next"' + "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&updated_since=2019-01-03T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&updated_since=2019-01-03T00%3A00%3A00Z", "json": [{"id": 4, "updated_at": "2019-01-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&page=2&updated_since=2019-01-03T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2019-01-03T00%3A00%3A00Z", "json": [{"id": 5, "updated_at": "2019-02-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&updated_since=2019-02-03T00%3A00%3A00Z", + "url": 
"/api/v2/tickets?per_page=1&updated_since=2019-02-03T00%3A00%3A00Z", "json": [{"id": 5, "updated_at": "2019-02-03T00:00:00Z"}], "headers": { "Link": '; rel="next"' }, }, { - "url": "/api/tickets?per_page=1&page=2&updated_since=2019-02-03T00%3A00%3A00Z", + "url": "/api/v2/tickets?per_page=1&page=2&updated_since=2019-02-03T00%3A00%3A00Z", "json": [{"id": 6, "updated_at": "2019-03-03T00:00:00Z"}], }, ] diff --git a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py index 00315a9bc5fa2..38817b39c6138 100644 --- a/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-freshdesk/unit_tests/test_streams.py @@ -4,7 +4,7 @@ import random from typing import Any, MutableMapping -from unittest.mock import PropertyMock, patch +from unittest.mock import PropertyMock import pytest from airbyte_cdk.models import SyncMode @@ -41,6 +41,11 @@ ) +@pytest.fixture(autouse=True) +def mock_tickets_use_cache(mocker): + mocker.patch("source_freshdesk.streams.Tickets.use_cache", new_callable=PropertyMock, return_value=False) + + def _read_full_refresh(stream_instance: Stream): records = [] slices = stream_instance.stream_slices(sync_mode=SyncMode.full_refresh) @@ -84,7 +89,7 @@ def _read_incremental(stream_instance: Stream, stream_state: MutableMapping[str, ], ) def test_full_refresh(stream, resource, authenticator, config, requests_mock): - requests_mock.register_uri("GET", f"/api/{resource}", json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(25)]) + requests_mock.register_uri("GET", f"/api/v2/{resource}", json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(25)]) stream = stream(authenticator=authenticator, config=config) records = _read_full_refresh(stream) @@ -93,9 +98,9 @@ def test_full_refresh(stream, resource, authenticator, config, requests_mock): def test_full_refresh_conversations(authenticator, config, requests_mock): - requests_mock.register_uri("GET", "/api/tickets", json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(5)]) + requests_mock.register_uri("GET", "/api/v2/tickets", json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(5)]) for i in range(5): - requests_mock.register_uri("GET", f"/api/tickets/{i}/conversations", json=[{"id": x} for x in range(10)]) + requests_mock.register_uri("GET", f"/api/v2/tickets/{i}/conversations", json=[{"id": x} for x in range(10)]) stream = Conversations(authenticator=authenticator, config=config) records = _read_full_refresh(stream) @@ -105,7 +110,7 @@ def test_full_refresh_conversations(authenticator, config, requests_mock): def test_full_refresh_settings(authenticator, config, requests_mock): json_resp = {"primary_language": "en", "supported_languages": [], "portal_languages": []} - requests_mock.register_uri("GET", "/api/settings/helpdesk", json=json_resp) + requests_mock.register_uri("GET", "/api/v2/settings/helpdesk", json=json_resp) stream = Settings(authenticator=authenticator, config=config) records = _read_full_refresh(stream) @@ -126,19 +131,19 @@ def test_incremental(stream, resource, authenticator, config, requests_mock): highest_updated_at = "2022-04-25T22:00:00Z" other_updated_at = "2022-04-01T00:00:00Z" highest_index = random.randint(0, 24) - with patch(f"source_freshdesk.streams.{stream.__name__}.use_cache", new_callable=PropertyMock, return_value=False): - requests_mock.register_uri( - "GET", - f"/api/{resource}", - 
json=[{"id": x, "updated_at": highest_updated_at if x == highest_index else other_updated_at} for x in range(25)], - ) - stream = stream(authenticator=authenticator, config=config) - records, state = _read_incremental(stream, {}) + requests_mock.register_uri( + "GET", + f"/api/v2/{resource}", + json=[{"id": x, "updated_at": highest_updated_at if x == highest_index else other_updated_at} for x in range(25)], + ) - assert len(records) == 25 - assert "updated_at" in state - assert state["updated_at"] == highest_updated_at + stream = stream(authenticator=authenticator, config=config) + records, state = _read_incremental(stream, {}) + + assert len(records) == 25 + assert "updated_at" in state + assert state["updated_at"] == highest_updated_at @pytest.mark.parametrize( @@ -151,9 +156,9 @@ def test_incremental(stream, resource, authenticator, config, requests_mock): ], ) def test_substream_full_refresh(requests_mock, stream_class, parent_path, sub_paths, authenticator, config): - requests_mock.register_uri("GET", "/api/" + parent_path, json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(5)]) + requests_mock.register_uri("GET", "/api/v2/" + parent_path, json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(5)]) for sub_path in sub_paths: - requests_mock.register_uri("GET", "/api/" + sub_path, json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(10)]) + requests_mock.register_uri("GET", "/api/v2/" + sub_path, json=[{"id": x, "updated_at": "2022-05-05T00:00:00Z"} for x in range(10)]) stream = stream_class(authenticator=authenticator, config=config) records = _read_full_refresh(stream) @@ -179,11 +184,11 @@ def test_substream_full_refresh(requests_mock, stream_class, parent_path, sub_pa ], ) def test_full_refresh_with_two_sub_levels(requests_mock, stream_class, parent_path, sub_paths, sub_sub_paths, authenticator, config): - requests_mock.register_uri("GET", f"/api/{parent_path}", json=[{"id": x} for x in range(5)]) + requests_mock.register_uri("GET", f"/api/v2/{parent_path}", json=[{"id": x} for x in range(5)]) for sub_path in sub_paths: - requests_mock.register_uri("GET", f"/api/{sub_path}", json=[{"id": x} for x in range(5)]) + requests_mock.register_uri("GET", f"/api/v2/{sub_path}", json=[{"id": x} for x in range(5)]) for sub_sub_path in sub_sub_paths: - requests_mock.register_uri("GET", f"/api/{sub_sub_path}", json=[{"id": x} for x in range(10)]) + requests_mock.register_uri("GET", f"/api/v2/{sub_sub_path}", json=[{"id": x} for x in range(10)]) stream = stream_class(authenticator=authenticator, config=config) records = _read_full_refresh(stream) @@ -192,15 +197,25 @@ def test_full_refresh_with_two_sub_levels(requests_mock, stream_class, parent_pa def test_full_refresh_discussion_comments(requests_mock, authenticator, config): - requests_mock.register_uri("GET", "/api/discussions/categories", json=[{"id": x} for x in range(2)]) + requests_mock.register_uri("GET", "/api/v2/discussions/categories", json=[{"id": x} for x in range(2)]) for i in range(2): - requests_mock.register_uri("GET", f"/api/discussions/categories/{i}/forums", json=[{"id": x} for x in range(3)]) + requests_mock.register_uri("GET", f"/api/v2/discussions/categories/{i}/forums", json=[{"id": x} for x in range(3)]) for j in range(3): - requests_mock.register_uri("GET", f"/api/discussions/forums/{j}/topics", json=[{"id": x} for x in range(4)]) + requests_mock.register_uri("GET", f"/api/v2/discussions/forums/{j}/topics", json=[{"id": x} for x in range(4)]) for k in range(4): - 
requests_mock.register_uri("GET", f"/api/discussions/topics/{k}/comments", json=[{"id": x} for x in range(5)]) + requests_mock.register_uri("GET", f"/api/v2/discussions/topics/{k}/comments", json=[{"id": x} for x in range(5)]) stream = DiscussionComments(authenticator=authenticator, config=config) records = _read_full_refresh(stream) assert len(records) == 120 + + +def test_403_skipped(requests_mock, authenticator, config): + # this case should neither raise an error nor retry + requests_mock.register_uri("GET", "/api/v2/tickets", json=[{"id": 1705, "updated_at": "2022-05-05T00:00:00Z"}]) + requests_mock.register_uri("GET", "/api/v2/tickets/1705/conversations", status_code=403) + stream = Conversations(authenticator=authenticator, config=config) + records = _read_full_refresh(stream) + assert records == [] + assert len(requests_mock.request_history) == 2 diff --git a/airbyte-integrations/connectors/source-getlago/.dockerignore b/airbyte-integrations/connectors/source-getlago/.dockerignore new file mode 100644 index 0000000000000..d6159079f8ab4 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_getlago +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-getlago/Dockerfile b/airbyte-integrations/connectors/source-getlago/Dockerfile new file mode 100644 index 0000000000000..8c1e400964bb1 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_getlago ./source_getlago + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-getlago diff --git a/airbyte-integrations/connectors/source-getlago/README.md b/airbyte-integrations/connectors/source-getlago/README.md new file mode 100644 index 0000000000000..43fe3c9ad32cc --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/README.md @@ -0,0 +1,79 @@ +# Getlago Source + +This is the repository for the Getlago configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/getlago). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-getlago:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/getlago) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_getlago/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source getlago test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-getlago:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-getlago:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-getlago:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-getlago:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-getlago:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-getlago:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your integration tests with Docker, use the `acceptance-test-docker.sh` script in this connector's directory. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-getlago:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-getlago:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1.
Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-getlago/__init__.py b/airbyte-integrations/connectors/source-getlago/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml b/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml new file mode 100644 index 0000000000000..394d407c7ea63 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-getlago:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_getlago/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-getlago/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-getlago/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-getlago/build.gradle b/airbyte-integrations/connectors/source-getlago/build.gradle new file mode 100644 index 0000000000000..f47ea3c56cd47 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_getlago' +} diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/__init__.py b/airbyte-integrations/connectors/source-getlago/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-getlago/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-getlago/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-getlago/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..1e8fc6d57a067 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "billable_metrics", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "plans", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "coupons", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "add_ons", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscriptions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-getlago/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d0ce30f189af7 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-getlago/integration_tests/sample_config.json new file mode 100644 index 0000000000000..f4bfdfef4a7c9 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "099423ad-b160-4b51-b534-9df95ffb3007" +} diff --git a/airbyte-integrations/connectors/source-getlago/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-getlago/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-getlago/main.py b/airbyte-integrations/connectors/source-getlago/main.py new file mode 100644 index 0000000000000..8804958ae5338 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_getlago import SourceGetlago + +if __name__ == "__main__": + source = SourceGetlago() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-getlago/requirements.txt b/airbyte-integrations/connectors/source-getlago/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-getlago/setup.py b/airbyte-integrations/connectors/source-getlago/setup.py new file mode 100644 index 0000000000000..c1252a7b39110 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_getlago", + description="Source implementation for Getlago.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/__init__.py b/airbyte-integrations/connectors/source-getlago/source_getlago/__init__.py new file mode 100644 index 0000000000000..bc8914838d674 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceGetlago + +__all__ = ["SourceGetlago"] diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/getlago.yaml b/airbyte-integrations/connectors/source-getlago/source_getlago/getlago.yaml new file mode 100644 index 0000000000000..fac63bbf8bc69 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/getlago.yaml @@ -0,0 +1,105 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["{{ options['name'] }}"] + requester: + url_base: "https://api.getlago.com/api/v1" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + cursor_paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_token_option: + inject_into: request_parameter + field_name: "page" + page_size_option: + inject_into: request_parameter + field_name: "per_page" + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response.meta.next_page }}" + stop_condition: "{{ response.meta.next_page is none}}" + page_size: 100 + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.cursor_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + billable_metrics_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "billable_metrics" + primary_key: "lago_id" + path: "/billable_metrics" + plans_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "plans" + primary_key: "lago_id" + path: "/plans" + coupons_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "coupons" + primary_key: "lago_id" + path: "/coupons" + add_ons_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "add_ons" + primary_key: "lago_id" + path: "/add_ons" + invoices_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "invoices" + primary_key: "lago_id" + path: "/invoices" + customers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "customers" + primary_key: "lago_id" + path: "/customers" + customer_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.customers_stream)" + parent_key: external_id + stream_slice_field: customer_external_id + subscriptions_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "subscriptions" + primary_key: "lago_id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/subscriptions?external_customer_id={{ stream_slice.customer_external_id }}" + stream_slicer: + $ref: "*ref(definitions.customer_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + +streams: + - "*ref(definitions.billable_metrics_stream)" + - "*ref(definitions.plans_stream)" + - "*ref(definitions.coupons_stream)" + - "*ref(definitions.add_ons_stream)" + - "*ref(definitions.invoices_stream)" + - "*ref(definitions.customers_stream)" + - "*ref(definitions.subscriptions_stream)" + +check: + stream_names: + - "billable_metrics" diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/add_ons.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/add_ons.json new file mode 100644 index 0000000000000..81185dc445c6b --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/add_ons.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", 
+ "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "amount_cents": { + "type": ["null", "integer"] + }, + "amount_currency": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/billable_metrics.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/billable_metrics.json new file mode 100644 index 0000000000000..3c65d5b9f05a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/billable_metrics.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "aggregation_type": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "field_name": { + "type": ["null", "string"] + }, + "group": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/coupons.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/coupons.json new file mode 100644 index 0000000000000..2fcae9c85b38c --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/coupons.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "coupon_type": { + "type": ["null", "string"] + }, + "amount_cents": { + "type": ["null", "integer"] + }, + "amount_currency": { + "type": ["null", "string"] + }, + "percentage_rate": { + "type": ["null", "string"] + }, + "frequency": { + "type": ["null", "string"] + }, + "frequency_duration": { + "type": ["null", "integer"] + }, + "created_at": { + "type": ["null", "string"] + }, + "expiration": { + "type": ["null", "string"] + }, + "expiration_date": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/customers.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/customers.json new file mode 100644 index 0000000000000..415d75f01bdd7 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/customers.json @@ -0,0 +1,63 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "external_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "sequential_id": { + "type": ["null", "integer"] + }, + "slug": { + "type": ["null", "string"] + }, + "vat_rate": { + "type": ["null", "number"] + }, + "created_at": { + "type": ["null", "string"] + }, + "address_line1": { + "type": ["null", "string"] + }, + "address_line2": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "phone": { + "type": ["null", "string"] + }, + 
"logo_url": { + "type": ["null", "string"] + }, + "legal_name": { + "type": ["null", "string"] + }, + "legal_number": { + "type": ["null", "string"] + }, + "currency": { + "type": ["null", "string"] + }, + "billing_configuration": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/invoices.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/invoices.json new file mode 100644 index 0000000000000..198fb2b0d8a6d --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/invoices.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "sequential_id": { + "type": ["null", "integer"] + }, + "number": { + "type": ["null", "string"] + }, + "issuing_date": { + "type": ["null", "string"] + }, + "invoice_type": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "amount_cents": { + "type": ["null", "integer"] + }, + "amount_currency": { + "type": ["null", "string"] + }, + "vat_amount_cents": { + "type": ["null", "integer"] + }, + "vat_amount_currency": { + "type": ["null", "string"] + }, + "total_amount_cents": { + "type": ["null", "integer"] + }, + "total_amount_currency": { + "type": ["null", "string"] + }, + "file_url": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/plans.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/plans.json new file mode 100644 index 0000000000000..57385fe75d955 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/plans.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "code": { + "type": ["null", "string"] + }, + "interval": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "amount_cents": { + "type": ["null", "integer"] + }, + "amount_currency": { + "type": ["null", "string"] + }, + "trial_period": { + "type": ["null", "number"] + }, + "pay_in_advance": { + "type": ["null", "boolean"] + }, + "bill_charges_monthly": { + "type": ["null", "boolean"] + }, + "charges": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/subscriptions.json b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/subscriptions.json new file mode 100644 index 0000000000000..82c35e69bac7c --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/schemas/subscriptions.json @@ -0,0 +1,54 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "lago_id": { + "type": ["null", "string"] + }, + "external_id": { + "type": ["null", "string"] + }, + "lago_customer_id": { + "type": ["null", "string"] + }, + "external_customer_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "plan_code": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "billing_time": { + "type": ["null", "string"] + }, + "subscription_date": { + "type": ["null", "string"] + }, + "started_at": { + "type": ["null", "string"] + }, + 
"terminated_at": { + "type": ["null", "string"] + }, + "canceled_at": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "previous_plan_code": { + "type": ["null", "string"] + }, + "next_plan_code": { + "type": ["null", "string"] + }, + "downgrade_plan_date": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/source.py b/airbyte-integrations/connectors/source-getlago/source_getlago/source.py new file mode 100644 index 0000000000000..88153da5e8331 --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceGetlago(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "getlago.yaml"}) diff --git a/airbyte-integrations/connectors/source-getlago/source_getlago/spec.yaml b/airbyte-integrations/connectors/source-getlago/source_getlago/spec.yaml new file mode 100644 index 0000000000000..2f9d5311e536f --- /dev/null +++ b/airbyte-integrations/connectors/source-getlago/source_getlago/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/getlago +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Getlago Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: >- + Your API Key. See here. + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-github/Dockerfile b/airbyte-integrations/connectors/source-github/Dockerfile index 3a625159c8485..02815898c9aa6 100644 --- a/airbyte-integrations/connectors/source-github/Dockerfile +++ b/airbyte-integrations/connectors/source-github/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . 
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.3.7 +LABEL io.airbyte.version=0.3.8 LABEL io.airbyte.name=airbyte/source-github diff --git a/airbyte-integrations/connectors/source-github/acceptance-test-config.yml b/airbyte-integrations/connectors/source-github/acceptance-test-config.yml index b030d57fd8a2a..f629b322298be 100644 --- a/airbyte-integrations/connectors/source-github/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-github/acceptance-test-config.yml @@ -21,12 +21,14 @@ tests: future_state_path: "integration_tests/abnormal_state.json" cursor_paths: comments: ["airbytehq/integration-test", "updated_at"] - commit_comment_reactions: ["airbytehq/integration-test", "55538825", "created_at"] + commit_comment_reactions: + ["airbytehq/integration-test", "55538825", "created_at"] commit_comments: ["airbytehq/integration-test", "updated_at"] commits: ["airbytehq/integration-test", "master", "created_at"] deployments: ["airbytehq/integration-test", "updated_at"] events: ["airbytehq/integration-test", "created_at"] - issue_comment_reactions: ["airbytehq/integration-test", "907296275", "created_at"] + issue_comment_reactions: + ["airbytehq/integration-test", "907296275", "created_at"] issue_events: ["airbytehq/integration-test", "created_at"] issue_milestones: ["airbytehq/integration-test", "updated_at"] issue_reactions: ["airbytehq/integration-test", "11", "created_at"] @@ -36,7 +38,8 @@ tests: project_columns: ["airbytehq/integration-test", "13167124", "updated_at"] projects: ["airbytehq/integration-test", "updated_at"] - pull_request_comment_reactions: ["airbytehq/integration-test", "699253726", "created_at"] + pull_request_comment_reactions: + ["airbytehq/integration-test", "699253726", "created_at"] pull_request_stats: ["airbytehq/integration-test", "updated_at"] pull_requests: ["airbytehq/integration-test", "updated_at"] releases: ["airbytehq/integration-test", "created_at"] diff --git a/airbyte-integrations/connectors/source-github/source_github/schemas/events.json b/airbyte-integrations/connectors/source-github/source_github/schemas/events.json index 79ac9cfc0cc38..53be89ef0f16d 100644 --- a/airbyte-integrations/connectors/source-github/source_github/schemas/events.json +++ b/airbyte-integrations/connectors/source-github/source_github/schemas/events.json @@ -53,7 +53,8 @@ } }, "created_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "id": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-github/source_github/schemas/workflow_runs.json b/airbyte-integrations/connectors/source-github/source_github/schemas/workflow_runs.json index 15b3afa71c608..8d18571678691 100644 --- a/airbyte-integrations/connectors/source-github/source_github/schemas/workflow_runs.json +++ b/airbyte-integrations/connectors/source-github/source_github/schemas/workflow_runs.json @@ -53,10 +53,12 @@ } }, "created_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "updated_at": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "run_attempt": { "type": ["null", "integer"] diff --git a/airbyte-integrations/connectors/source-gnews/.dockerignore b/airbyte-integrations/connectors/source-gnews/.dockerignore new file mode 100644 index 0000000000000..4a6d18e490f39 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/.dockerignore @@ 
-0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_gnews +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-gnews/Dockerfile b/airbyte-integrations/connectors/source-gnews/Dockerfile new file mode 100644 index 0000000000000..6d580660531dd --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_gnews ./source_gnews + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-gnews diff --git a/airbyte-integrations/connectors/source-gnews/README.md b/airbyte-integrations/connectors/source-gnews/README.md new file mode 100644 index 0000000000000..5eadf155eda17 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/README.md @@ -0,0 +1,79 @@ +# Gnews Source + +This is the repository for the Gnews configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/gnews). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-gnews:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/gnews) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_gnews/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source gnews test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-gnews:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-gnews:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-gnews:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gnews:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gnews:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-gnews:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your integration tests with Docker, use the `acceptance-test-docker.sh` script in this connector's directory. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-gnews:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-gnews:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-gnews/__init__.py b/airbyte-integrations/connectors/source-gnews/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-gnews/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gnews/acceptance-test-config.yml new file mode 100644 index 0000000000000..bbceb32cd0d5a --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-gnews:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_gnews/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-gnews/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-gnews/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-gnews/build.gradle b/airbyte-integrations/connectors/source-gnews/build.gradle new file mode 100644 index 0000000000000..e431eb5fbcffd --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_gnews' +} diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/__init__.py b/airbyte-integrations/connectors/source-gnews/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-gnews/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..12140f99bf5f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/abnormal_state.json @@ -0,0 +1,8 @@ +{ + "search": { + "publishedAt": "2999-12-31T23:59:59Z" + }, + "top_headlines": { + "publishedAt": "2999-12-31T23:59:59Z" + } +} diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-gnews/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gnews/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..916b58f547158 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/configured_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "search", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "top_headlines", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-gnews/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..848e5c928b8f1 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "api-key": "invalid_key", + "language": "eu", + "top_headlines_topic": "blogpost" +} diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-gnews/integration_tests/sample_config.json new file mode 100644 index 0000000000000..670c38c6d8237 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "fdf8252f8df6fffbbfc07cb73c558d16", + "query": "Apple OR Microsoft", + "top_headlines_topic": "technology" +} diff --git a/airbyte-integrations/connectors/source-gnews/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-gnews/integration_tests/sample_state.json new file mode 100644 index 0000000000000..65e4a2c224fd1 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/integration_tests/sample_state.json @@ -0,0 +1,8 @@ +{ + "search": { + "publishedAt": "2021-12-31T23:59:59Z" + }, + "top_headlines": { + "publishedAt": "2021-12-31T23:59:59Z" + } +} diff --git a/airbyte-integrations/connectors/source-gnews/main.py b/airbyte-integrations/connectors/source-gnews/main.py new file mode 100644 index 0000000000000..c6f4ba80c9d33 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-gnews/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_gnews import SourceGnews + +if __name__ == "__main__": + source = SourceGnews() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-gnews/requirements.txt b/airbyte-integrations/connectors/source-gnews/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-gnews/setup.py b/airbyte-integrations/connectors/source-gnews/setup.py new file mode 100644 index 0000000000000..440ea3648709c --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_gnews", + description="Source implementation for Gnews.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/__init__.py b/airbyte-integrations/connectors/source-gnews/source_gnews/__init__.py new file mode 100644 index 0000000000000..69e77e0a5e298 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/__init__.py @@ -0,0 +1,9 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceGnews +from .wait_until_midnight_backoff_strategy import WaitUntilMidnightBackoffStrategy + +__all__ = ["SourceGnews", "WaitUntilMidnightBackoffStrategy"] diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/gnews.yaml b/airbyte-integrations/connectors/source-gnews/source_gnews/gnews.yaml new file mode 100644 index 0000000000000..bb7e4762d532e --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/gnews.yaml @@ -0,0 +1,86 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["articles"] + base_requester: + url_base: "https://gnews.io/api/v4" + http_method: "GET" + error_handler: + type: "CompositeErrorHandler" + error_handlers: + - response_filters: + - http_codes: [429] + action: RETRY + backoff_strategies: + - type: "ConstantBackoffStrategy" + backoff_time_in_seconds: 1.0 + - response_filters: + - http_codes: [403] + action: RETRY + backoff_strategies: + - class_name: "source_gnews.WaitUntilMidnightBackoffStrategy" + base_retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] or ' '.join(day_delta(-7).split('.')[0].split('T')) }}" + datetime_format: "%Y-%m-%d %H:%M:%S" + end_datetime: + datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d %H:%M:%S') }}" + datetime_format: "%Y-%m-%d %H:%M:%S" + step: 1w + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_field: "{{ options['stream_cursor_field'] }}" + common_parameters: + token: "{{ config['api_key'] }}" + lang: "{{ config['language'] }}" + country: "{{ config['country'] }}" + nullable: "{{ ','.join(config['nullable']) }}" + from: "{{ stream_slice['start_time'] }}" + to: "{{ stream_slice['end_time'] }}" + search_stream: + $options: + name: "search" + primary_key: "url" + path: "/search" + stream_cursor_field: "publishedAt" + retriever: + $ref: "*ref(definitions.base_retriever)" + requester: + $ref: "*ref(definitions.base_requester)" + request_options_provider: + request_parameters: + $ref: "*ref(definitions.common_parameters)" + q: "{{ config['query'] }}" + in: "{{ ','.join(config['in']) }}" + sortby: "{{ config['sortby'] }}" + top_headlines_stream: + $options: + name: "top_headlines" + primary_key: "url" + path: "/top-headlines" + stream_cursor_field: "publishedAt" + retriever: + $ref: "*ref(definitions.base_retriever)" + requester: + $ref: "*ref(definitions.base_requester)" + request_options_provider: + request_parameters: + $ref: "*ref(definitions.common_parameters)" + topic: "{{ config['top_headlines_topic'] }}" + q: "{{ config['top_headlines_query'] }}" + +streams: + - "*ref(definitions.search_stream)" + - "*ref(definitions.top_headlines_stream)" + +check: + stream_names: + - "search" + - "top_headlines" diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/schemas/search.json b/airbyte-integrations/connectors/source-gnews/source_gnews/schemas/search.json new file mode 100644 index 0000000000000..334bc80240571 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/schemas/search.json @@ -0,0 +1,43 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "title": { + "type": ["null", "string"], + "description": "The main title of the article." + }, + "description": { + "type": ["null", "string"], + "description": "The small paragraph under the title." 
+ }, + "content": { + "type": ["null", "string"], + "description": "All the content of the article." + }, + "url": { + "type": ["null", "string"], + "description": "The URL of the article." + }, + "image": { + "type": ["null", "string"], + "description": "The main image of the article." + }, + "publishedAt": { + "type": ["null", "string"], + "description": "The date of publication of the article. The date is always in the UTC time zone." + }, + "source": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"], + "description": "The name of the source." + }, + "url": { + "type": ["null", "string"], + "description": "The home page of the source." + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/schemas/top_headlines.json b/airbyte-integrations/connectors/source-gnews/source_gnews/schemas/top_headlines.json new file mode 100644 index 0000000000000..334bc80240571 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/schemas/top_headlines.json @@ -0,0 +1,43 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "title": { + "type": ["null", "string"], + "description": "The main title of the article." + }, + "description": { + "type": ["null", "string"], + "description": "The small paragraph under the title." + }, + "content": { + "type": ["null", "string"], + "description": "All the content of the article." + }, + "url": { + "type": ["null", "string"], + "description": "The URL of the article." + }, + "image": { + "type": ["null", "string"], + "description": "The main image of the article." + }, + "publishedAt": { + "type": ["null", "string"], + "description": "The date of publication of the article. The date is always in the UTC time zone." + }, + "source": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"], + "description": "The name of the source." + }, + "url": { + "type": ["null", "string"], + "description": "The home page of the source." + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/source.py b/airbyte-integrations/connectors/source-gnews/source_gnews/source.py new file mode 100644 index 0000000000000..d22e3fb82d023 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceGnews(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "gnews.yaml"}) diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/spec.yaml b/airbyte-integrations/connectors/source-gnews/source_gnews/spec.yaml new file mode 100644 index 0000000000000..3c6443583c10d --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/spec.yaml @@ -0,0 +1,214 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/gnews +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Gnews Spec + type: object + required: + - api_key + - query + additionalProperties: true + properties: + api_key: + type: string + title: API Key + description: API Key + order: 0 + airbyte_secret: true + query: + type: string + order: 1 + title: Query + description: >- + This parameter allows you to specify your search keywords to find the news articles you are looking for. + The keywords will be used to return the most relevant articles. It is possible to use logical operators + with keywords. + - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by + quotation marks are used to search for articles with the exact same keyword sequence. + For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords. + - Logical AND Operator: This operator allows you to make sure that several keywords are all used in the article + search. By default the space character acts as an AND operator, it is possible to replace the space character + by AND to obtain the same result. For example the query: Apple Microsoft is equivalent to Apple AND Microsoft + - Logical OR Operator: This operator allows you to retrieve articles matching the keyword a or the keyword b. + It is important to note that this operator has a higher precedence than the AND operator. For example the + query: Apple OR Microsoft will return all articles matching the keyword Apple as well as all articles matching + the keyword Microsoft + - Logical NOT Operator: This operator allows you to remove from the results the articles corresponding to the + specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes. + For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword + iPhone + examples: + - Microsoft Windows 10 + - Apple OR Microsoft + - Apple AND NOT iPhone + - (Windows 7) AND (Windows 10) + - Intel AND (i7 OR i9) + language: + type: string + title: Language + decription: >- + This parameter allows you to specify the language of the news articles returned by the API. + You have to set as value the 2 letters code of the language you want to filter. + order: 2 + enum: + - ar + - zh + - nl + - en + - fr + - de + - el + - he + - hi + - it + - ja + - ml + - mr + - "no" + - pt + - ro + - ru + - es + - sv + - ta + - te + - uk + country: + type: string + title: Country + description: >- + This parameter allows you to specify the country where the news articles returned by the API + were published, the contents of the articles are not necessarily related to the specified + country. You have to set as value the 2 letters code of the country you want to filter. 
+ order: 3 + enum: + - au + - br + - ca + - cn + - eg + - fr + - de + - gr + - hk + - in + - ie + - il + - it + - jp + - nl + - "no" + - pk + - pe + - ph + - pt + - ro + - ru + - sg + - es + - se + - ch + - tw + - ua + - gb + - us + in: + type: array + title: In + description: >- + This parameter allows you to choose in which attributes the keywords are searched. The attributes that can + be set are title, description and content. It is possible to combine several attributes. + order: 4 + items: + type: string + enum: + - title + - description + - content + nullable: + type: array + title: Nullable + description: >- + This parameter allows you to specify the attributes that you allow to return null values. The attributes that + can be set are title, description and content. It is possible to combine several attributes + order: 5 + items: + type: string + enum: + - title + - description + - content + start_date: + type: string + title: Start Date + description: >- + This parameter allows you to filter the articles that have a publication date greater than or equal to the + specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC) + order: 6 + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$ + examples: + - "2022-08-21 16:27:09" + end_date: + type: string + title: End Date + description: >- + This parameter allows you to filter the articles that have a publication date smaller than or equal to the + specified value. The date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC) + order: 6 + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$ + examples: + - "2022-08-21 16:27:09" + sortby: + type: string + title: Sort By + description: >- + This parameter allows you to choose with which type of sorting the articles should be returned. Two values + are possible: + - publishedAt = sort by publication date, the articles with the most recent publication date are returned first + - relevance = sort by best match to keywords, the articles with the best match are returned first + order: 7 + enum: + - publishedAt + - relevance + top_headlines_query: + type: string + order: 8 + title: Top Headlines Query + description: >- + This parameter allows you to specify your search keywords to find the news articles you are looking for. + The keywords will be used to return the most relevant articles. It is possible to use logical operators + with keywords. + - Phrase Search Operator: This operator allows you to make an exact search. Keywords surrounded by + quotation marks are used to search for articles with the exact same keyword sequence. + For example the query: "Apple iPhone" will return articles matching at least once this sequence of keywords. + - Logical AND Operator: This operator allows you to make sure that several keywords are all used in the article + search. By default the space character acts as an AND operator, it is possible to replace the space character + by AND to obtain the same result. For example the query: Apple Microsoft is equivalent to Apple AND Microsoft + - Logical OR Operator: This operator allows you to retrieve articles matching the keyword a or the keyword b. + It is important to note that this operator has a higher precedence than the AND operator. 
For example the + query: Apple OR Microsoft will return all articles matching the keyword Apple as well as all articles matching + the keyword Microsoft + - Logical NOT Operator: This operator allows you to remove from the results the articles corresponding to the + specified keywords. To use it, you need to add NOT in front of each word or phrase surrounded by quotes. + For example the query: Apple NOT iPhone will return all articles matching the keyword Apple but not the keyword + iPhone + examples: + - Microsoft Windows 10 + - Apple OR Microsoft + - Apple AND NOT iPhone + - (Windows 7) AND (Windows 10) + - Intel AND (i7 OR i9) + top_headlines_topic: + type: string + title: Top Headlines Topic + description: This parameter allows you to change the category for the request. + order: 9 + enum: + - breaking-news + - world + - nation + - business + - technology + - entertainment + - sports + - science + - health diff --git a/airbyte-integrations/connectors/source-gnews/source_gnews/wait_until_midnight_backoff_strategy.py b/airbyte-integrations/connectors/source-gnews/source_gnews/wait_until_midnight_backoff_strategy.py new file mode 100644 index 0000000000000..682af794a7eed --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/source_gnews/wait_until_midnight_backoff_strategy.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from dataclasses import InitVar, dataclass +from datetime import datetime, timedelta +from typing import Any, Mapping, Optional + +import requests +from airbyte_cdk.sources.declarative.requesters.error_handlers import BackoffStrategy +from airbyte_cdk.sources.declarative.types import Config +from dataclasses_jsonschema import JsonSchemaMixin + + +@dataclass +class WaitUntilMidnightBackoffStrategy(BackoffStrategy, JsonSchemaMixin): + """ + Backoff strategy that waits until next midnight + """ + + options: InitVar[Mapping[str, Any]] + config: Config + + def backoff(self, response: requests.Response, attempt_count: int) -> Optional[float]: + now_utc = datetime.utcnow() + midnight_utc = (now_utc + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0) + delta = midnight_utc - now_utc + return delta.total_seconds() if type(delta) is timedelta else delta.seconds diff --git a/airbyte-integrations/connectors/source-gnews/unit_tests/__init__.py b/airbyte-integrations/connectors/source-gnews/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-gnews/unit_tests/test_wait_until_midnight_backoff.py b/airbyte-integrations/connectors/source-gnews/unit_tests/test_wait_until_midnight_backoff.py new file mode 100644 index 0000000000000..8956a36ef2b5f --- /dev/null +++ b/airbyte-integrations/connectors/source-gnews/unit_tests/test_wait_until_midnight_backoff.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
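+# These tests freeze datetime.utcnow (via the patch decorator below) and assert that the strategy
+# returns the number of seconds remaining until the next UTC midnight for a few representative times.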
+#
+
+from datetime import datetime
+from unittest.mock import Mock, patch
+
+import pytest
+from source_gnews import WaitUntilMidnightBackoffStrategy
+
+DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+
+@pytest.mark.parametrize(
+    "test_name, now_utc, expected_backoff_time",
+    [
+        ("test_under_normal_conditions", "2021-06-10 15:00:30", 32370.0),
+        ("test_last_day_of_year", "2021-12-31 23:50:30", 570.0),
+        ("test_just_before_midnight", "2021-06-10 23:59:59", 1.0),
+        ("test_just_after_midnight", "2021-06-10 00:00:01", 86399.0),
+        ("test_just_during_midnight", "2021-06-10 00:00:00", 86400.0),
+    ],
+)
+@patch("source_gnews.wait_until_midnight_backoff_strategy.datetime")
+def test_wait_until_midnight(test_datetime, test_name, now_utc, expected_backoff_time):
+    test_datetime.utcnow = Mock(return_value=datetime.strptime(now_utc, DATE_FORMAT))
+    response_mock = Mock()
+    backoff_strategy = WaitUntilMidnightBackoffStrategy(options={}, config={})
+    backoff = backoff_strategy.backoff(response_mock, 1)
+    assert backoff == expected_backoff_time
diff --git a/airbyte-integrations/connectors/source-gong/.dockerignore b/airbyte-integrations/connectors/source-gong/.dockerignore
new file mode 100644
index 0000000000000..3d5b9be810f82
--- /dev/null
+++ b/airbyte-integrations/connectors/source-gong/.dockerignore
@@ -0,0 +1,6 @@
+*
+!Dockerfile
+!main.py
+!source_gong
+!setup.py
+!secrets
diff --git a/airbyte-integrations/connectors/source-gong/Dockerfile b/airbyte-integrations/connectors/source-gong/Dockerfile
new file mode 100644
index 0000000000000..8c90d307427e3
--- /dev/null
+++ b/airbyte-integrations/connectors/source-gong/Dockerfile
@@ -0,0 +1,38 @@
+FROM python:3.9.11-alpine3.15 as base
+
+# build and load all requirements
+FROM base as builder
+WORKDIR /airbyte/integration_code
+
+# upgrade pip to the latest version
+RUN apk --no-cache upgrade \
+    && pip install --upgrade pip \
+    && apk --no-cache add tzdata build-base
+
+
+COPY setup.py ./
+# install necessary packages to a temporary folder
+RUN pip install --prefix=/install .
+
+# build a clean environment
+FROM base
+WORKDIR /airbyte/integration_code
+
+# copy all loaded and built libraries to a pure basic image
+COPY --from=builder /install /usr/local
+# add default timezone settings
+COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
+RUN echo "Etc/UTC" > /etc/timezone
+
+# bash is installed for more convenient debugging.
+RUN apk --no-cache add bash
+
+# copy payload code only
+COPY main.py ./
+COPY source_gong ./source_gong
+
+ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
+ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
+
+LABEL io.airbyte.version=0.1.0
+LABEL io.airbyte.name=airbyte/source-gong
diff --git a/airbyte-integrations/connectors/source-gong/README.md b/airbyte-integrations/connectors/source-gong/README.md
new file mode 100644
index 0000000000000..1a78b4baaacad
--- /dev/null
+++ b/airbyte-integrations/connectors/source-gong/README.md
@@ -0,0 +1,79 @@
+# Gong Source
+
+This is the repository for the Gong configuration based source connector.
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/gong).
+
+## Local development
+
+#### Building via Gradle
+You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
+
+To build using Gradle, from the Airbyte repository root, run:
+```
+./gradlew :airbyte-integrations:connectors:source-gong:build
+```
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/gong)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_gong/spec.yaml` file.
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source gong test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector docker image
+
+#### Build
+First, make sure you build the latest Docker image:
+```
+docker build . -t airbyte/source-gong:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-gong:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-gong:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gong:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-gong:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-gong:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+
+#### Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
+
+To run your integration tests with Docker, run the provided `acceptance-test-docker.sh` script.
+
+### Using gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-gong:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-gong:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1.
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-gong/__init__.py b/airbyte-integrations/connectors/source-gong/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-gong/acceptance-test-config.yml b/airbyte-integrations/connectors/source-gong/acceptance-test-config.yml new file mode 100644 index 0000000000000..a404498b934b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/acceptance-test-config.yml @@ -0,0 +1,25 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-gong:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_gong/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-gong/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-gong/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-gong/build.gradle b/airbyte-integrations/connectors/source-gong/build.gradle new file mode 100644 index 0000000000000..60c10f55a43f0 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_gong' +} diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/__init__.py b/airbyte-integrations/connectors/source-gong/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-gong/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..aca1e16d42f73 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "calls", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "scorecards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "answeredScorecards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-gong/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..069cf1569a878 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "access_key": "", + "access_key_secret": "", + "start_date": "2022-06-18" +} diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-gong/integration_tests/sample_config.json new file mode 100644 index 0000000000000..91368fc1e5cfc --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "access_key": "", + "access_key_secret": "", + "start_date": "2022-06-18T08:00:00Z" +} diff --git a/airbyte-integrations/connectors/source-gong/integration_tests/simple_catalog.json b/airbyte-integrations/connectors/source-gong/integration_tests/simple_catalog.json new file mode 100644 index 0000000000000..0ab744b707a05 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/integration_tests/simple_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "answeredScorecards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-gong/main.py b/airbyte-integrations/connectors/source-gong/main.py new file mode 100644 index 0000000000000..2e4bee4f1faed --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_gong import SourceGong + +if __name__ == "__main__": + source = SourceGong() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-gong/requirements.txt b/airbyte-integrations/connectors/source-gong/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-gong/setup.py b/airbyte-integrations/connectors/source-gong/setup.py new file mode 100644 index 0000000000000..0aae02822bb18 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.4", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_gong", + description="Source implementation for Gong.", + author="Elliot Trabac", + author_email="elliot.trabac1@gmail.com", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-gong/source_gong/__init__.py b/airbyte-integrations/connectors/source-gong/source_gong/__init__.py new file mode 100644 index 0000000000000..5a00e429163d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceGong + +__all__ = ["SourceGong"] diff --git a/airbyte-integrations/connectors/source-gong/source_gong/gong.yaml b/airbyte-integrations/connectors/source-gong/source_gong/gong.yaml new file mode 100644 index 0000000000000..ad8004c04dfe1 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/gong.yaml @@ -0,0 +1,96 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["{{ options.name }}"] + requester: + url_base: "https://api.gong.io/v2/" + http_method: "GET" + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['access_key'] }}" + password: "{{ config['access_key_secret'] }}" + request_options_provider: + request_parameters: + fromDateTime: "{{ config['start_date'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response.records.cursor }}" + stop_condition: "{{ 'records' not in response }}" + page_size: 100 + page_size_option: + field_name: "limit" + inject_into: "request_parameter" + page_token_option: + field_name: "cursor" + inject_into: "request_parameter" + url_base: + $ref: "*ref(definitions.requester.url_base)" + requester: + $ref: "*ref(definitions.requester)" + + # base stream + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + # streams + users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "users" + primary_key: "id" + path: "/users" + + calls_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "calls" + primary_key: "id" + path: "/calls" + + scorecards_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "scorecards" + primary_key: 
"scorecardId" + path: "/settings/scorecards" + + answered_scorecards_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "answeredScorecards" + primary_key: "answeredScorecardId" + path: "/stats/activity/scorecards" + retriever: + $ref: "*ref(definitions.base_stream.retriever)" + paginator: + $ref: "*ref(definitions.retriever.paginator)" + page_size_option: + field_name: "limit" + inject_into: "body_json" + page_token_option: + field_name: "cursor" + inject_into: "body_json" + requester: + $ref: "*ref(definitions.requester)" + http_method: "POST" + request_options_provider: + request_body_json: + filter: '{"callFromDate": "{{ config["start_date"] }}"}' + +streams: + - "*ref(definitions.users_stream)" + - "*ref(definitions.calls_stream)" + - "*ref(definitions.scorecards_stream)" + - "*ref(definitions.answered_scorecards_stream)" + +check: + stream_names: + - "users" diff --git a/airbyte-integrations/connectors/source-gong/source_gong/schemas/answeredScorecards.json b/airbyte-integrations/connectors/source-gong/source_gong/schemas/answeredScorecards.json new file mode 100644 index 0000000000000..80e3651b32209 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/schemas/answeredScorecards.json @@ -0,0 +1,61 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "answeredScorecardId": { + "type": ["null", "string"] + }, + "scorecardId": { + "type": ["null", "string"] + }, + "scorecardName": { + "type": ["null", "string"] + }, + "callId": { + "type": ["null", "string"] + }, + "callStartTime": { + "type": ["null", "string"], + "format": "date-time" + }, + "reviewedUserId": { + "type": ["null", "string"] + }, + "reviewerUserId": { + "type": ["null", "string"] + }, + "reviewTime": { + "type": ["null", "string"], + "format": "date-time" + }, + "visibilityType": { + "type": ["null", "string"] + }, + "answers": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "questionId": { + "type": ["null", "string"] + }, + "questionRevisionId": { + "type": ["null", "string"] + }, + "isOverall": { + "type": ["null", "boolean"] + }, + "score": { + "type": ["null", "integer"] + }, + "answerText": { + "type": ["null", "string"] + }, + "notApplicable": { + "type": ["null", "boolean"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-gong/source_gong/schemas/calls.json b/airbyte-integrations/connectors/source-gong/source_gong/schemas/calls.json new file mode 100644 index 0000000000000..d2488ac806f54 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/schemas/calls.json @@ -0,0 +1,65 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "scheduled": { + "type": ["null", "string"], + "format": "date-time" + }, + "started": { + "type": ["null", "string"], + "format": "date-time" + }, + "duration": { + "type": ["null", "integer"] + }, + "primaryUserId": { + "type": ["null", "string"] + }, + "direction": { + "type": ["null", "string"] + }, + "system": { + "type": ["null", "string"] + }, + "scope": { + "type": ["null", "string"] + }, + "media": { + "type": ["null", "string"] + }, + "language": { + "type": ["null", "string"] + }, + "workspaceId": { + "type": ["null", "string"] + }, + "sdrDisposition": { + "type": ["null", "string"] + }, + "clientUniqueId": { + "type": 
["null", "string"] + }, + "customData": { + "type": ["null", "string"] + }, + "purpose": { + "type": ["null", "string"] + }, + "meetingUrl": { + "type": ["null", "string"] + }, + "isPrivate": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-gong/source_gong/schemas/scorecards.json b/airbyte-integrations/connectors/source-gong/source_gong/schemas/scorecards.json new file mode 100644 index 0000000000000..bf9d71aa328e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/schemas/scorecards.json @@ -0,0 +1,60 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "scorecardId": { + "type": ["null", "string"] + }, + "scorecardName": { + "type": ["null", "string"] + }, + "workspaceId": { + "type": ["null", "string"] + }, + "enabled": { + "type": ["null", "boolean"] + }, + "updaterUserId": { + "type": ["null", "string"] + }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated": { + "type": ["null", "string"], + "format": "date-time" + }, + "questions": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "questionId": { + "type": ["null", "string"] + }, + "questionRevisionId": { + "type": ["null", "string"] + }, + "questionText": { + "type": ["null", "string"] + }, + "isOverall": { + "type": ["null", "boolean"] + }, + "updaterUserId": { + "type": ["null", "string"] + }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated": { + "type": ["null", "string"], + "format": "date-time" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-gong/source_gong/schemas/users.json b/airbyte-integrations/connectors/source-gong/source_gong/schemas/users.json new file mode 100644 index 0000000000000..f23814c77c14f --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/schemas/users.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, + "active": { + "type": ["null", "boolean"] + }, + "emailAliases": { + "type": ["null", "array"] + }, + "firstName": { + "type": ["null", "string"] + }, + "lastName": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "phoneNumber": { + "type": ["null", "string"] + }, + "extension": { + "type": ["null", "string"] + }, + "personalMeetingUrls": { + "type": ["null", "array"] + }, + "settings": { + "type": ["null", "object"] + }, + "managerId": { + "type": ["null", "string"] + }, + "meetingConsentPageUrl": { + "type": ["null", "string"] + }, + "spokenLanguages": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-gong/source_gong/source.py b/airbyte-integrations/connectors/source-gong/source_gong/source.py new file mode 100644 index 0000000000000..7755222887f02 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceGong(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "gong.yaml"}) diff --git a/airbyte-integrations/connectors/source-gong/source_gong/spec.yaml b/airbyte-integrations/connectors/source-gong/source_gong/spec.yaml new file mode 100644 index 0000000000000..54ce597129292 --- /dev/null +++ b/airbyte-integrations/connectors/source-gong/source_gong/spec.yaml @@ -0,0 +1,29 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/gong +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Gong Spec + type: object + required: + - access_key + - access_key_secret + additionalProperties: true + properties: + access_key: + type: string + title: Gong Access Key + description: Gong Access Key + airbyte_secret: true + access_key_secret: + type: string + title: Gong Access Key Secret + description: Gong Access Key Secret + airbyte_secret: true + start_date: + type: string + title: Start date + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + description: >- + The date from which to list calls, in the ISO-8601 format; if not specified, the calls start with the earliest recorded call. + For web-conference calls recorded by Gong, the date denotes its scheduled time, otherwise, it denotes its actual start time. + examples: + - "2018-02-18T08:00:00Z" diff --git a/airbyte-integrations/connectors/source-google-ads/Dockerfile b/airbyte-integrations/connectors/source-google-ads/Dockerfile index 04b924655c804..409d7e734fa8b 100644 --- a/airbyte-integrations/connectors/source-google-ads/Dockerfile +++ b/airbyte-integrations/connectors/source-google-ads/Dockerfile @@ -13,5 +13,5 @@ COPY main.py ./ ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.3 +LABEL io.airbyte.version=0.2.4 LABEL io.airbyte.name=airbyte/source-google-ads diff --git a/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py b/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py index ac0e661a4baea..998519e9bdcc0 100644 --- a/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py +++ b/airbyte-integrations/connectors/source-google-ads/source_google_ads/streams.py @@ -8,6 +8,7 @@ import pendulum from airbyte_cdk.models import SyncMode from airbyte_cdk.sources.streams import IncrementalMixin, Stream +from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer from google.ads.googleads.errors import GoogleAdsException from google.ads.googleads.v11.errors.types.authorization_error import AuthorizationErrorEnum from google.ads.googleads.v11.errors.types.request_error import RequestErrorEnum @@ -256,6 +257,7 @@ class Campaigns(IncrementalGoogleAdsStream): Campaigns stream: https://developers.google.com/google-ads/api/fields/v11/campaign """ + transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) primary_key = ["campaign.id", "segments.date"] diff --git a/airbyte-integrations/connectors/source-google-sheets/Dockerfile b/airbyte-integrations/connectors/source-google-sheets/Dockerfile index eeea91de04c03..469e33a5964af 100644 --- a/airbyte-integrations/connectors/source-google-sheets/Dockerfile +++ b/airbyte-integrations/connectors/source-google-sheets/Dockerfile @@ -34,5 +34,5 @@ COPY google_sheets_source ./google_sheets_source ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL 
io.airbyte.version=0.2.21 +LABEL io.airbyte.version=0.2.31 LABEL io.airbyte.name=airbyte/source-google-sheets diff --git a/airbyte-integrations/connectors/source-gridly/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-gridly/integration_tests/sample_config.json index 236d2d8170852..a48b18af25927 100644 --- a/airbyte-integrations/connectors/source-gridly/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-gridly/integration_tests/sample_config.json @@ -1,4 +1,4 @@ { "api_key": "IbuIBdkFjrJps6", "grid_id": "4539o52kmdjmzwp" -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-hubspot/Dockerfile b/airbyte-integrations/connectors/source-hubspot/Dockerfile index fcb67bf2bd5fa..bdde4c977312e 100644 --- a/airbyte-integrations/connectors/source-hubspot/Dockerfile +++ b/airbyte-integrations/connectors/source-hubspot/Dockerfile @@ -34,5 +34,5 @@ COPY source_hubspot ./source_hubspot ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.2 +LABEL io.airbyte.version=0.2.3 LABEL io.airbyte.name=airbyte/source-hubspot diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py index db286e7fd7e5f..a721b33ed64f0 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/source.py @@ -3,6 +3,7 @@ # import logging +from itertools import chain from typing import Any, Iterator, List, Mapping, MutableMapping, Optional, Tuple, Union import requests @@ -130,6 +131,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: available_streams = [stream for stream in streams if stream.scope_is_granted(granted_scopes)] unavailable_streams = [stream for stream in streams if not stream.scope_is_granted(granted_scopes)] self.logger.info(f"The following streams are unavailable: {[s.name for s in unavailable_streams]}") + partially_available_streams = [stream for stream in streams if not stream.properties_scope_is_granted()] + required_scoped = set(chain(*[x.properties_scopes for x in partially_available_streams])) + self.logger.info( + f"The following streams are partially available: {[s.name for s in partially_available_streams]}, " + f"add the following scopes to download all available data: {required_scoped}" + ) else: self.logger.info("No scopes to grant when authenticating with API key.") available_streams = streams diff --git a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py index 8a664c8f6c498..da887312df879 100644 --- a/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py +++ b/airbyte-integrations/connectors/source-hubspot/source_hubspot/streams.py @@ -209,6 +209,8 @@ class Stream(HttpStream, ABC): filter_old_records: bool = True denormalize_records: bool = False # one record from API response can result in multiple records emitted raise_on_http_errors: bool = True + granted_scopes: Set = None + properties_scopes: Set = None @property @abstractmethod @@ -216,6 +218,7 @@ def scopes(self) -> Set[str]: """Set of required scopes. 
Users need to grant at least one of the scopes for the stream to be avaialble to them""" def scope_is_granted(self, granted_scopes: Set[str]) -> bool: + self.granted_scopes = set(granted_scopes) if not self.scopes: return True else: @@ -631,16 +634,24 @@ def _get_field_props(field_type: str) -> Mapping[str, List[str]]: @lru_cache() def properties(self) -> Mapping[str, Any]: """Some entities has dynamic set of properties, so we trying to resolve those at runtime""" - if not self.entity: - return {} - props = {} + if not self.entity: + return props + if not self.properties_scope_is_granted(): + logger.warning( + f"Check your API key has the following permissions granted: {self.properties_scopes}, " + f"to be able to fetch all properties available." + ) + return props data, response = self._api.get(f"/properties/v2/{self.entity}/properties") for row in data: props[row["name"]] = self._get_field_props(row["type"]) return props + def properties_scope_is_granted(self): + return not self.properties_scopes - self.granted_scopes if self.properties_scopes and self.granted_scopes else True + def _flat_associations(self, records: Iterable[MutableMapping]) -> Iterable[MutableMapping]: """When result has associations we prefer to have it flat, so we transform this: @@ -1127,6 +1138,7 @@ class ContactsListMemberships(Stream): page_field = "vid-offset" primary_key = "canonical-vid" scopes = {"crm.objects.contacts.read"} + properties_scopes = {"crm.schemas.contacts.read"} def _transform(self, records: Iterable) -> Iterable: """Extracting list membership records from contacts @@ -1421,6 +1433,7 @@ class PropertyHistory(Stream): limit_field = "count" limit = 100 scopes = {"crm.objects.contacts.read"} + properties_scopes = {"crm.schemas.contacts.read"} def request_params( self, diff --git a/airbyte-integrations/connectors/source-intruder/.dockerignore b/airbyte-integrations/connectors/source-intruder/.dockerignore new file mode 100644 index 0000000000000..42b93e1c24713 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_intruder +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-intruder/Dockerfile b/airbyte-integrations/connectors/source-intruder/Dockerfile new file mode 100644 index 0000000000000..dcbe1dd52a17b --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_intruder ./source_intruder + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-intruder diff --git a/airbyte-integrations/connectors/source-intruder/README.md b/airbyte-integrations/connectors/source-intruder/README.md new file mode 100644 index 0000000000000..bc6dbf57caebb --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/README.md @@ -0,0 +1,79 @@ +# Intruder Source + +This is the repository for the Intruder configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/intruder). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-intruder:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/intruder) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_intruder/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source intruder test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-intruder:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-intruder:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-intruder:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-intruder:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-intruder:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-intruder:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. 
+To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-intruder:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-intruder:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-intruder/__init__.py b/airbyte-integrations/connectors/source-intruder/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml b/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml new file mode 100644 index 0000000000000..6c66594d7c7e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-intruder:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_intruder/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-intruder/acceptance-test-docker.sh 
b/airbyte-integrations/connectors/source-intruder/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-intruder/build.gradle b/airbyte-integrations/connectors/source-intruder/build.gradle new file mode 100644 index 0000000000000..7422eaeb21e78 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_intruder' +} diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/__init__.py b/airbyte-integrations/connectors/source-intruder/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-intruder/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-intruder/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-intruder/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..71615c7caffc8 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/configured_catalog.json @@ -0,0 +1,44 @@ +{ + "streams": [ + { + "stream": { + "name": "issues", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "supported_destination_sync_modes": ["overwrite", "append"] + }, + { + "stream": { + "name": "occurrences_issues", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "supported_destination_sync_modes": ["overwrite", "append"] + }, + { + "stream": { + "name": "scans", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "supported_destination_sync_modes": ["overwrite", "append"] + }, + { + "stream": { + "name": "targets", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append", + "supported_destination_sync_modes": ["overwrite", "append"] + } + ] +} diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-intruder/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..e7823c2be36c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "access_token": "" +} diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-intruder/integration_tests/sample_config.json new file mode 100644 index 0000000000000..ba83c78863db5 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "access_token": "kmMCJRtJ5dOrXDu9Roxtp53Nad25UE5NAi1eqTaqabk" +} diff --git a/airbyte-integrations/connectors/source-intruder/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-intruder/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-intruder/main.py b/airbyte-integrations/connectors/source-intruder/main.py new file mode 100644 index 0000000000000..c8a57a357617e --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_intruder import SourceIntruder + +if __name__ == "__main__": + source = SourceIntruder() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-intruder/requirements.txt b/airbyte-integrations/connectors/source-intruder/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-intruder/setup.py b/airbyte-integrations/connectors/source-intruder/setup.py new file mode 100644 index 0000000000000..7f6881387ce99 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_intruder", + description="Source implementation for Intruder.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/__init__.py b/airbyte-integrations/connectors/source-intruder/source_intruder/__init__.py new file mode 100644 index 0000000000000..5a05bc1e0ba5a --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceIntruder + +__all__ = ["SourceIntruder"] diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/intruder.yaml b/airbyte-integrations/connectors/source-intruder/source_intruder/intruder.yaml new file mode 100644 index 0000000000000..ff5403a981907 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/intruder.yaml @@ -0,0 +1,83 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["results"] + requester: + url_base: "https://api.intruder.io/v1" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['access_token'] }}" + offset_paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 100 + page_token_option: + field_name: "offset" + inject_into: "request_parameter" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.offset_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + issues_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "issues" + primary_key: "id" + path: "/issues" + issue_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.issues_stream)" + parent_key: id + stream_slice_field: id + occurrences_issue_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "occurrences_issues" + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector)" + requester: + $ref: "*ref(definitions.requester)" + path: "/issues/{{ stream_slice.id }}/occurrences" + stream_slicer: + $ref: "*ref(definitions.issue_stream_slicer)" + scans_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "scans" + primary_key: "id" + path: "/scans" + targets_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "targets" + primary_key: "id" + path: "/targets" + +streams: + - "*ref(definitions.issues_stream)" + - "*ref(definitions.occurrences_issue_stream)" + - "*ref(definitions.scans_stream)" + - "*ref(definitions.targets_stream)" + +check: + stream_names: + - "issues" diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/issues.json b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/issues.json new file mode 100644 index 0000000000000..3427aa537c9a4 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/issues.json @@ -0,0 +1,33 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "severity": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "remediation": { + "type": ["null", "string"] + }, + "snoozed": { + "type": ["null", "boolean"] + }, + "snooze_reason": { + "type": ["null", "string"] + }, + "snooze_until": { + "type": ["null", "string"] + }, + "occurrences": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/occurrences_issues.json b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/occurrences_issues.json new file mode 100644 index 
0000000000000..63a87930bc2d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/occurrences_issues.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "target": { + "type": ["null", "string"] + }, + "port": { + "type": ["null", "integer"] + }, + "extra_info": { + "type": ["null", "object"] + }, + "age": { + "type": ["null", "string"] + }, + "snoozed": { + "type": ["null", "boolean"] + }, + "snooze_reason": { + "type": ["null", "string"] + }, + "snooze_until": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/scans.json b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/scans.json new file mode 100644 index 0000000000000..c751b8e187c52 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/scans.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "status": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/targets.json b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/targets.json new file mode 100644 index 0000000000000..327ec85064f7c --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/schemas/targets.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "address": { + "type": ["null", "string"] + }, + "tags": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/source.py b/airbyte-integrations/connectors/source-intruder/source_intruder/source.py new file mode 100644 index 0000000000000..f39be55b586d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceIntruder(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "intruder.yaml"}) diff --git a/airbyte-integrations/connectors/source-intruder/source_intruder/spec.yaml b/airbyte-integrations/connectors/source-intruder/source_intruder/spec.yaml new file mode 100644 index 0000000000000..9d78bb4fa4505 --- /dev/null +++ b/airbyte-integrations/connectors/source-intruder/source_intruder/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/intruder +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Intruder Spec + type: object + required: + - access_token + additionalProperties: true + properties: + access_token: + title: API Access token + type: string + description: >- + Your API Access token. See here. 
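The `source.py` shown above is intentionally a thin wrapper: all connector behavior comes from `intruder.yaml`. A minimal sketch (assuming the `source_intruder` package is installed locally, e.g. via `pip install -e .`, and using a placeholder token) of how the declarative source resolves that YAML into streams:
```
# Minimal sketch: listing the streams declared in intruder.yaml.
# Assumes the connector package is installed; the token below is a placeholder.
from source_intruder import SourceIntruder

source = SourceIntruder()  # loads and parses intruder.yaml
config = {"access_token": "<intruder-api-token>"}

# streams() materializes the `streams:` section of the YAML into stream objects
for stream in source.streams(config):
    print(stream.name)  # issues, occurrences_issues, scans, targets
```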
+ airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-ip2whois/.dockerignore b/airbyte-integrations/connectors/source-ip2whois/.dockerignore new file mode 100644 index 0000000000000..899e0b5e07da1 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_ip2whois +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-ip2whois/Dockerfile b/airbyte-integrations/connectors/source-ip2whois/Dockerfile new file mode 100644 index 0000000000000..128da893d0df0 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_ip2whois ./source_ip2whois + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-ip2whois diff --git a/airbyte-integrations/connectors/source-ip2whois/README.md b/airbyte-integrations/connectors/source-ip2whois/README.md new file mode 100644 index 0000000000000..63ea134c95d27 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/README.md @@ -0,0 +1,79 @@ +# Ip2whois Source + +This is the repository for the Ip2whois configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/ip2whois). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-ip2whois:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/ip2whois) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_ip2whois/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source ip2whois test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-ip2whois:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-ip2whois:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-ip2whois:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-ip2whois:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-ip2whois:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-ip2whois:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure the tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run the integration tests with Docker, run the `acceptance-test-docker.sh` script from the connector root. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-ip2whois:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-ip2whois:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups. Dependencies that are: +* required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-ip2whois/__init__.py b/airbyte-integrations/connectors/source-ip2whois/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml b/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml new file mode 100644 index 0000000000000..1119f94cf402c --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-ip2whois:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_ip2whois/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-ip2whois/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-ip2whois/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-ip2whois/build.gradle b/airbyte-integrations/connectors/source-ip2whois/build.gradle new file mode 100644 index 0000000000000..9cb21ae42cb51 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_ip2whois' +} diff --git a/airbyte-integrations/connectors/source-ip2whois/integration_tests/__init__.py b/airbyte-integrations/connectors/source-ip2whois/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-ip2whois/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-ip2whois/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-ip2whois/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-ip2whois/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-ip2whois/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-ip2whois/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..f87240160cda4 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "whois", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-ip2whois/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-ip2whois/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..04faf5e204c45 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "invalid-key", + "domain": "www.lenssutra.com" +} diff --git a/airbyte-integrations/connectors/source-ip2whois/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-ip2whois/integration_tests/sample_config.json new file mode 100644 index 0000000000000..66c38a48cef30 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "GHBTXWPTEKYP4PHQ2PWRDSLIZVJN6OGS", + "domain": "www.lenssutra.com" +} diff --git a/airbyte-integrations/connectors/source-ip2whois/main.py b/airbyte-integrations/connectors/source-ip2whois/main.py new file mode 100644 index 0000000000000..e6aea668a07cc --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_ip2whois import SourceIp2whois + +if __name__ == "__main__": + source = SourceIp2whois() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-ip2whois/requirements.txt b/airbyte-integrations/connectors/source-ip2whois/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-ip2whois/setup.py b/airbyte-integrations/connectors/source-ip2whois/setup.py new file mode 100644 index 0000000000000..fbebfc836c337 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_ip2whois", + description="Source implementation for Ip2whois.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/__init__.py b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/__init__.py new file mode 100644 index 0000000000000..bb4a4d05750b4 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceIp2whois + +__all__ = ["SourceIp2whois"] diff --git a/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/ip2whois.yaml b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/ip2whois.yaml new file mode 100644 index 0000000000000..8c7ba73f4c4c8 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/ip2whois.yaml @@ -0,0 +1,35 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.ip2whois.com" + http_method: "GET" + request_options_provider: + request_parameters: + key: "{{ config['api_key'] }}" + domain: "{{ config['domain'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + whois_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "whois" + primary_key: "domain_id" + path: "/v2" + +streams: + - "*ref(definitions.whois_stream)" +check: + stream_names: + - "whois" diff --git a/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/schemas/whois.json b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/schemas/whois.json new file mode 100644 index 0000000000000..24373b7b69cb3 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/schemas/whois.json @@ -0,0 +1,261 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "domain": { + "type": "string" + }, + "domain_id": { + "type": "string" + }, + "status": { + "type": "string" + }, + "create_date": { + "type": "string", + "format": "date-time" + }, + "update_date": { + "type": "string", + "format": "date-time" + }, + "expire_date": { + "type": "string", + "format": "date-time" + }, + "domain_age": { + "type": "integer" + }, + "whois_server": { + "type": "string" + }, + "registrar": { + "type": "object", + "properties": { + "iana_id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "required": ["iana_id", "name", "url"], + "additionalProperties": true + }, + "registrant": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "organization": { + "type": "string" + }, + "street_address": { + "type": "string" + }, + "city": { + "type": "string" + }, + "region": { + "type": "string" + }, + "zip_code": { + "type": "string" + }, + "country": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "fax": { + "type": "string" + }, + "email": { + "type": "string" + } + }, + "required": [ + "name", + "organization", + "street_address", + "city", + "region", + "zip_code", + "country", + "phone", + "fax", + "email" + ], + "additionalProperties": true + }, + "admin": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "organization": { + "type": "string" + }, + "street_address": { + "type": "string" + }, + "city": { + "type": "string" + }, + "region": { + "type": "string" + }, + "zip_code": { + "type": "string" + }, + "country": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "fax": { + "type": "string" + }, + "email": { + "type": "string" + } + }, + "required": [ + "name", + "organization", + "street_address", + "city", + "region", + "zip_code", + "country", + "phone", + "fax", + "email" + ], + "additionalProperties": true + }, + "tech": { + "type": "object", + "properties": { + "name": { + 
"type": "string" + }, + "organization": { + "type": "string" + }, + "street_address": { + "type": "string" + }, + "city": { + "type": "string" + }, + "region": { + "type": "string" + }, + "zip_code": { + "type": "string" + }, + "country": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "fax": { + "type": "string" + }, + "email": { + "type": "string" + } + }, + "required": [ + "name", + "organization", + "street_address", + "city", + "region", + "zip_code", + "country", + "phone", + "fax", + "email" + ], + "additionalProperties": true + }, + "billing": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "organization": { + "type": "string" + }, + "street_address": { + "type": "string" + }, + "city": { + "type": "string" + }, + "region": { + "type": "string" + }, + "zip_code": { + "type": "string" + }, + "country": { + "type": "string" + }, + "phone": { + "type": "string" + }, + "fax": { + "type": "string" + }, + "email": { + "type": "string" + } + }, + "required": [ + "name", + "organization", + "street_address", + "city", + "region", + "zip_code", + "country", + "phone", + "fax", + "email" + ], + "additionalProperties": true + }, + "nameservers": { + "type": "array" + } + }, + "required": [ + "domain", + "domain_id", + "status", + "create_date", + "update_date", + "expire_date", + "domain_age", + "whois_server", + "registrar", + "registrant", + "admin", + "tech", + "billing", + "nameservers" + ], + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/source.py b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/source.py new file mode 100644 index 0000000000000..5602b74e4df99 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceIp2whois(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "ip2whois.yaml"}) diff --git a/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/spec.yaml b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/spec.yaml new file mode 100644 index 0000000000000..f0af6a7d51fa4 --- /dev/null +++ b/airbyte-integrations/connectors/source-ip2whois/source_ip2whois/spec.yaml @@ -0,0 +1,23 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/ip2whois +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Ip2whois Spec + type: object + additionalProperties: true + properties: + api_key: + title: API key + type: string + description: >- + Your API Key. See here. + airbyte_secret: true + domain: + title: Domain + type: string + description: >- + Domain name. See here. 
+ examples: + - www.google.com + - www.facebook.com diff --git a/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml b/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml new file mode 100644 index 0000000000000..9a4392c5347b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-jdbc:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-jdbc/build.gradle b/airbyte-integrations/connectors/source-jdbc/build.gradle index 2e9393f323350..4daa6755626b9 100644 --- a/airbyte-integrations/connectors/source-jdbc/build.gradle +++ b/airbyte-integrations/connectors/source-jdbc/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' id "java-library" // https://docs.gradle.org/current/userguide/java_testing.html#sec:java_test_fixtures id "java-test-fixtures" diff --git a/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java index 9ae17ff938be6..3c2461cfae31f 100644 --- a/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java +++ b/airbyte-integrations/connectors/source-jdbc/src/main/java/io/airbyte/integrations/source/jdbc/AbstractJdbcSource.java @@ -40,13 +40,13 @@ import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.db.jdbc.streaming.JdbcStreamingQueryConfig; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.dto.JdbcPrivilegeDto; import io.airbyte.integrations.source.relationaldb.AbstractDbSource; import io.airbyte.integrations.source.relationaldb.CursorInfo; import io.airbyte.integrations.source.relationaldb.TableInfo; import io.airbyte.integrations.source.relationaldb.state.StateManager; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -141,9 +141,9 @@ public AbstractJdbcSource(final String driverClass, @Override protected AutoCloseableIterator queryTableFullRefresh(final JdbcDatabase database, - final List columnNames, - final String schemaName, - final String tableName) { + final List columnNames, + final String schemaName, + final String tableName) { LOGGER.info("Queueing query for table: {}", tableName); return queryTable(database, String.format("SELECT %s FROM %s", enquoteIdentifierList(columnNames, getQuoteString()), @@ -617,7 +617,7 @@ protected List identifyStreamsToSnapshot(final Configur final Set newlyAddedStreams = new HashSet<>(Sets.difference(allStreams, alreadySyncedStreams)); return catalog.getStreams().stream() - .filter(stream -> newlyAddedStreams.contains(AirbyteStreamNameNamespacePair.fromAirbyteSteam(stream.getStream()))) + .filter(stream -> newlyAddedStreams.contains(AirbyteStreamNameNamespacePair.fromAirbyteStream(stream.getStream()))) .map(Jsons::clone) .collect(Collectors.toList()); } diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..892b30269c606 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/dummy_config.json @@ -0,0 +1,4 @@ +{ + "username": "default", + "jdbc_url": "default" +} diff --git a/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..95e6b354ddcb4 --- /dev/null +++ b/airbyte-integrations/connectors/source-jdbc/src/test-integration/resources/expected_spec.json @@ -0,0 +1,35 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/postgres", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "JDBC Source Spec", + "type": "object", + "required": ["username", "jdbc_url"], + 
"properties": { + "username": { + "title": "Username", + "description": "The username which is used to access the database.", + "type": "string" + }, + "password": { + "title": "Password", + "description": "The password associated with this username.", + "type": "string", + "airbyte_secret": true + }, + "jdbc_url": { + "title": "JDBC URL", + "description": "JDBC formatted URL. See the standard here.", + "type": "string" + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string" + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-jira/Dockerfile b/airbyte-integrations/connectors/source-jira/Dockerfile index 9a103dfeaf2dd..f30726f603a70 100644 --- a/airbyte-integrations/connectors/source-jira/Dockerfile +++ b/airbyte-integrations/connectors/source-jira/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.22 +LABEL io.airbyte.version=0.3.0 LABEL io.airbyte.name=airbyte/source-jira diff --git a/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml b/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml index c5de3a4731604..06261ed152ef6 100644 --- a/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-jira/acceptance-test-config.yml @@ -4,6 +4,8 @@ connector_image: airbyte/source-jira:dev tests: spec: - spec_path: "source_jira/spec.json" + backward_compatibility_tests_config: + disable_for_version: "0.2.23" connection: - config_path: "secrets/config.json" status: "succeed" @@ -11,6 +13,8 @@ tests: status: "failed" discovery: - config_path: "secrets/config.json" + backward_compatibility_tests_config: + disable_for_version: "0.2.23" basic_read: # TEST for the Labels stream - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/full_configured_catalog.json b/airbyte-integrations/connectors/source-jira/integration_tests/full_configured_catalog.json index 423a9e5ebc3c1..09a34e3041242 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/full_configured_catalog.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/full_configured_catalog.json @@ -7455,10 +7455,10 @@ }, "supported_sync_modes": ["incremental"], "source_defined_cursor": true, - "default_cursor_field": ["fields", "updated"] + "default_cursor_field": ["updated"] }, "sync_mode": "incremental", - "cursor_field": ["fields", "updated"], + "cursor_field": ["updated"], "destination_sync_mode": "append" }, { diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/inc_configured_catalog.json b/airbyte-integrations/connectors/source-jira/integration_tests/inc_configured_catalog.json index 2df7e74823d1f..e7b087c991b15 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/inc_configured_catalog.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/inc_configured_catalog.json @@ -6,21 +6,35 @@ "json_schema": {}, "supported_sync_modes": ["incremental"], "source_defined_cursor": true, - "default_cursor_field": 
["fields", "updated"] + "default_cursor_field": ["updated"] }, "sync_mode": "incremental", - "cursor_field": ["fields", "updated"], + "cursor_field": ["updated"], "destination_sync_mode": "append" }, { "stream": { "name": "issue_worklogs", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false + "supported_sync_modes": ["incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updated"], + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "issue_comments", + "json_schema": {}, + "supported_sync_modes": ["incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"] + }, + "sync_mode": "incremental", + "cursor_field": ["updated"], + "destination_sync_mode": "append" }, { "stream": { diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-jira/integration_tests/invalid_config.json index 8545578f3b813..9232da9f03d8e 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/invalid_config.json @@ -3,6 +3,5 @@ "domain": "invaliddomain.atlassian.net", "email": "test@test.com", "projects": ["invalidproject"], - "start_date": "2021-09-25T00:00:00Z", - "max_results": 0 + "start_date": "2021-09-25T00:00:00Z" } diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/issue_worklogs_configured_catalog.json b/airbyte-integrations/connectors/source-jira/integration_tests/issue_worklogs_configured_catalog.json index 4daba2f259eee..9988b4462befc 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/issue_worklogs_configured_catalog.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/issue_worklogs_configured_catalog.json @@ -4,11 +4,13 @@ "stream": { "name": "issue_worklogs", "json_schema": {}, - "supported_sync_modes": ["full_refresh"], - "source_defined_cursor": false + "supported_sync_modes": ["incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["updated"] }, - "sync_mode": "full_refresh", - "destination_sync_mode": "overwrite" + "sync_mode": "incremental", + "cursor_field": ["updated"], + "destination_sync_mode": "append" } ] } diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-jira/integration_tests/sample_config.json index 30b75408a8907..8688c1e94a71a 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/sample_config.json @@ -2,6 +2,5 @@ "api_token": "", "domain": "", "email": "", - "projects": [], - "max_results": "" + "projects": [] } diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/users_catalog.json b/airbyte-integrations/connectors/source-jira/integration_tests/users_catalog.json new file mode 100644 index 0000000000000..ae547e4af2cf3 --- /dev/null +++ b/airbyte-integrations/connectors/source-jira/integration_tests/users_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": 
"users_groups_detailed", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json index ffd6e813a76c1..7005d03e3d5ea 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/board_issues.json @@ -58,6 +58,18 @@ }, "boardId": { "type": "integer" + }, + "created": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true + }, + "updated": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true } } } diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/epics.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/epics.json index 431ca1e9e5dd3..02a72bc54d694 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/epics.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/epics.json @@ -58,6 +58,18 @@ "type": "string", "description": "The key of the project containing the epic.", "readOnly": true + }, + "created": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true + }, + "updated": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true } }, "additionalProperties": true diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json index a4ee8cfc81d2d..e1f69d82c3c45 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/issues.json @@ -100,6 +100,18 @@ "type": "string", "description": "The key of the project containing the issue.", "readOnly": true + }, + "created": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true + }, + "updated": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true } }, "additionalProperties": true diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json b/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json index 74f8d73643da1..3dbe298f2b9ea 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/sprint_issues.json @@ -61,6 +61,18 @@ }, "sprintId": { "type": "integer" + }, + "created": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true + }, + "updated": { + "type": ["string", "null"], + "format": "date-time", + "description": "This field transformed from fields attr", + "readOnly": true } } } diff --git a/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json 
b/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json new file mode 100644 index 0000000000000..2cb29894b5704 --- /dev/null +++ b/airbyte-integrations/connectors/source-jira/source_jira/schemas/users_groups_detailed.json @@ -0,0 +1,236 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "self": { + "type": "string", + "description": "The URL of the user.", + "format": "uri", + "readOnly": true + }, + "key": { + "type": "string", + "description": "This property is no longer available and will be removed from the documentation soon. See the [deprecation notice](https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide/) for details." + }, + "accountId": { + "maxLength": 128, + "type": "string", + "description": "The account ID of the user, which uniquely identifies the user across all Atlassian products. For example, *5b10ac8d82e05b22cc7d4ef5*. Required in requests." + }, + "accountType": { + "type": "string", + "description": "The user account type. Can take the following values:\n\n * `atlassian` regular Atlassian user account\n * `app` system account used for Connect applications and OAuth to represent external systems\n * `customer` Jira Service Desk account representing an external service desk", + "readOnly": true, + "enum": ["atlassian", "app", "customer", "unknown"] + }, + "name": { + "type": "string", + "description": "This property is no longer available and will be removed from the documentation soon. See the [deprecation notice](https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide/) for details." + }, + "emailAddress": { + "type": "string", + "description": "The email address of the user. Depending on the user\u2019s privacy setting, this may be returned as null.", + "readOnly": true + }, + "avatarUrls": { + "description": "The avatars of the user.", + "readOnly": true, + "type": "object", + "properties": { + "16x16": { + "type": "string", + "description": "The URL of the item's 16x16 pixel avatar.", + "format": "uri" + }, + "24x24": { + "type": "string", + "description": "The URL of the item's 24x24 pixel avatar.", + "format": "uri" + }, + "32x32": { + "type": "string", + "description": "The URL of the item's 32x32 pixel avatar.", + "format": "uri" + }, + "48x48": { + "type": "string", + "description": "The URL of the item's 48x48 pixel avatar.", + "format": "uri" + } + } + }, + "displayName": { + "type": "string", + "description": "The display name of the user. Depending on the user\u2019s privacy setting, this may return an alternative value.", + "readOnly": true + }, + "active": { + "type": "boolean", + "description": "Whether the user is active.", + "readOnly": true + }, + "timeZone": { + "type": "string", + "description": "The time zone specified in the user's profile. Depending on the user\u2019s privacy setting, this may be returned as null.", + "readOnly": true + }, + "locale": { + "type": "string", + "description": "The locale of the user. 
Depending on the user\u2019s privacy setting, this may be returned as null.", + "readOnly": true + }, + "groups": { + "description": "The groups that the user belongs to.", + "readOnly": true, + "type": "object", + "properties": { + "size": { + "type": "integer", + "format": "int32", + "xml": { + "attribute": true + } + }, + "items": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of group." + }, + "self": { + "type": "string", + "description": "The URL for these group details.", + "format": "uri", + "readOnly": true + } + } + } + }, + "pagingCallback": { + "type": "object" + }, + "callback": { + "type": "object" + }, + "max-results": { + "type": "integer", + "format": "int32", + "xml": { + "name": "max-results", + "attribute": true + } + } + } + }, + "applicationRoles": { + "description": "The application roles the user is assigned to.", + "readOnly": true, + "type": "object", + "properties": { + "size": { + "type": "integer", + "format": "int32", + "xml": { + "attribute": true + } + }, + "items": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": { + "type": "string", + "description": "The key of the application role." + }, + "groups": { + "uniqueItems": true, + "type": "array", + "description": "The groups associated with the application role.", + "items": { + "type": "string" + } + }, + "name": { + "type": "string", + "description": "The display name of the application role." + }, + "defaultGroups": { + "uniqueItems": true, + "type": "array", + "description": "The groups that are granted default access for this application role.", + "items": { + "type": "string" + } + }, + "selectedByDefault": { + "type": "boolean", + "description": "Determines whether this application role should be selected by default on user creation." + }, + "defined": { + "type": "boolean", + "description": "Deprecated." + }, + "numberOfSeats": { + "type": "integer", + "description": "The maximum count of users on your license.", + "format": "int32" + }, + "remainingSeats": { + "type": "integer", + "description": "The count of users remaining on your license.", + "format": "int32" + }, + "userCount": { + "type": "integer", + "description": "The number of users counting against your license.", + "format": "int32" + }, + "userCountDescription": { + "type": "string", + "description": "The [type of users](https://confluence.atlassian.com/x/lRW3Ng) being counted against your license." + }, + "hasUnlimitedSeats": { + "type": "boolean" + }, + "platform": { + "type": "boolean", + "description": "Indicates if the application role belongs to Jira platform (`jira-core`)." + } + } + } + }, + "pagingCallback": { + "type": "object" + }, + "callback": { + "type": "object" + }, + "max-results": { + "type": "integer", + "format": "int32", + "xml": { + "name": "max-results", + "attribute": true + } + } + } + }, + "expand": { + "type": "string", + "description": "Expand options that include additional user details in the response.", + "readOnly": true, + "xml": { + "attribute": true + } + } + }, + "additionalProperties": true, + "description": "A user with details as permitted by the user's Atlassian Account privacy settings. However, be aware of these exceptions:\n\n * User record deleted from Atlassian: This occurs as the result of a right to be forgotten request. 
In this case, `displayName` provides an indication and other parameters have default values or are blank (for example, email is blank).\n * User record corrupted: This occurs as a results of events such as a server import and can only happen to deleted users. In this case, `accountId` returns *unknown* and all other parameters have fallback values.\n * User record unavailable: This usually occurs due to an internal service outage. In this case, all parameters have fallback values.", + "xml": { + "name": "user" + } +} diff --git a/airbyte-integrations/connectors/source-jira/source_jira/source.py b/airbyte-integrations/connectors/source-jira/source_jira/source.py index b893781318770..e73b06c3fc178 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/source.py +++ b/airbyte-integrations/connectors/source-jira/source_jira/source.py @@ -61,6 +61,7 @@ Sprints, TimeTracking, Users, + UsersGroupsDetailed, Workflows, WorkflowSchemes, WorkflowStatusCategories, @@ -101,7 +102,6 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> def streams(self, config: Mapping[str, Any]) -> List[Stream]: authenticator = self.get_authenticator(config) args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - users_args = {**args, "max_results": config.get("max_results", 50)} incremental_args = {**args, "start_date": config.get("start_date", "")} render_fields = config.get("render_fields", False) issues_stream = Issues( @@ -163,7 +163,8 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: Sprints(**args), SprintIssues(**incremental_args), TimeTracking(**args), - Users(**users_args), + Users(**args), + UsersGroupsDetailed(**args), Workflows(**args), WorkflowSchemes(**args), WorkflowStatuses(**args), diff --git a/airbyte-integrations/connectors/source-jira/source_jira/spec.json b/airbyte-integrations/connectors/source-jira/source_jira/spec.json index 576685c5f437b..1f21bc250d876 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/spec.json +++ b/airbyte-integrations/connectors/source-jira/source_jira/spec.json @@ -25,12 +25,6 @@ "title": "Email", "description": "The user email for your Jira account." 
}, - "max_results": { - "type": "number", - "title": "Max Results", - "description": "Pagination max results (only for users stream)", - "default": 50 - }, "projects": { "type": "array", "title": "Projects", diff --git a/airbyte-integrations/connectors/source-jira/source_jira/streams.py b/airbyte-integrations/connectors/source-jira/source_jira/streams.py index 858a6c804fa19..cecf825623331 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/streams.py +++ b/airbyte-integrations/connectors/source-jira/source_jira/streams.py @@ -42,7 +42,7 @@ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, else: if all(paging_metadata in response_data for paging_metadata in ("startAt", "maxResults", "total")): start_at = response_data["startAt"] - max_results = response_data["maxResults"] + max_results = int(response_data["maxResults"]) total = response_data["total"] end_at = start_at + max_results if not end_at > total: @@ -94,6 +94,7 @@ def jql_compare_date(self, stream_state: Mapping[str, Any]) -> Optional[str]: issues_state = None cursor_exist_in_state: Any = False cursor_field = self.cursor_field + if isinstance(self.cursor_field, str): cursor_exist_in_state = stream_state.get(self.cursor_field) elif isinstance(self.cursor_field, list) and self.cursor_field: @@ -101,6 +102,7 @@ def jql_compare_date(self, stream_state: Mapping[str, Any]) -> Optional[str]: for cursor_part in self.cursor_field: cursor_exist_in_state = stream_state.get(cursor_part) cursor_field = cursor_field[-1] + if cursor_exist_in_state: issues_state = pendulum.parse(stream_state.get(cursor_field, self._start_date)) elif self._start_date: @@ -193,7 +195,7 @@ class BoardIssues(V1ApiJiraStream, IncrementalJiraStream): https://developer.atlassian.com/cloud/jira/software/rest/api-group-board/#api-agile-1-0-board-boardid-issue-get """ - cursor_field = ["fields", "updated"] + cursor_field = "updated" parse_response_root = "issues" def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: @@ -221,6 +223,9 @@ def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwarg def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: record["boardId"] = stream_slice["board_id"] + issue_fields = record["fields"] + record["created"] = issue_fields.get("created") + record["updated"] = issue_fields.get("updated") or issue_fields.get("created") return record @@ -244,7 +249,7 @@ def __init__(self, render_fields: bool = False, **kwargs): super().__init__(**kwargs) self._render_fields = render_fields - cursor_field = ["fields", "updated"] + cursor_field = "updated" parse_response_root = "issues" def path(self, **kwargs) -> str: @@ -273,6 +278,9 @@ def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwarg def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: record["projectId"] = stream_slice["project_id"] record["projectKey"] = stream_slice["project_key"] + issue_fields = record["fields"] + record["created"] = issue_fields.get("created") + record["updated"] = issue_fields.get("updated") or issue_fields.get("created") return record @@ -325,7 +333,7 @@ class Issues(IncrementalJiraStream): https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-search/#api-rest-api-3-search-get """ - cursor_field = ["fields", "updated"] + cursor_field = "updated" parse_response_root = "issues" use_cache = True @@ -394,21 
+402,28 @@ def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwarg def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: record["projectId"] = stream_slice["project_id"] record["projectKey"] = stream_slice["project_key"] + issue_fields = record["fields"] + record["created"] = issue_fields.get("created") + record["updated"] = issue_fields.get("updated") or issue_fields.get("created") return record -class IssueComments(StartDateJiraStream): +class IssueComments(IncrementalJiraStream): """ https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-comments/#api-rest-api-3-issue-issueidorkey-comment-get """ parse_response_root = "comments" + primary_key = "id" + cursor_field = "updated" def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: key = stream_slice["key"] return f"issue/{key}/comment" - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + def read_records( + self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs + ) -> Iterable[Mapping[str, Any]]: issues_stream = Issues( additional_fields=[], authenticator=self.authenticator, @@ -416,8 +431,8 @@ def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwarg projects=self._projects, start_date=self._start_date, ) - for issue in issues_stream.read_records(sync_mode=SyncMode.full_refresh): - yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) + for issue in issues_stream.read_records(sync_mode=SyncMode.full_refresh, stream_state=stream_state): + yield from super().read_records(stream_slice={"key": issue["key"]}, stream_state=stream_state, **kwargs) class IssueFields(JiraStream): @@ -671,19 +686,22 @@ def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwarg yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) -class IssueWorklogs(StartDateJiraStream): +class IssueWorklogs(IncrementalJiraStream): """ https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-worklogs/#api-rest-api-3-issue-issueidorkey-worklog-get """ parse_response_root = "worklogs" - primary_key = None + primary_key = "id" + cursor_field = "updated" def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: key = stream_slice["key"] return f"issue/{key}/worklog" - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + def read_records( + self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs + ) -> Iterable[Mapping[str, Any]]: issues_stream = Issues( additional_fields=[], authenticator=self.authenticator, @@ -691,8 +709,8 @@ def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwarg projects=self._projects, start_date=self._start_date, ) - for issue in issues_stream.read_records(sync_mode=SyncMode.full_refresh): - yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) + for issue in issues_stream.read_records(sync_mode=SyncMode.full_refresh, stream_state=stream_state): + yield from super().read_records(stream_slice={"key": issue["key"]}, stream_state=stream_state, **kwargs) class JiraSettings(JiraStream): @@ -1027,7 +1045,7 @@ class SprintIssues(V1ApiJiraStream, IncrementalJiraStream): 
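The `transform` overrides above (for `BoardIssues`, `Issues`, and, below, `SprintIssues`) all follow the same pattern: the nested `fields.created` / `fields.updated` values are promoted to top-level keys so that the simpler `updated` cursor field can drive incremental syncs. A short sketch of that promotion on a sample record:
```
# Sketch of the fields -> top-level promotion used by the transformed streams above.
record = {"id": "10001", "fields": {"created": "2022-10-01T00:00:00Z", "updated": None}}

issue_fields = record["fields"]
record["created"] = issue_fields.get("created")
# fall back to `created` when the issue has never been updated
record["updated"] = issue_fields.get("updated") or issue_fields.get("created")

print(record["updated"])  # 2022-10-01T00:00:00Z
```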
https://developer.atlassian.com/cloud/jira/software/rest/api-group-sprint/#api-agile-1-0-sprint-sprintid-issue-get """ - cursor_field = ["fields", "updated"] + cursor_field = "updated" parse_response_root = "issues" def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: @@ -1062,6 +1080,9 @@ def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, record["issueId"] = record["id"] record["id"] = "-".join([str(stream_slice["sprint_id"]), record["id"]]) record["sprintId"] = stream_slice["sprint_id"] + issue_fields = record["fields"] + record["created"] = issue_fields.get("created") + record["updated"] = issue_fields.get("updated") or issue_fields.get("created") return record @@ -1081,18 +1102,48 @@ class Users(JiraStream): https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-users/#api-rest-api-3-users-search-get """ - primary_key = None + primary_key = "accountId" + use_cache = True - def __init__(self, domain: str, projects: List[str], max_results: int, **kwargs): + def __init__(self, domain: str, projects: List[str], **kwargs): super(JiraStream, self).__init__(**kwargs) self._domain = domain self._projects = projects - self._max_results = max_results + self._max_results = 100 + self._total = self._max_results + self._startAt = 0 + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + params = {} + response_data = response.json() + + if users_returned := len(response_data): + self._total = self._total + users_returned + self._startAt = self._startAt + users_returned + params["startAt"] = self._startAt + params["maxResults"] = self._max_results + + return params def path(self, **kwargs) -> str: - if int(self._max_results) > 0: - return "user/search?maxResults=" + str(self._max_results) + "&query=" - return "user/search?query=" + return "users/search" + + +class UsersGroupsDetailed(JiraStream): + """ + https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-comments/#api-rest-api-3-issue-issueidorkey-comment-get + """ + + primary_key = "accountId" + + def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: + key = stream_slice["accountId"] + return f"user?accountId={key}&expand=groups,applicationRoles" + + def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + users_stream = Users(authenticator=self.authenticator, domain=self._domain, projects=self._projects) + for user in users_stream.read_records(sync_mode=SyncMode.full_refresh): + yield from super().read_records(stream_slice={"accountId": user["accountId"]}, **kwargs) class Workflows(JiraStream): diff --git a/airbyte-integrations/connectors/source-k6-cloud/.dockerignore b/airbyte-integrations/connectors/source-k6-cloud/.dockerignore new file mode 100644 index 0000000000000..9e51dcd8f57b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_k6_cloud +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-k6-cloud/Dockerfile b/airbyte-integrations/connectors/source-k6-cloud/Dockerfile new file mode 100644 index 0000000000000..ae2f081875056 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && 
apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_k6_cloud ./source_k6_cloud + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-k6-cloud diff --git a/airbyte-integrations/connectors/source-k6-cloud/README.md b/airbyte-integrations/connectors/source-k6-cloud/README.md new file mode 100644 index 0000000000000..6cb5fc82fd7a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/README.md @@ -0,0 +1,79 @@ +# K6 Cloud Source + +This is the repository for the K6 Cloud configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/k6-cloud). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-k6-cloud:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/k6-cloud) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_k6_cloud/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source k6-cloud test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-k6-cloud:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-k6-cloud:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-k6-cloud:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-k6-cloud:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-k6-cloud:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-k6-cloud:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your integration tests with docker, run the `acceptance-test-docker.sh` script from this connector's directory. + +### Using gradle to run tests +All commands should be run from the airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-k6-cloud:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-k6-cloud:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes pass unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-k6-cloud/__init__.py b/airbyte-integrations/connectors/source-k6-cloud/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml b/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml new file mode 100644 index 0000000000000..4ee04a5b1623d --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-config.yml @@ -0,0 +1,26 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-k6-cloud:dev +tests: + spec: + - spec_path: "source_k6_cloud/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["k6-tests"] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-k6-cloud/build.gradle b/airbyte-integrations/connectors/source-k6-cloud/build.gradle new file mode 100644 index 0000000000000..7b54fb7876c8c --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_k6_cloud' +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/__init__.py b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..3759423f255ea --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "organizations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "k6-tests", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "projects", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..69c76bda271ba --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_token": "64df5a9991c177f3a08bde53c5b95ce7006b160cf11ba30eb3465f49c0072948" +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/sample_config.json new file mode 100644 index 0000000000000..79adc7c72c439 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_token": "64d1ba30eb34f5a9b95ce79991c177f3a08006b160cf165f4bde53c5c0072948" +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git 
a/airbyte-integrations/connectors/source-k6-cloud/main.py b/airbyte-integrations/connectors/source-k6-cloud/main.py new file mode 100644 index 0000000000000..f2aa3164abd68 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_k6_cloud import SourceK6Cloud + +if __name__ == "__main__": + source = SourceK6Cloud() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-k6-cloud/requirements.txt b/airbyte-integrations/connectors/source-k6-cloud/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-k6-cloud/setup.py b/airbyte-integrations/connectors/source-k6-cloud/setup.py new file mode 100644 index 0000000000000..6366423287464 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_k6_cloud", + description="Source implementation for K6 Cloud.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/__init__.py b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/__init__.py new file mode 100644 index 0000000000000..de24f17bd660c --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceK6Cloud + +__all__ = ["SourceK6Cloud"] diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/k6_cloud.yaml b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/k6_cloud.yaml new file mode 100644 index 0000000000000..320bddaafcef6 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/k6_cloud.yaml @@ -0,0 +1,91 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: + - "{{ options['name'] }}" + requester: + url_base: "https://api.k6.io" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_token'] }}" + + increment_paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "PageIncrement" + page_size: 32 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "body_data" + field_name: "page_size" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + organizations_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + $options: + name: "organizations" + primary_key: "id" + path: "/v3/organizations" + organizations_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.organizations_stream)" + parent_key: id + stream_slice_field: id + projects_stream: + $ref: "*ref(definitions.base_stream)" + type: DeclarativeStream + $options: + name: "projects" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/v3/organizations/{{ stream_slice.id }}/projects" + stream_slicer: + $ref: "*ref(definitions.organizations_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + tests_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + requester: + $ref: "*ref(definitions.requester)" + $options: + name: "k6-tests" + primary_key: "id" + path: "loadtests/v2/tests" + +streams: + - "*ref(definitions.organizations_stream)" + - "*ref(definitions.projects_stream)" + - "*ref(definitions.tests_stream)" + +check: + stream_names: + - "organizations" diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/k6-tests.json b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/k6-tests.json new file mode 100644 index 0000000000000..9f04456f60fe5 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/k6-tests.json @@ -0,0 +1,33 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "project_id": { + "type": "integer" + }, + "user_id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "created": { + "type": "string" + }, + "updated": { + "type": "string" + }, + "last_test_run_id": { + "type": "string" + }, + "test_run_ids": { + "type": "array" + }, + "script": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/organizations.json 
b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/organizations.json new file mode 100644 index 0000000000000..c50c400a8ee3a --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/organizations.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "owner_id": { + "type": "integer" + }, + "description": { + "type": "string" + }, + "billing_address": { + "type": "string" + }, + "billing_country": { + "type": "string" + }, + "billing_email": { + "type": "string" + }, + "vat_number": { + "type": "string" + }, + "created": { + "type": "string" + }, + "updated": { + "type": "string" + }, + "is_default": { + "type": "boolean" + }, + "is_saml_org": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/projects.json b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/projects.json new file mode 100644 index 0000000000000..a83469ba7a569 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/schemas/projects.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "organization_id": { + "type": "integer" + }, + "created": { + "type": "string" + }, + "updated": { + "type": "string" + }, + "is_default": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/source.py b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/source.py new file mode 100644 index 0000000000000..cac9732cd675f --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceK6Cloud(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "k6_cloud.yaml"}) diff --git a/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/spec.yaml b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/spec.yaml new file mode 100644 index 0000000000000..f626cb7532d08 --- /dev/null +++ b/airbyte-integrations/connectors/source-k6-cloud/source_k6_cloud/spec.yaml @@ -0,0 +1,17 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/k6-cloud +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: K6 Cloud Spec + type: object + required: + - api_token + additionalProperties: true + properties: + api_token: + title: Api Token + type: string + description: >- + Your API Token. See here. The key is + case sensitive. 
+ airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-kafka/Dockerfile b/airbyte-integrations/connectors/source-kafka/Dockerfile index c16cd48974bb2..6b1e8104bae87 100644 --- a/airbyte-integrations/connectors/source-kafka/Dockerfile +++ b/airbyte-integrations/connectors/source-kafka/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-kafka COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.2.0 +LABEL io.airbyte.version=0.2.2 LABEL io.airbyte.name=airbyte/source-kafka diff --git a/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml b/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml new file mode 100644 index 0000000000000..6bebc5793b0a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-kafka:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-kafka/build.gradle b/airbyte-integrations/connectors/source-kafka/build.gradle index 6ef80b0db86e7..d06782d151507 100644 --- a/airbyte-integrations/connectors/source-kafka/build.gradle +++ b/airbyte-integrations/connectors/source-kafka/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-kafka/src/main/java/io/airbyte/integrations/source/kafka/format/JsonFormat.java b/airbyte-integrations/connectors/source-kafka/src/main/java/io/airbyte/integrations/source/kafka/format/JsonFormat.java index c34fe80dd56e2..e5670b96bd691 100644 --- a/airbyte-integrations/connectors/source-kafka/src/main/java/io/airbyte/integrations/source/kafka/format/JsonFormat.java +++ b/airbyte-integrations/connectors/source-kafka/src/main/java/io/airbyte/integrations/source/kafka/format/JsonFormat.java @@ -127,6 +127,7 @@ public AutoCloseableIterator read() { } consumerRecords.forEach(record -> { + record_count.getAndIncrement(); recordsList.add(record); }); consumer.commitAsync(); diff --git a/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json b/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json index 60ddd5e0c343e..5a0bdcbcb8c0a 100644 --- a/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-kafka/src/main/resources/spec.json @@ -9,7 +9,7 @@ "title": "Kafka Source Spec", "type": "object", "required": ["bootstrap_servers", "subscription", "protocol"], - "additionalProperties": false, + "additionalProperties": true, "properties": { "MessageFormat": { "title": "MessageFormat", @@ -21,8 +21,7 @@ "properties": { "deserialization_type": { "type": "string", - "enum": ["JSON"], - "default": "JSON" + "const": "JSON" } } }, @@ -30,9 +29,7 @@ "title": "AVRO", "properties": { "deserialization_type": { - "type": "string", - "enum": ["AVRO"], - "default": "AVRO" + "const": "AVRO" }, "deserialization_strategy": { "type": "string", @@ -77,9 +74,7 @@ "subscription_type": { "description": "Manually assign a list of partitions to this consumer. This interface does not allow for incremental assignment and will replace the previous assignment (if there is one).\nIf the given list of topic partitions is empty, it is treated the same as unsubscribe().", "type": "string", - "const": "assign", - "enum": ["assign"], - "default": "assign" + "const": "assign" }, "topic_partitions": { "title": "List of topic:partition Pairs", @@ -95,9 +90,7 @@ "subscription_type": { "description": "The Topic pattern from which the records will be read.", "type": "string", - "const": "subscribe", - "enum": ["subscribe"], - "default": "subscribe" + "const": "subscribe" }, "topic_pattern": { "title": "Topic Pattern", @@ -143,8 +136,7 @@ "properties": { "security_protocol": { "type": "string", - "enum": ["PLAINTEXT"], - "default": "PLAINTEXT" + "const": "PLAINTEXT" } } }, @@ -158,15 +150,13 @@ "properties": { "security_protocol": { "type": "string", - "enum": ["SASL_PLAINTEXT"], - "default": "SASL_PLAINTEXT" + "const": "SASL_PLAINTEXT" }, "sasl_mechanism": { "title": "SASL Mechanism", "description": "The SASL mechanism used for client connections. 
This may be any mechanism for which a security provider is available.", "type": "string", - "default": "PLAIN", - "enum": ["PLAIN"] + "const": "PLAIN" }, "sasl_jaas_config": { "title": "SASL JAAS Config", @@ -187,8 +177,7 @@ "properties": { "security_protocol": { "type": "string", - "enum": ["SASL_SSL"], - "default": "SASL_SSL" + "const": "SASL_SSL" }, "sasl_mechanism": { "title": "SASL Mechanism", diff --git a/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..520d509e91548 --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/dummy_config.json @@ -0,0 +1,10 @@ +{ + "bootstrap_servers": "default", + "subscription": { + "subscription_type": "assign", + "topic_partitions": "default" + }, + "protocol": { + "security_protocol": "PLAINTEXT" + } +} diff --git a/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..3eae6e008ad7b --- /dev/null +++ b/airbyte-integrations/connectors/source-kafka/src/test-integration/resources/expected_spec.json @@ -0,0 +1,275 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/kafka", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Kafka Source Spec", + "type": "object", + "required": ["bootstrap_servers", "subscription", "protocol"], + "additionalProperties": true, + "properties": { + "MessageFormat": { + "title": "MessageFormat", + "type": "object", + "description": "The serialization used based on this ", + "oneOf": [ + { + "title": "JSON", + "properties": { + "deserialization_type": { + "type": "string", + "const": "JSON" + } + } + }, + { + "title": "AVRO", + "properties": { + "deserialization_type": { + "const": "AVRO" + }, + "deserialization_strategy": { + "type": "string", + "enum": [ + "TopicNameStrategy", + "RecordNameStrategy", + "TopicRecordNameStrategy" + ], + "default": "TopicNameStrategy" + }, + "schema_registry_url": { + "type": "string", + "examples": ["http://localhost:8081"] + }, + "schema_registry_username": { + "type": "string", + "default": "" + }, + "schema_registry_password": { + "type": "string", + "default": "" + } + } + } + ] + }, + "bootstrap_servers": { + "title": "Bootstrap Servers", + "description": "A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. This list should be in the form host1:port1,host2:port2,.... 
Since these servers are just used for the initial connection to discover the full cluster membership (which may change dynamically), this list need not contain the full set of servers (you may want more than one, though, in case a server is down).", + "type": "string", + "examples": ["kafka-broker1:9092,kafka-broker2:9092"] + }, + "subscription": { + "title": "Subscription Method", + "type": "object", + "description": "You can choose to manually assign a list of partitions, or subscribe to all topics matching specified pattern to get dynamically assigned partitions.", + "oneOf": [ + { + "title": "Manually assign a list of partitions", + "required": ["subscription_type", "topic_partitions"], + "properties": { + "subscription_type": { + "description": "Manually assign a list of partitions to this consumer. This interface does not allow for incremental assignment and will replace the previous assignment (if there is one).\nIf the given list of topic partitions is empty, it is treated the same as unsubscribe().", + "type": "string", + "const": "assign" + }, + "topic_partitions": { + "title": "List of topic:partition Pairs", + "type": "string", + "examples": ["sample.topic:0, sample.topic:1"] + } + } + }, + { + "title": "Subscribe to all topics matching specified pattern", + "required": ["subscription_type", "topic_pattern"], + "properties": { + "subscription_type": { + "description": "The Topic pattern from which the records will be read.", + "type": "string", + "const": "subscribe" + }, + "topic_pattern": { + "title": "Topic Pattern", + "type": "string", + "examples": ["sample.topic"] + } + } + } + ] + }, + "test_topic": { + "title": "Test Topic", + "description": "The Topic to test in case the Airbyte can consume messages.", + "type": "string", + "examples": ["test.topic"] + }, + "group_id": { + "title": "Group ID", + "description": "The Group ID is how you distinguish different consumer groups.", + "type": "string", + "examples": ["group.id"] + }, + "max_poll_records": { + "title": "Max Poll Records", + "description": "The maximum number of records returned in a single call to poll(). Note, that max_poll_records does not impact the underlying fetching behavior. The consumer will cache the records from each fetch request and returns them incrementally from each poll.", + "type": "integer", + "default": 500 + }, + "polling_time": { + "title": "Polling Time", + "description": "Amount of time Kafka connector should try to poll for messages.", + "type": "integer", + "default": 100 + }, + "protocol": { + "title": "Protocol", + "type": "object", + "description": "The Protocol used to communicate with brokers.", + "oneOf": [ + { + "title": "PLAINTEXT", + "required": ["security_protocol"], + "properties": { + "security_protocol": { + "type": "string", + "const": "PLAINTEXT" + } + } + }, + { + "title": "SASL PLAINTEXT", + "required": [ + "security_protocol", + "sasl_mechanism", + "sasl_jaas_config" + ], + "properties": { + "security_protocol": { + "type": "string", + "const": "SASL_PLAINTEXT" + }, + "sasl_mechanism": { + "title": "SASL Mechanism", + "description": "The SASL mechanism used for client connections. 
This may be any mechanism for which a security provider is available.", + "type": "string", + "const": "PLAIN" + }, + "sasl_jaas_config": { + "title": "SASL JAAS Config", + "description": "The JAAS login context parameters for SASL connections in the format used by JAAS configuration files.", + "type": "string", + "default": "", + "airbyte_secret": true + } + } + }, + { + "title": "SASL SSL", + "required": [ + "security_protocol", + "sasl_mechanism", + "sasl_jaas_config" + ], + "properties": { + "security_protocol": { + "type": "string", + "const": "SASL_SSL" + }, + "sasl_mechanism": { + "title": "SASL Mechanism", + "description": "The SASL mechanism used for client connections. This may be any mechanism for which a security provider is available.", + "type": "string", + "default": "GSSAPI", + "enum": [ + "GSSAPI", + "OAUTHBEARER", + "SCRAM-SHA-256", + "SCRAM-SHA-512", + "PLAIN" + ] + }, + "sasl_jaas_config": { + "title": "SASL JAAS Config", + "description": "The JAAS login context parameters for SASL connections in the format used by JAAS configuration files.", + "type": "string", + "default": "", + "airbyte_secret": true + } + } + } + ] + }, + "client_id": { + "title": "Client ID", + "description": "An ID string to pass to the server when making requests. The purpose of this is to be able to track the source of requests beyond just ip/port by allowing a logical application name to be included in server-side request logging.", + "type": "string", + "examples": ["airbyte-consumer"] + }, + "enable_auto_commit": { + "title": "Enable Auto Commit", + "description": "If true, the consumer's offset will be periodically committed in the background.", + "type": "boolean", + "default": true + }, + "auto_commit_interval_ms": { + "title": "Auto Commit Interval, ms", + "description": "The frequency in milliseconds that the consumer offsets are auto-committed to Kafka if enable.auto.commit is set to true.", + "type": "integer", + "default": 5000 + }, + "client_dns_lookup": { + "title": "Client DNS Lookup", + "description": "Controls how the client uses DNS lookups. If set to use_all_dns_ips, connect to each returned IP address in sequence until a successful connection is established. After a disconnection, the next IP is used. Once all IPs have been used once, the client resolves the IP(s) from the hostname again. If set to resolve_canonical_bootstrap_servers_only, resolve each bootstrap address into a list of canonical names. After the bootstrap phase, this behaves the same as use_all_dns_ips. If set to default (deprecated), attempt to connect to the first IP address returned by the lookup, even if the lookup returns multiple IP addresses.", + "type": "string", + "default": "use_all_dns_ips", + "enum": [ + "default", + "use_all_dns_ips", + "resolve_canonical_bootstrap_servers_only" + ] + }, + "retry_backoff_ms": { + "title": "Retry Backoff, ms", + "description": "The amount of time to wait before attempting to retry a failed request to a given topic partition. This avoids repeatedly sending requests in a tight loop under some failure scenarios.", + "type": "integer", + "default": 100 + }, + "request_timeout_ms": { + "title": "Request Timeout, ms", + "description": "The configuration controls the maximum amount of time the client will wait for the response of a request. 
If the response is not received before the timeout elapses the client will resend the request if necessary or fail the request if retries are exhausted.", + "type": "integer", + "default": 30000 + }, + "receive_buffer_bytes": { + "title": "Receive Buffer, bytes", + "description": "The size of the TCP receive buffer (SO_RCVBUF) to use when reading data. If the value is -1, the OS default will be used.", + "type": "integer", + "default": 32768 + }, + "auto_offset_reset": { + "title": "Auto Offset Reset", + "description": "What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server - earliest: automatically reset the offset to the earliest offset, latest: automatically reset the offset to the latest offset, none: throw exception to the consumer if no previous offset is found for the consumer's group, anything else: throw exception to the consumer.", + "type": "string", + "default": "latest", + "enum": ["latest", "earliest", "none"] + }, + "repeated_calls": { + "title": "Repeated Calls", + "description": "The number of repeated calls to poll() if no messages were received.", + "type": "integer", + "default": 3 + }, + "max_records_process": { + "title": "Maximum Records", + "description": "The Maximum to be processed per execution", + "type": "integer", + "default": 100000 + } + } + }, + "supportsIncremental": true, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [], + "supported_source_sync_modes": ["append"] +} diff --git a/airbyte-integrations/connectors/source-klarna/.dockerignore b/airbyte-integrations/connectors/source-klarna/.dockerignore new file mode 100644 index 0000000000000..afe3b8fc5abbd --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_klarna +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-klarna/Dockerfile b/airbyte-integrations/connectors/source-klarna/Dockerfile new file mode 100644 index 0000000000000..ce2d2878dd50f --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_klarna ./source_klarna + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-klarna diff --git a/airbyte-integrations/connectors/source-klarna/README.md b/airbyte-integrations/connectors/source-klarna/README.md new file mode 100644 index 0000000000000..7491793a46be3 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/README.md @@ -0,0 +1,132 @@ +# Klarna Source + +This is the repository for the Klarna source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/klarna). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-klarna:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/klarna) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_klarna/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source klarna test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-klarna:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-klarna:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-klarna:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-klarna:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-klarna:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-klarna:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside the `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker, run the `acceptance-test-docker.sh` script from this connector's directory. + +### Using gradle to run tests +All commands should be run from the airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-klarna:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-klarna:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes pass unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1.
Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml b/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml new file mode 100644 index 0000000000000..ee1253808a3d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-klarna:dev +tests: + spec: + - spec_path: "source_klarna/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes + # incremental: # TODO Implementation of incremental sync is possible + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-klarna/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-klarna/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-klarna/build.gradle b/airbyte-integrations/connectors/source-klarna/build.gradle new file mode 100644 index 0000000000000..0dfdcdb725864 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_klarna' +} diff --git a/airbyte-integrations/connectors/source-klarna/integration_tests/__init__.py b/airbyte-integrations/connectors/source-klarna/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-klarna/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-klarna/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-klarna/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-klarna/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..7d03865215b61 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/integration_tests/configured_catalog.json @@ -0,0 +1,26 @@ +{ + "streams": [ + { + "stream": { + "name": "payouts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": null, + "default_cursor_field": null + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "transactions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_cursor": null, + "default_cursor_field": null + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-klarna/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-klarna/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..7f60f63f406b1 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "region": "eu", + "playground": true, + "username": "PK57312_4d86a3a57622", + "password": "g9QhHeZtkakKBX26" +} diff --git a/airbyte-integrations/connectors/source-klarna/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-klarna/integration_tests/sample_config.json new file mode 100644 index 0000000000000..3d087e2b34df2 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "region": "eu", + "playground": true, + "username": "", + "password": "" +} diff --git a/airbyte-integrations/connectors/source-klarna/main.py b/airbyte-integrations/connectors/source-klarna/main.py new file mode 100644 index 0000000000000..3d6dca25467cf --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_klarna import SourceKlarna + +if __name__ == "__main__": + source = SourceKlarna() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-klarna/requirements.txt b/airbyte-integrations/connectors/source-klarna/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
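The `main.py` added above for source-klarna simply instantiates `SourceKlarna` and hands `sys.argv[1:]` to the Airbyte CDK's `launch` helper, so every CLI command shown in the README (`spec`, `check`, `discover`, `read`) goes through the same entrypoint. Below is a minimal sketch, not part of this PR, of driving that entrypoint directly for a quick local check; the `secrets/config.json` path is an assumption that mirrors the README instructions:

```python
# Minimal sketch (not part of this PR): drive the same CDK entrypoint that
# source-klarna/main.py wires up, without going through the CLI.
# Assumes the connector package is installed and secrets/config.json exists,
# as described in the connector README.
import sys

from airbyte_cdk.entrypoint import launch
from source_klarna import SourceKlarna

if __name__ == "__main__":
    # Equivalent to: python main.py check --config secrets/config.json
    args = sys.argv[1:] or ["check", "--config", "secrets/config.json"]
    launch(SourceKlarna(), args)
```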
diff --git a/airbyte-integrations/connectors/source-klarna/setup.py b/airbyte-integrations/connectors/source-klarna/setup.py new file mode 100644 index 0000000000000..0521ee4647c41 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/setup.py @@ -0,0 +1,28 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.2", ""] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "responses~=0.22.0", + "source-acceptance-test", +] + +setup( + name="source_klarna", + description="Source implementation for Klarna.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-klarna/source_klarna/__init__.py b/airbyte-integrations/connectors/source-klarna/source_klarna/__init__.py new file mode 100644 index 0000000000000..58ba02ddefa0a --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/source_klarna/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceKlarna + +__all__ = ["SourceKlarna"] diff --git a/airbyte-integrations/connectors/source-klarna/source_klarna/schemas/payouts.json b/airbyte-integrations/connectors/source-klarna/source_klarna/schemas/payouts.json new file mode 100644 index 0000000000000..e600cca16b698 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/source_klarna/schemas/payouts.json @@ -0,0 +1,135 @@ +{ + "type": "object", + "additionalProperties": true, + "required": [ + "totals", + "payment_reference", + "payout_date", + "currency_code", + "merchant_settlement_type", + "merchant_id" + ], + "properties": { + "totals": { + "type": "object", + "additionalProperties": true, + "properties": { + "commission_amount": { + "description": "The total amount of commissions, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "repay_amount": { + "description": "The total amount of money that has been repaid by the merchant from the debt to Klarna, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "sale_amount": { + "description": "The total amount of sales, in minor units", + "example": 500, + "type": "integer", + "format": "int64" + }, + "holdback_amount": { + "description": "The total amount of money withheld by Klarna, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "tax_amount": { + "description": "The total amount of tax, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "settlement_amount": { + "description": "The total amount of the settlement in question, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "fee_correction_amount": { + "description": "The total amount of fee correction, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "reversal_amount": { + "description": "The total amount of reversals, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "release_amount": { + "description": "The total amount of money released from holdback by Klarna, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "return_amount": { + "description": "The total amount of 
returns, in minor units", + "example": 550, + "type": "integer", + "format": "int64" + }, + "fee_amount": { + "description": "The total amount of fees, in minor units", + "example": 500, + "type": "integer", + "format": "int64" + }, + "charge_amount": { + "description": "The total amount of charges, in minor units. The additional field detailed_type contains the purpose of the charge", + "example": 500, + "type": "integer", + "format": "int64" + }, + "credit_amount": { + "description": "The total amount of credits, in minor units. The additional field detailed_type contains the purpose of the credit", + "example": 500, + "type": "integer", + "format": "int64" + } + } + }, + "payment_reference": { + "description": "The reference id of the payout", + "example": "XISA93DJ", + "type": "string" + }, + "payout_date": { + "description": "ISO 8601 formatted date-time string", + "example": "2016-12-14T07:52:26Z", + "type": "string", + "format": "date-time" + }, + "currency_code": { + "description": "ISO 4217 Currency Code. Like USD, EUR, AUD or GBP.", + "example": "USD", + "type": "string" + }, + "currency_code_of_registration_country": { + "type": "string", + "description": "ISO 4217 Currency Code of the country you are registered in.", + "example": "EUR" + }, + "merchant_settlement_type": { + "description": "Whether the amounts are net or gross", + "example": "NET", + "type": "string", + "enum": ["GROSS", "NET", "GROSS_FEE"] + }, + "merchant_id": { + "description": "The merchant id", + "type": "string" + }, + "transactions": { + "description": "Link to the transactions that are part of this payout", + "example": "https://{settlements_api}/transactions?payment_reference=XISA93DJ", + "type": "string" + } + }, + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-klarna/source_klarna/schemas/transactions.json b/airbyte-integrations/connectors/source-klarna/source_klarna/schemas/transactions.json new file mode 100644 index 0000000000000..2ab0d30627c14 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/source_klarna/schemas/transactions.json @@ -0,0 +1,191 @@ +{ + "type": "object", + "additionalProperties": true, + "required": ["capture_id", "sale_date", "capture_date", "order_id"], + "properties": { + "amount": { + "description": "Total amount of the specific transaction, in minor units", + "example": 2000, + "type": "integer", + "format": "int64" + }, + "capture_id": { + "description": "The Klarna assigned id reference of a specific capture", + "example": "33db6f16-9f43-43fa-a587-cc51411c98e4", + "type": "string" + }, + "merchant_reference1": { + "description": "Merchant assigned reference, typically a reference to an order management system id", + "type": "string" + }, + "sale_date": { + "description": "ISO 8601 formatted date-time string", + "example": "2016-12-14T07:52:26Z", + "type": "string", + "format": "date-time" + }, + "type": { + "description": "The type of transaction.", + "example": "SALE", + "type": "string", + "enum": [ + "COMMISSION", + "SALE", + "REVERSAL", + "RETURN", + "TAX", + "FEE", + "FEE_REFUND", + "CORRECTION", + "REVERSAL_MERCHANT_PROTECTION", + "CHARGE", + "CREDIT", + "HOLDBACK", + "RELEASE" + ] + }, + "capture_date": { + "description": "ISO 8601 formatted date-time string", + "example": "2016-12-14T07:52:26Z", + "type": "string", + "format": "date-time" + }, + "payment_reference": { + "description": "Reference to the specific payout the transaction is part of, if available.", + "example": "XISA93DJ", + "type": 
"string" + }, + "order_id": { + "description": "The Klarna assigned order id reference", + "example": "ce17b4cb-147f-48b7-b8e6-dde2fa397f04", + "type": "string", + "format": "uuid" + }, + "payout": { + "description": "Link to the payout that this transaction is part of", + "example": "https://{settlements_api}/payouts/XISA93DJ", + "type": "string" + }, + "refund_id": { + "description": "The Klarna assigned id reference of a specific refund", + "example": "ef1baa1f-b42e-44be-b9e4-4b94510b53e5", + "type": "string" + }, + "short_order_id": { + "description": "The Klarna assigned short order id reference", + "example": "shortrid", + "type": "string" + }, + "merchant_reference2": { + "description": "Merchant assigned reference, typically a reference to an order management system id", + "type": "string" + }, + "currency_code": { + "description": "ISO 4217 Currency Code. Like USD, EUR, AUD or GBP.", + "example": "USD", + "type": "string" + }, + "purchase_country": { + "type": "string", + "description": "ISO Alpha-2 Country Code", + "example": "PL" + }, + "vat_rate": { + "type": "integer", + "description": "VAT (Value added tax) rate on Klarna fees", + "example": 2000 + }, + "vat_amount": { + "type": "integer", + "description": "VAT (Value added tax) amount on Klarna fees, in minor units", + "example": 1000 + }, + "shipping_country": { + "type": "string", + "description": "ISO Alpha-2 Country Code", + "example": "PL" + }, + "initial_payment_method_type": { + "type": "string", + "description": "Payment method the consumer chose during checkout", + "example": "direct_debit" + }, + "initial_number_of_installments": { + "type": "integer", + "description": "Number of installments the consumer chose during checkout in case of installment payments", + "example": 3 + }, + "initial_payment_method_monthly_downpayments": { + "type": "integer", + "description": "Number of monthly downpayments that were chosen during the checkout in case of installment payments.", + "example": 12 + }, + "merchant_capture_reference": { + "type": "string", + "description": "Your internal reference to the capture, that has been submitted during capturing an order via API" + }, + "merchant_refund_reference": { + "type": "string", + "description": "Your internal reference to the refund, that has been submitted during refunding an order via API" + }, + "detailed_type": { + "type": "string", + "description": "Detailed description of the transaction type", + "example": "PURCHASE", + "enum": [ + "COMMISSION", + "CREDITED_CORRECTION", + "PURCHASE_FEE_PERCENTAGE", + "PURCHASE_FEE_PERCENTAGE_REFUND", + "LATE_RETURN_FEE", + "PURCHASE_FEE_FIXED", + "EXPIRY_FEE_GROSS", + "EXPIRY_FEE", + "SERVICING_FEE", + "RETURN_FEE", + "EXTRA_INVOICE_FEE", + "PURCHASE_RETURN", + "COMMISSION_RETURN", + "REVERSAL", + "FRAUD_POLICY_CHARGE", + "COMMISSION_RETURN_GROSS", + "FRAUD_POLICY_CREDIT_NET", + "PURCHASE", + "MANUAL_ENTRY", + "LOAN_PAYOUT", + "LOAN_AMORTISATION", + "LOAN_FEE", + "FEE_REFUND", + "PURCHASE_COMMISSION_PERCENTAGE", + "EXTEND_DUE_DATE_FEE", + "TRANSFER_FROM_LEGACY_INTEGRATION", + "FIXED_FEE_CORRECTION_UK", + "PERCENTAGE_FEE_CORRECTION_UK", + "VAT_ON_FEE_CORRECTION_UK", + "FIXED_FEE_CORRECTION_SE", + "PERCENTAGE_FEE_CORRECTION_SE", + "PERCENTAGE_FEE_CORRECTION", + "FIXED_FEE_CORRECTION", + "ROLLING_RESERVE", + "PERCENTAGE_FEES", + "PAYMENT_REMINDER", + "CORRECTION", + "UNDER_REVIEW", + "INSUFFICIENT_BANK_ACCOUNT_DETAILS", + "DISPUTE_FEE", + "DISPUTE_FEE_REFUND" + ] + }, + "tax_in_currency_of_registration_country": { + "type": "integer", + 
"description": "The tax amount on the respective fee, converted into the currency of your registration country. In case you are a German merchant selling in another currency then EUR or a Swedish merchant selling in another currency then SEK, we convert the VAT amount on the Klarna fees into the currency of the country you are registered in, based on the exchange rate of the capture date.", + "example": 1000 + }, + "currency_code_of_registration_country": { + "type": "string", + "description": "ISO 4217 Currency Code of the country you are registered in.", + "example": "EUR" + } + }, + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-klarna/source_klarna/source.py b/airbyte-integrations/connectors/source-klarna/source_klarna/source.py new file mode 100644 index 0000000000000..0c07cee6b4fc9 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/source_klarna/source.py @@ -0,0 +1,118 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from urllib.parse import parse_qs, urlparse + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.requests_native_auth import BasicHttpAuthenticator + + +# Basic full refresh stream +class KlarnaStream(HttpStream, ABC): + def __init__(self, region: str, playground: bool, authenticator: BasicHttpAuthenticator, **kwargs): + self.region = region + self.playground = playground + self.kwargs = kwargs + super().__init__(authenticator=authenticator) + + page_size = 500 + data_api_field: str + + @property + def url_base(self) -> str: + playground_path = "playground." 
if self.playground else "" + if self.region == "eu": + endpoint = f"https://api.{playground_path}klarna.com/" + else: + endpoint = f"https://api-{self.region}.{playground_path}klarna.com/" + return endpoint + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + response_json = response.json() + if "next" in response_json.get("pagination", {}).keys(): + parsed_url = urlparse(response_json["pagination"]["next"]) + query_params = parse_qs(parsed_url.query) + # noinspection PyTypeChecker + return query_params + else: + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + if next_page_token: + return dict(next_page_token) + else: + return {"offset": 0, "size": self.page_size} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + :return an iterable containing each record in the response + """ + payouts = response.json().get(self.data_api_field, []) + yield from payouts + + +class Payouts(KlarnaStream): + """ + Payouts read from Klarna Settlements API https://developers.klarna.com/api/?json#settlements-api + """ + + primary_key = "payout_date" # TODO verify + data_api_field = "payouts" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "/settlements/v1/payouts" + + +class Transactions(KlarnaStream): + """ + Transactions read from Klarna Settlements API https://developers.klarna.com/api/?json#settlements-api + """ + + primary_key = "capture_id" # TODO verify + data_api_field = "transactions" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "/settlements/v1/transactions" + + +# Source +class SourceKlarna(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + """ + :param config: the user-input config object conforming to the connector's spec.yaml + :param logger: logger object + :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. + """ + try: + auth = BasicHttpAuthenticator(username=config["username"], password=config["password"]) + conn_test_stream = Transactions(authenticator=auth, **config) + conn_test_stream.page_size = 1 + conn_test_stream.next_page_token = lambda x: None + records = conn_test_stream.read_records(sync_mode=SyncMode.full_refresh) + # Try to read one value from records iterator + next(records, None) + return True, None + except Exception as e: + print(e) + return False, repr(e) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + """ + :param config: A Mapping of the user input configuration as defined in the connector spec. 
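+ :return: the streams exposed by this source, i.e. the Payouts and Transactions settlement streams.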
+ """ + auth = BasicHttpAuthenticator(username=config["username"], password=config["password"]) + return [Payouts(authenticator=auth, **config), Transactions(authenticator=auth, **config)] diff --git a/airbyte-integrations/connectors/source-klarna/source_klarna/spec.yaml b/airbyte-integrations/connectors/source-klarna/source_klarna/spec.yaml new file mode 100644 index 0000000000000..e2d7dfc9c71f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/source_klarna/spec.yaml @@ -0,0 +1,34 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/klarna +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Klarna Spec + type: object + required: + - region + - playground + - username + - password + additionalProperties: true + properties: + region: + title: Region + type: string + enum: + - eu + - us + - oc + description: Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs). Supported 'eu', 'us', 'oc' + playground: + title: Playground + type: boolean + description: Property defining whether the connector is used against the playground or the production environment + default: false + username: + title: Username + type: string + description: Consists of your Merchant ID (eid) - a unique number that identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication) + password: + title: Password + type: string + description: A string which is associated with your Merchant ID and is used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication) + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-klarna/unit_tests/__init__.py b/airbyte-integrations/connectors/source-klarna/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-klarna/unit_tests/conftest.py b/airbyte-integrations/connectors/source-klarna/unit_tests/conftest.py new file mode 100644 index 0000000000000..c1164156a062a --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/unit_tests/conftest.py @@ -0,0 +1,23 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import pytest +from airbyte_cdk.sources.streams.http.requests_native_auth import BasicHttpAuthenticator +from source_klarna import SourceKlarna +from source_klarna.source import KlarnaStream + + +@pytest.fixture(name="source_klarna") +def get_source_klarna(): + return SourceKlarna() + + +@pytest.fixture(name="klarna_config") +def get_klarna_config(): + return dict(playground=False, region="eu", username="user", password="password") + + +@pytest.fixture(name="klarna_stream") +def get_klarna_stream(klarna_config): + return KlarnaStream(authenticator=BasicHttpAuthenticator("", ""), **klarna_config) diff --git a/airbyte-integrations/connectors/source-klarna/unit_tests/test_source.py b/airbyte-integrations/connectors/source-klarna/unit_tests/test_source.py new file mode 100644 index 0000000000000..79616cc3b02c2 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/unit_tests/test_source.py @@ -0,0 +1,24 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# + +from unittest.mock import MagicMock + +import responses +from source_klarna.source import SourceKlarna + + +@responses.activate +def test_check_connection(mocker, source_klarna, klarna_config): + responses.add(responses.GET, "https://api.klarna.com/settlements/v1/transactions?offset=0&size=1", json={}) + + logger_mock, config_mock = MagicMock(), klarna_config + assert source_klarna.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker, klarna_config): + source = SourceKlarna() + config_mock = klarna_config + streams = source.streams(config_mock) + expected_streams_number = 2 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-klarna/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-klarna/unit_tests/test_streams.py new file mode 100644 index 0000000000000..782d85ef6ebf3 --- /dev/null +++ b/airbyte-integrations/connectors/source-klarna/unit_tests/test_streams.py @@ -0,0 +1,92 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from airbyte_cdk.sources.streams.http.requests_native_auth import BasicHttpAuthenticator +from source_klarna.source import KlarnaStream, Payouts, Transactions + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(KlarnaStream, "path", "v0/example_endpoint") + mocker.patch.object(KlarnaStream, "primary_key", "test_primary_key") + mocker.patch.object(KlarnaStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class, klarna_stream): + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"offset": 0, "size": 500} + assert klarna_stream.request_params(**inputs) == expected_params + + +@pytest.mark.parametrize( + "total,count,offset,next_,expected_params", + [ + (9, 4, 0, "https://api.playground.klarna.com/settlements/v1/payouts?offset=4&size=4", {"offset": ["4"], "size": ["4"]}), + (9, 4, 4, "https://api.playground.klarna.com/settlements/v1/payouts?offset=48&size=4", {"offset": ["48"], "size": ["4"]}), + ], +) +def test_next_page_token(patch_base_class, klarna_stream, total, count, offset, next_, expected_params): + response_mock = MagicMock() + response_mock.json.return_value = { + "pagination": { + "total": total, + "count": count, + "offset": offset, + "next": next_, + } + } + inputs = {"response": response_mock} + assert klarna_stream.next_page_token(**inputs) == expected_params + + +@pytest.mark.parametrize( + ("specific_klarna_stream", "response"), + [ + (Payouts, {"payouts": [{}]}), + (Transactions, {"transactions": [{}]}), + ], +) +def test_parse_response(patch_base_class, klarna_config, specific_klarna_stream, response): + mock_response = MagicMock() + mock_response.json.return_value = response + inputs = {"response": mock_response, "stream_state": {}} + stream = specific_klarna_stream(authenticator=BasicHttpAuthenticator("", ""), **klarna_config) + assert next(stream.parse_response(**inputs)) == {} + + +def test_request_headers(patch_base_class, klarna_stream): + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {} + assert klarna_stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class, klarna_stream): + expected_method = "GET" + assert klarna_stream.http_method == expected_method + + +@pytest.mark.parametrize( + 
("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry, klarna_stream): + response_mock = MagicMock() + response_mock.status_code = http_status + assert klarna_stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class, klarna_stream): + response_mock = MagicMock() + expected_backoff_time = None + assert klarna_stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-launchdarkly/.dockerignore b/airbyte-integrations/connectors/source-launchdarkly/.dockerignore new file mode 100644 index 0000000000000..972209ccb9cd6 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_launchdarkly +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-launchdarkly/Dockerfile b/airbyte-integrations/connectors/source-launchdarkly/Dockerfile new file mode 100644 index 0000000000000..74cb36c14a79e --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_launchdarkly ./source_launchdarkly + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-launchdarkly diff --git a/airbyte-integrations/connectors/source-launchdarkly/README.md b/airbyte-integrations/connectors/source-launchdarkly/README.md new file mode 100644 index 0000000000000..eb7656e96dae6 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/README.md @@ -0,0 +1,79 @@ +# Launchdarkly Source + +This is the repository for the Launchdarkly configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/launchdarkly). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-launchdarkly:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/launchdarkly) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_launchdarkly/spec.yaml` file. 
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file (an illustrative `secrets/config.json` is also sketched at the end of this README). + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source launchdarkly test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-launchdarkly:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-launchdarkly:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-launchdarkly:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-launchdarkly:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-launchdarkly:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-launchdarkly:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your acceptance tests in Docker, run the `acceptance-test-docker.sh` script in this connector's directory. + +### Using gradle to run tests +All commands should be run from the airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-launchdarkly:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-launchdarkly:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
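+For reference, a minimal illustrative `secrets/config.json` for this connector is sketched below; it assumes only the required `access_token` field from `source_launchdarkly/spec.yaml`, and the token value is a placeholder rather than a real credential:
+```
+{
+  "access_token": "<your-launchdarkly-access-token>"
+}
+```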
diff --git a/airbyte-integrations/connectors/source-launchdarkly/__init__.py b/airbyte-integrations/connectors/source-launchdarkly/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml b/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml new file mode 100644 index 0000000000000..9703c351904dd --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-config.yml @@ -0,0 +1,43 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-launchdarkly:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_launchdarkly/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: metrics + bypass_reason: "This stream can't be seeded in our sandbox account" +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + ignored_fields: + members: + - "_lastSeen" diff --git a/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-launchdarkly/build.gradle b/airbyte-integrations/connectors/source-launchdarkly/build.gradle new file mode 100644 index 0000000000000..aa38e150b9119 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_launchdarkly' +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/__init__.py b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..fe4c93dc393ca --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "projects", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "environments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "metrics", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "members", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "auditlog", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..e7823c2be36c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "access_token": "" +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/sample_config.json new file mode 100644 index 0000000000000..382d2458aa375 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "access_token": "api-e840575d-2e3d-4216-8999-d2072c13c0c6" +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/main.py b/airbyte-integrations/connectors/source-launchdarkly/main.py new file mode 100644 index 0000000000000..d058fe6754ce9 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_launchdarkly import SourceLaunchdarkly + +if __name__ == "__main__": + source = SourceLaunchdarkly() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-launchdarkly/requirements.txt b/airbyte-integrations/connectors/source-launchdarkly/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-launchdarkly/setup.py b/airbyte-integrations/connectors/source-launchdarkly/setup.py new file mode 100644 index 0000000000000..0369cb54cefce --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_launchdarkly", + description="Source implementation for Launchdarkly.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/__init__.py b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/__init__.py new file mode 100644 index 0000000000000..3053526fff394 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceLaunchdarkly + +__all__ = ["SourceLaunchdarkly"] diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/launchdarkly.yaml b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/launchdarkly.yaml new file mode 100644 index 0000000000000..2b21538ac544b --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/launchdarkly.yaml @@ -0,0 +1,114 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["items"] + requester: + url_base: "https://app.launchdarkly.com/api/v2" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "Authorization" + api_token: "{{ config['access_token'] }}" + offset_paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 20 + page_token_option: + field_name: "offset" + inject_into: "request_parameter" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.offset_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + projects_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "projects" + primary_key: "_id" + path: "/projects" + project_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.projects_stream)" + parent_key: key + stream_slice_field: project_key + environments_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "environments" + primary_key: "_id" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector)" + requester: + $ref: "*ref(definitions.requester)" + path: "/projects/{{ stream_slice.project_key }}/environments" + stream_slicer: + $ref: "*ref(definitions.project_stream_slicer)" + metrics_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "metrics" + primary_key: "_id" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector)" + requester: + $ref: "*ref(definitions.requester)" + path: "/metrics/{{ stream_slice.project_key }}" + stream_slicer: + $ref: "*ref(definitions.project_stream_slicer)" + members_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "members" + primary_key: "_id" + path: "/members" + audit_log_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "auditlog" + primary_key: "_id" + path: "/auditlog" + flags_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "flags" + primary_key: "key" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector)" + requester: + $ref: "*ref(definitions.requester)" + path: "/flags/{{ stream_slice.project_key }}" + stream_slicer: + $ref: "*ref(definitions.project_stream_slicer)" + +streams: + - "*ref(definitions.projects_stream)" + - "*ref(definitions.environments_stream)" + - "*ref(definitions.metrics_stream)" + - "*ref(definitions.members_stream)" + - "*ref(definitions.audit_log_stream)" + - "*ref(definitions.flags_stream)" + +check: + stream_names: + - "projects" diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/auditlog.json b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/auditlog.json new 
file mode 100644 index 0000000000000..d10c30631ce6c --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/auditlog.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "_accountId": { + "type": ["null", "string"] + }, + "date": { + "type": ["null", "integer"] + }, + "accesses": { + "type": ["null", "array"] + }, + "kind": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "shortDescription": { + "type": ["null", "string"] + }, + "comment": { + "type": ["null", "string"] + }, + "member": { + "type": ["null", "object"] + }, + "titleVerb": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/environments.json b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/environments.json new file mode 100644 index 0000000000000..af5088c100211 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/environments.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "_pubnub": { + "type": ["null", "object"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "apiKey": { + "type": ["null", "string"] + }, + "mobileKey": { + "type": ["null", "string"] + }, + "color": { + "type": ["null", "string"] + }, + "defaultTtl": { + "type": ["null", "integer"] + }, + "secureMode": { + "type": ["null", "boolean"] + }, + "defaultTrackEvents": { + "type": ["null", "boolean"] + }, + "requireComments": { + "type": ["null", "boolean"] + }, + "confirmChanges": { + "type": ["null", "boolean"] + }, + "tags": { + "type": ["null", "array"] + }, + "approvalSettings": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/flags.json b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/flags.json new file mode 100644 index 0000000000000..ccaa78b64d458 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/flags.json @@ -0,0 +1,60 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_maintainer": { + "type": ["null", "object"] + }, + "_version": { + "type": ["null", "integer"] + }, + "archived": { + "type": ["null", "boolean"] + }, + "clientSideAvailability": { + "type": ["null", "object"] + }, + "creationDate": { + "type": ["null", "integer"] + }, + "customProperties": { + "type": ["null", "object"] + }, + "description": { + "type": ["null", "string"] + }, + "environments": { + "type": ["null", "object"] + }, + "experiments": { + "type": ["null", "object"] + }, + "goalIds": { + "type": ["null", "array"] + }, + "includeInSnippet": { + "type": ["null", "boolean"] + }, + "key": { + "type": ["null", "string"] + }, + "maintainerId": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "tags": { + "type": ["null", "array"] + }, + "temporary": { + "type": ["null", "boolean"] + }, + "variationJsonSchema": { + "type": ["null", "object"] + }, + "variations": { + "type": ["null", "array"] + } + } +} diff --git 
a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/members.json b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/members.json new file mode 100644 index 0000000000000..af8303bbb1457 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/members.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "firstName": { + "type": ["null", "string"] + }, + "lastName": { + "type": ["null", "string"] + }, + "role": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "_pendingInvite": { + "type": ["null", "boolean"] + }, + "_verified": { + "type": ["null", "boolean"] + }, + "isBeta": { + "type": ["null", "boolean"] + }, + "customRoles": { + "type": ["null", "array"] + }, + "mfa": { + "type": ["null", "string"] + }, + "excludedDashboards": { + "type": ["null", "array"] + }, + "_lastSeen": { + "type": ["null", "integer"] + }, + "_lastSeenMetadata": { + "type": ["null", "object"] + }, + "creationDate": { + "type": ["null", "integer"] + }, + "oauthProviders": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/metrics.json b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/metrics.json new file mode 100644 index 0000000000000..11d3cefeb6f7d --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/metrics.json @@ -0,0 +1,63 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "key": { + "type": ["null", "string"] + }, + "experimentCount": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "kind": { + "type": ["null", "string"] + }, + "_attachedFlagCount": { + "type": ["null", "integer"] + }, + "_links": { + "type": ["null", "object"] + }, + "_site": { + "type": ["null", "object"] + }, + "_access": { + "type": ["null", "object"] + }, + "tags": { + "type": ["null", "array"] + }, + "_creationDate": { + "type": ["null", "integer"] + }, + "lastModified": { + "type": ["null", "object"] + }, + "maintainerId": { + "type": ["null", "string"] + }, + "_maintainer": { + "type": ["null", "object"] + }, + "description": { + "type": ["null", "string"] + }, + "isNumeric": { + "type": ["null", "boolean"] + }, + "successCriteria": { + "type": ["null", "string"] + }, + "unit": { + "type": ["null", "string"] + }, + "eventKey": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/projects.json b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/projects.json new file mode 100644 index 0000000000000..edcca4f6a0d71 --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/schemas/projects.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "key": { + "type": ["null", "string"] + }, + "includeInSnippetByDefault": { + "type": ["null", "boolean"] + }, + "defaultClientSideAvailability": { + "type": ["null", "object"] + }, + "name": { + "type": ["null", "string"] + }, + "tags": { + "type": ["null", "array"] + } + } +} diff --git 
a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/source.py b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/source.py new file mode 100644 index 0000000000000..55b7ceef44f5e --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceLaunchdarkly(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "launchdarkly.yaml"}) diff --git a/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/spec.yaml b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/spec.yaml new file mode 100644 index 0000000000000..8d4823e3e804a --- /dev/null +++ b/airbyte-integrations/connectors/source-launchdarkly/source_launchdarkly/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/launchdarkly +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Launchdarkly Spec + type: object + required: + - access_token + additionalProperties: true + properties: + access_token: + title: Access token + type: string + description: >- + Your Access token. See here. + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-lokalise/source_lokalise/lokalise.yaml b/airbyte-integrations/connectors/source-lokalise/source_lokalise/lokalise.yaml index 56ef4a46d18c3..bea4a94ed287a 100644 --- a/airbyte-integrations/connectors/source-lokalise/source_lokalise/lokalise.yaml +++ b/airbyte-integrations/connectors/source-lokalise/source_lokalise/lokalise.yaml @@ -3,7 +3,7 @@ version: "0.1.0" definitions: selector: extractor: - field_pointer: + field_pointer: - "{{ options['name'] }}" requester: @@ -114,7 +114,6 @@ definitions: primary_key: "translation_id" path: "/api2/projects/{{ config['project_id'] }}/translations" - streams: - "*ref(definitions.keys_stream)" - "*ref(definitions.languages_stream)" diff --git a/airbyte-integrations/connectors/source-mailersend/.dockerignore b/airbyte-integrations/connectors/source-mailersend/.dockerignore new file mode 100644 index 0000000000000..bdcf3c857308a --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_mailersend +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-mailersend/Dockerfile b/airbyte-integrations/connectors/source-mailersend/Dockerfile new file mode 100644 index 0000000000000..9eacdb43b4273 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_mailersend ./source_mailersend + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-mailersend diff --git a/airbyte-integrations/connectors/source-mailersend/README.md b/airbyte-integrations/connectors/source-mailersend/README.md new file mode 100644 index 0000000000000..53f11774addf6 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/README.md @@ -0,0 +1,79 @@ +# Mailersend Source + +This is the repository for the Mailersend configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/mailersend). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-mailersend:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/mailersend) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_mailersend/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source mailersend test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-mailersend:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-mailersend:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-mailersend:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-mailersend:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-mailersend:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-mailersend:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. 
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your acceptance tests in Docker, run the `acceptance-test-docker.sh` script in this connector's directory. + +### Using gradle to run tests +All commands should be run from the airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-mailersend:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-mailersend:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-mailersend/__init__.py b/airbyte-integrations/connectors/source-mailersend/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml new file mode 100644 index 0000000000000..46533de7f7447 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-mailersend:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_mailersend/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_recods: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" + # TODO uncomment this block this block if your connector implements incremental sync: + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-mailersend/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mailersend/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-mailersend/build.gradle b/airbyte-integrations/connectors/source-mailersend/build.gradle new file mode 100644 index 0000000000000..15a2083d97183 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_mailersend' +} diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/__init__.py b/airbyte-integrations/connectors/source-mailersend/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-mailersend/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..251c9d6a0bf89 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "activity": { + "date": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mailersend/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-mailersend/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..b151cb448eeb7 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "activity", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-mailersend/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d1f3ad6152729 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "api_token": "invalid", + "domain_id": "0000000000", + "date_from": "1666963299", + "date_to": "1667049682" +} diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-mailersend/integration_tests/sample_config.json new file mode 100644 index 0000000000000..35f141620e8b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "api_token": "", + "domain_id": "0p7kx4xn7vg9yjree", + "date_from": "1667746836", + "date_to": "1667049682" +} diff --git a/airbyte-integrations/connectors/source-mailersend/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-mailersend/integration_tests/sample_state.json new file mode 100644 index 0000000000000..d4bd418eba241 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/integration_tests/sample_state.json @@ -0,0 +1,26 @@ +{ + "activity": { + "id": "635d289a9d503fb0760382ba", + "created_at": "2022-10-29T13:20:26.020000Z", + "updated_at": "2022-10-29T13:20:26.020000Z", + "type": "soft_bounced", + "email": { + "id": "635d1a39e8e985ebb60bb41e", + "from": "no-reply@exemple.com", + "subject": "subject", + "text": null, + "html": null, + "status": "rejected", + "tags": null, + "created_at": "2022-10-29T12:19:05.181000Z", + 
"updated_at": "2022-10-29T13:20:25.998000Z", + "recipient": { + "id": "62c34cacaf1b3db0da0f6ee9", + "email": "test@exemple.com", + "created_at": "2022-07-04T20:25:16.846000Z", + "updated_at": "2022-09-19T22:47:26.486000Z", + "deleted_at": "2022-09-19T22:47:26.486000Z" + } + } + } +} diff --git a/airbyte-integrations/connectors/source-mailersend/main.py b/airbyte-integrations/connectors/source-mailersend/main.py new file mode 100644 index 0000000000000..aff885f4889b3 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_mailersend import SourceMailersend + +if __name__ == "__main__": + source = SourceMailersend() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-mailersend/requirements.txt b/airbyte-integrations/connectors/source-mailersend/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-mailersend/setup.py b/airbyte-integrations/connectors/source-mailersend/setup.py new file mode 100644 index 0000000000000..91769700bd11c --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.4", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_mailersend", + description="Source implementation for Mailersend.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-mailersend/source_mailersend/__init__.py b/airbyte-integrations/connectors/source-mailersend/source_mailersend/__init__.py new file mode 100644 index 0000000000000..3ba312859ad86 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/source_mailersend/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceMailersend + +__all__ = ["SourceMailersend"] diff --git a/airbyte-integrations/connectors/source-mailersend/source_mailersend/mailersend.yaml b/airbyte-integrations/connectors/source-mailersend/source_mailersend/mailersend.yaml new file mode 100644 index 0000000000000..11dd18976de75 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/source_mailersend/mailersend.yaml @@ -0,0 +1,67 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data"] + requester: + url_base: "https://api.mailersend.com/v1" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_token'] }}" + request_options_provider: + request_parameters: + limit: "100" + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%s" + end_datetime: + datetime: "{{ now_utc().strftime('%s') }}" + datetime_format: "%s" + step: "1d" + cursor_field: "{{ options['stream_cursor_field'] }}" + start_time_option: + field_name: "date_from" + inject_into: "request_parameter" + datetime_format: "%s" + paginator: + type: "DefaultPaginator" + page_size_option: + inject_into: "request_parameter" + field_name: "page_size" + pagination_strategy: + type: "PageIncrement" + page_size: 100 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + url_base: "*ref(definitions.requester.url_base)" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.paginator)" + requester: + $ref: "*ref(definitions.requester)" + stream_slicer: + $ref: "*ref(definitions.stream_slicer)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + activity_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "activity" + primary_key: "id" + path: "/activity/{{ config['domain_id'] }}" + stream_cursor_field: "created_at" + +streams: + - "*ref(definitions.activity_stream)" + +check: + stream_names: + - "activity" diff --git a/airbyte-integrations/connectors/source-mailersend/source_mailersend/schemas/activity.json b/airbyte-integrations/connectors/source-mailersend/source_mailersend/schemas/activity.json new file mode 100644 index 0000000000000..7831357119793 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/source_mailersend/schemas/activity.json @@ -0,0 +1,76 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["string"] + }, + "created_at": { + "type": ["string"], + "format": "date-time" + }, + "updated_at": { + "type": ["string"], + "format": "date-time" + }, + "type": { + "type": ["string"] + }, + "email": { + "type": ["object"], + "properties": { + "id": { + "type": ["string"] + }, + "from": { + "type": ["string"] + }, + "subject": { + "type": ["string"] + }, + "text": { + "type": ["null", "string"] + }, + "html": { + "type": ["null", "string"] + }, + "status": { + "type": ["string"] + }, + "tags": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["string"], + "format": "date-time" + }, + "updated_at": { + "type": ["string"], + "format": "date-time" + }, + "recipient": { + "type": ["object"], + "properties": { + "id": { + "type": ["string"] + }, + "email": { + "type": ["string"] + }, + "created_at": { + "type": ["string"], + "format": "date-time" + }, + "updated_at": { + "type": ["string"], + "format": "date-time" + }, + "deleted_at": { + "type": ["string"] + } + } + } + } + } + } +} diff --git 
a/airbyte-integrations/connectors/source-mailersend/source_mailersend/source.py b/airbyte-integrations/connectors/source-mailersend/source_mailersend/source.py new file mode 100644 index 0000000000000..b0b111a3883cb --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/source_mailersend/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceMailersend(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "mailersend.yaml"}) diff --git a/airbyte-integrations/connectors/source-mailersend/source_mailersend/spec.yaml b/airbyte-integrations/connectors/source-mailersend/source_mailersend/spec.yaml new file mode 100644 index 0000000000000..96d6bb18f9964 --- /dev/null +++ b/airbyte-integrations/connectors/source-mailersend/source_mailersend/spec.yaml @@ -0,0 +1,25 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/mailersend +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Mailersend Spec + type: object + required: + - api_token + - domain_id + additionalProperties: true + properties: + api_token: + type: string + description: Your API Token. See here. + airbyte_secret: true + domain_id: + type: string + description: The domain entity in mailersend + examples: + - airbyte.com + - linkana.com + start_date: + type: number + description: Timestamp is assumed to be UTC. + examples: + - 123131321 diff --git a/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/schemas/sms.json b/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/schemas/sms.json index 055e2b553a565..64e0671e9344c 100644 --- a/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/schemas/sms.json +++ b/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/schemas/sms.json @@ -14,31 +14,31 @@ "Status": { "type": "object", "properties": { - "Code": { - "type": "number" - }, - "Name": { - "type": "string" - }, - "Description": { - "type": "string" - } + "Code": { + "type": "number" + }, + "Name": { + "type": "string" + }, + "Description": { + "type": "string" } + } }, "Cost": { "type": "object", "properties": { - "Value": { - "type": "number" - }, - "Currency": { - "type": "string" - } + "Value": { + "type": "number" + }, + "Currency": { + "type": "string" } + } }, "CreationTS": { "type": "integer" - }, + }, "SmsCount": { "type": "integer" } diff --git a/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/spec.yaml b/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/spec.yaml index 14d4d60f5edfe..ef839981b4dfa 100644 --- a/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/spec.yaml +++ b/airbyte-integrations/connectors/source-mailjet-sms/source_mailjet_sms/spec.yaml @@ -10,7 +10,7 @@ connectionSpecification: token: title: Access Token type: string - description: >- + description: >- Your access token. See here. 
airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-metabase/integration_tests/config_http_url.json b/airbyte-integrations/connectors/source-metabase/integration_tests/config_http_url.json index 1785e4008d5b8..d5ee525794b4c 100644 --- a/airbyte-integrations/connectors/source-metabase/integration_tests/config_http_url.json +++ b/airbyte-integrations/connectors/source-metabase/integration_tests/config_http_url.json @@ -1,3 +1,3 @@ { "instance_api_url": "http://localhost:3000" -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json index 51835b17d0566..e7afabe5080ae 100644 --- a/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json +++ b/airbyte-integrations/connectors/source-metabase/source_metabase/schemas/activity.json @@ -7,22 +7,22 @@ "table": { "type": ["null", "object"], "properties": { - "active": {"type": "boolean"}, - "caveats": {"type": ["null", "string"]}, - "created_at": {"type": ["null", "string"]}, - "db_id": {"type": "integer"}, - "description": {"type": ["null", "string"]}, - "display_name": {"type": ["null", "string"]}, - "entity_type": {"type": ["null", "string"]}, - "field_order": {"type": ["null", "string"]}, - "id": {"type": "integer"}, - "initial_sync_status": {"type": ["null", "string"]}, - "name": {"type": ["null", "string"]}, - "points_of_interest": {"type": ["null", "array"]}, - "schema": {"type": ["null", "string"]}, - "show_in_getting_started": {"type": "boolean"}, - "updated_at": {"type": ["null", "string"]}, - "visibility_type": {"type": ["null", "string"]} + "active": { "type": "boolean" }, + "caveats": { "type": ["null", "string"] }, + "created_at": { "type": ["null", "string"] }, + "db_id": { "type": "integer" }, + "description": { "type": ["null", "string"] }, + "display_name": { "type": ["null", "string"] }, + "entity_type": { "type": ["null", "string"] }, + "field_order": { "type": ["null", "string"] }, + "id": { "type": "integer" }, + "initial_sync_status": { "type": ["null", "string"] }, + "name": { "type": ["null", "string"] }, + "points_of_interest": { "type": ["null", "array"] }, + "schema": { "type": ["null", "string"] }, + "show_in_getting_started": { "type": "boolean" }, + "updated_at": { "type": ["null", "string"] }, + "visibility_type": { "type": ["null", "string"] } } }, "database_id": { diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/.dockerignore b/airbyte-integrations/connectors/source-microsoft-dataverse/.dockerignore new file mode 100644 index 0000000000000..73db5322b3dcd --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_microsoft_dataverse +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/Dockerfile b/airbyte-integrations/connectors/source-microsoft-dataverse/Dockerfile new file mode 100644 index 0000000000000..52c806d763a38 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary 
folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_microsoft_dataverse ./source_microsoft_dataverse + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-microsoft-dataverse diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/README.md b/airbyte-integrations/connectors/source-microsoft-dataverse/README.md new file mode 100644 index 0000000000000..261fc5d4b899e --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/README.md @@ -0,0 +1,132 @@ +# Microsoft Dataverse Source + +This is the repository for the Microsoft Dataverse source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-microsoft-dataverse:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/microsoft-dataverse) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_microsoft_dataverse/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source microsoft-dataverse test creds` +and place them into `secrets/config.json`. 
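
As a quick, optional sanity check before using the CLI commands below, you can exercise the connector's `check_connection` method directly from Python. This is only a minimal sketch: it assumes the virtual environment above is active, that `pip install -r requirements.txt` has been run, and that `secrets/config.json` contains valid credentials conforming to `source_microsoft_dataverse/spec.yaml`.
```
import json
import logging

from source_microsoft_dataverse import SourceMicrosoftDataverse

# Load the config created in the step above (keys: url, tenant_id, client_id,
# client_secret_value, odata_maxpagesize -- see integration_tests/sample_config.json).
with open("secrets/config.json") as f:
    config = json.load(f)

# check_connection returns (True, None) on success or (False, error) otherwise.
succeeded, error = SourceMicrosoftDataverse().check_connection(logging.getLogger("airbyte"), config)
print(succeeded, error)
```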
+ +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-microsoft-dataverse:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-microsoft-dataverse:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-microsoft-dataverse:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-microsoft-dataverse:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-microsoft-dataverse:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-microsoft-dataverse:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-microsoft-dataverse:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. 
+* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/acceptance-test-config.yml b/airbyte-integrations/connectors/source-microsoft-dataverse/acceptance-test-config.yml new file mode 100644 index 0000000000000..38c7d1899a41c --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/acceptance-test-config.yml @@ -0,0 +1,25 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-microsoft-dataverse:dev +tests: + spec: + - spec_path: "source_microsoft_dataverse/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + timeout_seconds: 180 + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-microsoft-dataverse/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/build.gradle b/airbyte-integrations/connectors/source-microsoft-dataverse/build.gradle new file mode 100644 index 0000000000000..ba9d9a834a74d --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_microsoft_dataverse' +} diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/__init__.py b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..5ea158951cc13 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "": { + "$deltatoken": "12644418993!10/06/2050 20:06:12" + } +} diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..cc9f5ef9d2009 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/configured_catalog.json @@ -0,0 +1,15 @@ +{ + "streams": [ + { + "stream": { + "name": "", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "cursor_field": ["modifiedon"], + "primary_key": [[""]], + "sync_mode": "incremental", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..176722f0015ea --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/invalid_config.json @@ -0,0 +1,7 @@ +{ + "client_id": "", + "tenant_id": "", + "client_secret_value": "", + "url": "", + "odata.maxpagesize": 100 +} diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/sample_config.json new file mode 100644 index 0000000000000..371ecbca0a6c4 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/sample_config.json @@ -0,0 +1,7 @@ +{ + "url": "", + "client_id": "", + "tenant_id": "", + "client_secret_value": "", + "odata_maxpagesize": 5000 +} diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/sample_state.json new file mode 100644 index 0000000000000..467d3cc4d96a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "": { + "$deltatoken": "12644418993!10/06/2022 20:06:12" + } +} diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/main.py b/airbyte-integrations/connectors/source-microsoft-dataverse/main.py new file mode 100644 index 0000000000000..6c673246b217d --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_microsoft_dataverse import SourceMicrosoftDataverse + +if __name__ == "__main__": + source = SourceMicrosoftDataverse() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/requirements.txt b/airbyte-integrations/connectors/source-microsoft-dataverse/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
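
The `$deltatoken` value stored in `sample_state.json` and `abnormal_state.json` above is the OData change-tracking token that Dataverse returns in an `@odata.deltaLink`; the incremental stream (see `streams.py` further down in this diff) keeps only that query parameter as per-stream state. A minimal illustrative sketch of that extraction, using a made-up stream name and delta link:
```
from urllib import parse

# Hypothetical @odata.deltaLink as returned by Dataverse change tracking (URL is made up).
delta_link = (
    "https://org.crm.dynamics.com/api/data/v9.2/contacts"
    "?$deltatoken=12644418993%2110%2F06%2F2022%2020%3A06%3A12"
)

# Keep only the $deltatoken query parameter, keyed by stream name --
# the same shape as integration_tests/sample_state.json (which uses a placeholder name).
params = dict(parse.parse_qsl(parse.urlsplit(delta_link).query))
state = {"contacts": {"$deltatoken": params["$deltatoken"]}}
print(state)  # {'contacts': {'$deltatoken': '12644418993!10/06/2022 20:06:12'}}
```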
diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/setup.py b/airbyte-integrations/connectors/source-microsoft-dataverse/setup.py new file mode 100644 index 0000000000000..2eb0ceeda35e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_microsoft_dataverse", + description="Source implementation for Microsoft Dataverse.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/__init__.py b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/__init__.py new file mode 100644 index 0000000000000..ac8f00705ef9e --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceMicrosoftDataverse + +__all__ = ["SourceMicrosoftDataverse"] diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/dataverse.py b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/dataverse.py new file mode 100644 index 0000000000000..4581bc9af7c68 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/dataverse.py @@ -0,0 +1,80 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from enum import Enum +from typing import Any, Mapping, MutableMapping, Optional + +import requests +from airbyte_cdk.sources.streams.http.requests_native_auth.oauth import Oauth2Authenticator + + +class MicrosoftOauth2Authenticator(Oauth2Authenticator): + def build_refresh_request_body(self) -> Mapping[str, Any]: + """ + Returns the request body to set on the refresh request + """ + payload: MutableMapping[str, Any] = { + "grant_type": "client_credentials", + "client_id": self.get_client_id(), + "client_secret": self.get_client_secret(), + "scope": self.get_scopes(), + } + + return payload + + +class AirbyteType(Enum): + + String = {"type": ["null", "string"]} + Boolean = {"type": ["null", "boolean"]} + Timestamp = {"type": ["null", "string"], "format": "date-time", "airbyte_type": "timestamp_with_timezone"} + Integer = {"type": ["null", "integer"]} + Number = {"type": ["null", "number"]} + + +class DataverseType(Enum): + + String = AirbyteType.String + Uniqueidentifier = AirbyteType.String + DateTime = AirbyteType.Timestamp + Integer = AirbyteType.Integer + BigInt = AirbyteType.Integer + Money = AirbyteType.Number + Boolean = AirbyteType.Boolean + Double = AirbyteType.Number + Decimal = AirbyteType.Number + Status = AirbyteType.Integer + State = AirbyteType.Integer + Picklist = AirbyteType.Integer + Lookup = AirbyteType.String + Virtual = None + + +def get_auth(config: Mapping[str, Any]) -> MicrosoftOauth2Authenticator: + return MicrosoftOauth2Authenticator( + token_refresh_endpoint=f'https://login.microsoftonline.com/{config["tenant_id"]}/oauth2/v2.0/token', + client_id=config["client_id"], + client_secret=config["client_secret_value"], + scopes=[f'{config["url"]}/.default'], + refresh_token="", + ) + + +def do_request(config: Mapping[str, Any], path: str): + auth = get_auth(config) + headers = auth.get_auth_header() + # Call a protected API with the access token. + return requests.get( + config["url"] + "/api/data/v9.2/" + path, + headers=headers, + ) + + +def convert_dataverse_type(dataverse_type: str) -> Optional[dict]: + if dataverse_type in DataverseType.__members__: + enum_type = DataverseType[dataverse_type] + if enum_type: + return enum_type.value if enum_type.value is None else enum_type.value.value + + return AirbyteType.String.value diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/source.py b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/source.py new file mode 100644 index 0000000000000..ed99f1f51b5fb --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/source.py @@ -0,0 +1,102 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import logging +from typing import Any, Iterator, List, Mapping, MutableMapping, Tuple, Union + +from airbyte_cdk.models import AirbyteCatalog, AirbyteMessage, AirbyteStateMessage, AirbyteStream, ConfiguredAirbyteCatalog, SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream + +from .dataverse import convert_dataverse_type, do_request, get_auth +from .streams import IncrementalMicrosoftDataverseStream, MicrosoftDataverseStream + + +class SourceMicrosoftDataverse(AbstractSource): + def __init__(self): + self.catalogs = None + + def discover(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteCatalog: + response = do_request(config, "EntityDefinitions?$expand=Attributes") + response_json = response.json() + streams = [] + for entity in response_json["value"]: + schema = {"properties": {}} + for attribute in entity["Attributes"]: + dataverse_type = attribute["AttributeType"] + if dataverse_type == "Lookup": + attribute["LogicalName"] = "_" + attribute["LogicalName"] + "_value" + attribute_type = convert_dataverse_type(dataverse_type) + + if not attribute_type: + continue + + schema["properties"][attribute["LogicalName"]] = attribute_type + + if entity["CanChangeTrackingBeEnabled"]["Value"] and entity["ChangeTrackingEnabled"]: + schema["properties"].update({"_ab_cdc_updated_at": {"type": "string"}, "_ab_cdc_deleted_at": {"type": ["null", "string"]}}) + stream = AirbyteStream( + name=entity["LogicalName"], json_schema=schema, supported_sync_modes=[SyncMode.full_refresh, SyncMode.incremental] + ) + stream.source_defined_cursor = True + if "modifiedon" in schema["properties"]: + stream.default_cursor_field = ["modifiedon"] + else: + stream = AirbyteStream(name=entity["LogicalName"], json_schema=schema, supported_sync_modes=[SyncMode.full_refresh]) + + stream.source_defined_primary_key = [[entity["PrimaryIdAttribute"]]] + streams.append(stream) + return AirbyteCatalog(streams=streams) + + def check_connection(self, logger, config) -> Tuple[bool, any]: + """ + :param config: the user-input config object conforming to the connector's spec.yaml + :param logger: logger object + :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. + """ + try: + response = do_request(config, "") + # Raises an exception for error codes (4xx or 5xx) + response.raise_for_status() + return True, None + except Exception as e: + return False, e + + def read( + self, + logger: logging.Logger, + config: Mapping[str, Any], + catalog: ConfiguredAirbyteCatalog, + state: Union[List[AirbyteStateMessage], MutableMapping[str, Any]] = None, + ) -> Iterator[AirbyteMessage]: + self.catalogs = catalog + return super().read(logger, config, catalog, state) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + """ + :param config: A Mapping of the user input configuration as defined in the connector spec. 
+ """ + auth = get_auth(config) + + streams = [] + for catalog in self.catalogs.streams: + response = do_request(config, f"EntityDefinitions(LogicalName='{catalog.stream.name}')") + response_json = response.json() + + args = { + "url": config["url"], + "stream_name": catalog.stream.name, + "stream_path": response_json["EntitySetName"], + "primary_key": catalog.primary_key, + "schema": catalog.stream.json_schema, + "odata_maxpagesize": config["odata_maxpagesize"], + "authenticator": auth, + } + + if catalog.sync_mode == SyncMode.incremental: + streams.append(IncrementalMicrosoftDataverseStream(**args, config_cursor_field=catalog.cursor_field)) + else: + streams.append(MicrosoftDataverseStream(**args)) + + return streams diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/spec.yaml b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/spec.yaml new file mode 100644 index 0000000000000..1768da97ee2b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/spec.yaml @@ -0,0 +1,47 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/microsoft-dataverse +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Microsoft Dataverse Spec + type: object + required: + - url + - tenant_id + - client_id + - client_secret_value + additionalProperties: true + properties: + url: + type: string + description: URL to Microsoft Dataverse API + title: URL + examples: + - https://.crm.dynamics.com + order: 0 + + tenant_id: + type: string + description: Tenant Id of your Microsoft Dataverse Instance + title: Tenant Id + airbyte_secret: true + order: 1 + + client_id: + type: string + description: App Registration Client Id + title: Client Id + airbyte_secret: true + order: 2 + + client_secret_value: + type: string + description: App Registration Client Secret + title: Client Secret + airbyte_secret: true + order: 3 + + odata_maxpagesize: + type: integer + description: Max number of results per page. Default=5000 + title: Max page size + default: 5000 + order: 4 diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/streams.py b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/streams.py new file mode 100644 index 0000000000000..2e8b6a35fdc9d --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/source_microsoft_dataverse/streams.py @@ -0,0 +1,150 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from abc import ABC +from datetime import datetime +from typing import Any, Iterable, Mapping, MutableMapping, Optional +from urllib import parse + +import requests +from airbyte_cdk.sources.streams import IncrementalMixin +from airbyte_cdk.sources.streams.http import HttpStream + + +# Basic full refresh stream +class MicrosoftDataverseStream(HttpStream, ABC): + + # Base url will be set by init(), using information provided by the user through config input + url_base = "" + primary_key = "" + + def __init__(self, url, stream_name, stream_path, schema, primary_key, odata_maxpagesize, **kwargs): + super().__init__(**kwargs) + self.url_base = url + "/api/data/v9.2/" + self.stream_name = stream_name + self.stream_path = stream_path + self.primary_key = primary_key + self.schema = schema + self.odata_maxpagesize = odata_maxpagesize + + @property + def name(self) -> str: + """Source name""" + return self.stream_name + + def get_json_schema(self) -> Mapping[str, Any]: + return self.schema + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """ + :param response: the most recent response from the API + :return If there is another page in the result, a mapping (e.g: dict) containing information needed to query the next page in the response. + If there are no more pages in the result, return None. + """ + + response_json = response.json() + + if "@odata.nextLink" in response_json: + next_link = response_json["@odata.nextLink"] + next_link_params = dict(parse.parse_qsl(parse.urlsplit(next_link).query)) + return next_link_params + else: + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + """ + :return a dict containing the parameters to be used in the request + """ + request_params = super().request_params(stream_state) + # If there is not a nextLink(contains "next_page_token") in the response, means it is the last page. + # In this case, the deltatoken is passed instead. 
+ if next_page_token is None: + request_params.update(stream_state) + return request_params + elif next_page_token is not None: + request_params.update(next_page_token) + return request_params + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + :return an iterable containing each record in the response + """ + for result in response.json()["value"]: + yield result + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + return { + "Cache-Control": "no-cache", + "OData-Version": "4.0", + "Content-Type": "application/json", + "Prefer": "odata.maxpagesize=" + str(self.odata_maxpagesize), + } + + def path( + self, + *, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> str: + return self.stream_path + + +# Basic incremental stream +class IncrementalMicrosoftDataverseStream(MicrosoftDataverseStream, IncrementalMixin, ABC): + + delta_token_field = "$deltatoken" + state_checkpoint_interval = None # For now we just use the change tracking as state, and it is only emitted on last page + + def __init__(self, url, stream_name, stream_path, schema, primary_key, odata_maxpagesize, config_cursor_field, **kwargs): + super().__init__(url, stream_name, stream_path, schema, primary_key, odata_maxpagesize, **kwargs) + self._cursor_value = None + self.config_cursor_field = config_cursor_field + + @property + def state(self) -> Mapping[str, Any]: + return {self.delta_token_field: str(self._cursor_value)} + + @property + def cursor_field(self) -> str: + return self.config_cursor_field + + # Sets the state got by state getter. "value" is the return of state getter -> dict + @state.setter + def state(self, value: Mapping[str, Any]): + self._cursor_value = value[self.delta_token_field] + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + """ + Override to return any non-auth headers. Authentication headers will overwrite any overlapping headers returned from this method. 
+ """ + request_headers = super().request_headers(stream_state=stream_state) + request_headers.update( + {"Prefer": "odata.track-changes," + request_headers["Prefer"]} + ) # odata.track-changes -> Header that enables change tracking + return request_headers + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + if "@odata.deltaLink" in response_json: + delta_link = response_json["@odata.deltaLink"] + delta_link_params = dict(parse.parse_qsl(parse.urlsplit(delta_link).query)) + self._cursor_value = delta_link_params[self.delta_token_field] + for result in response_json["value"]: + if "@odata.context" in result and result["reason"] == "deleted": + result.update({self.primary_key[0][0]: result["id"]}) + result.pop("@odata.context", None) + result.pop("id", None) + result.pop("reason", None) + result.update({"_ab_cdc_deleted_at": datetime.now().isoformat()}) + else: + result.update({"_ab_cdc_updated_at": result[self.cursor_field[0]]}) + + yield result diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/__init__.py b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_dataverse.py b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_dataverse.py new file mode 100644 index 0000000000000..834daa5923da7 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_dataverse.py @@ -0,0 +1,17 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import pytest +from source_microsoft_dataverse.dataverse import AirbyteType, convert_dataverse_type + + +@pytest.mark.parametrize("dataverse_type,expected_result", [ + ("String", AirbyteType.String.value), + ("Integer", AirbyteType.Integer.value), + ("Virtual", None), + ("Random", AirbyteType.String.value) +]) +def test_convert_dataverse_type(dataverse_type, expected_result): + result = convert_dataverse_type(dataverse_type) + assert result == expected_result diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_incremental_streams.py new file mode 100644 index 0000000000000..f4db9179ad44e --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_incremental_streams.py @@ -0,0 +1,110 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from unittest.mock import MagicMock + +from pytest import fixture +from source_microsoft_dataverse.source import IncrementalMicrosoftDataverseStream + + +@fixture +def incremental_config(): + return { + "url": "http://test-url", + "stream_name": "test_stream", + "stream_path": "test_path", + "primary_key": [["test_primary_key"]], + "schema": { + + }, + "odata_maxpagesize": 100, + "config_cursor_field": ["test_cursor_field"], + "authenticator": MagicMock() + } + + +@fixture +def incremental_response(incremental_config): + return { + "@odata.deltaLink": f"{incremental_config['url']}?$deltatoken=12644418993%2110%2F06%2F2022%2020%3A06%3A12", + "value": [ + { + "test_primary_key": "pk", + "test_cursor_field": "test-date" + }, + { + "id": "pk2", + "@odata.context": "context", + "reason": "deleted" + } + ] + } + + +def test_primary_key(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + expected_primary_key = [["test_primary_key"]] + assert stream.primary_key == expected_primary_key + + +def test_stream_name(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + expected_stream_name = "test_stream" + assert stream.name == expected_stream_name + + +def test_stream_path(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + expected_stream_path = "test_path" + assert stream.path() == expected_stream_path + + +def test_cursor_field(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + expected_cursor_field = ["test_cursor_field"] + assert stream.cursor_field == expected_cursor_field + + +def test_supports_incremental(incremental_config, mocker): + mocker.patch.object(IncrementalMicrosoftDataverseStream, "cursor_field", "dummy_field") + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + assert stream.supports_incremental + + +def test_source_defined_cursor(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval + + +def test_parse_request(incremental_config, incremental_response, mocker): + response_mock, datetime_mock = MagicMock(), MagicMock() + response_mock.json.return_value = incremental_response + datetime_mock.now.return_value.isoformat.return_value = "test-time" + mocker.patch("source_microsoft_dataverse.streams.datetime", datetime_mock) + + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + + iterable = stream.parse_response(response_mock) + iterable_list = list(iterable) + assert len(iterable_list) == 2 + assert stream.state[stream.delta_token_field] == "12644418993!10/06/2022 20:06:12" + assert iterable_list[0]["_ab_cdc_updated_at"] == "test-date" + assert iterable_list[1]["_ab_cdc_deleted_at"] == "test-time" + assert iterable_list[1][incremental_config["primary_key"][0][0]] == "pk2" + assert "id" not in iterable_list[1] + assert "reason" not in iterable_list[1] + assert "@odata.context" not in iterable_list[1] + + +def test_request_headers(incremental_config): + stream = IncrementalMicrosoftDataverseStream(**incremental_config) + headers = stream.request_headers(stream_state={}) + assert "Prefer" in headers + assert headers["Prefer"] == "odata.track-changes,odata.maxpagesize=100" diff 
--git a/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_source.py b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_source.py new file mode 100644 index 0000000000000..bb93ad44b8239 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_source.py @@ -0,0 +1,144 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json +from unittest import mock +from unittest.mock import MagicMock + +from airbyte_cdk.models import SyncMode +from source_microsoft_dataverse.dataverse import AirbyteType +from source_microsoft_dataverse.source import SourceMicrosoftDataverse +from source_microsoft_dataverse.streams import IncrementalMicrosoftDataverseStream, MicrosoftDataverseStream + + +@mock.patch("source_microsoft_dataverse.source.do_request") +def test_check_connection(mock_request): + mock_request.return_value.raise_for_status = lambda: () + source = SourceMicrosoftDataverse() + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +@mock.patch("source_microsoft_dataverse.source.get_auth") +@mock.patch("source_microsoft_dataverse.source.do_request") +def test_streams_incremental(mock_get_auth, mock_request): + streams = MagicMock() + streams.sync_mode = SyncMode.incremental + streams.stream.name = "test" + + catalog = MagicMock() + + catalog.streams = [streams] + + config_mock = MagicMock() + source = SourceMicrosoftDataverse() + source.catalogs = catalog + + streams = source.streams(config_mock) + + expected_streams_number = 1 + assert len(streams) == expected_streams_number + assert isinstance(streams[0], IncrementalMicrosoftDataverseStream) + assert streams[0].name == "test" + + +@mock.patch("source_microsoft_dataverse.source.get_auth") +@mock.patch("source_microsoft_dataverse.source.do_request") +def test_streams_full_refresh(mock_get_auth, mock_request): + streams = MagicMock() + streams.sync_mode = SyncMode.full_refresh + streams.stream.name = "test" + + catalog = MagicMock() + + catalog.streams = [streams] + + config_mock = MagicMock() + source = SourceMicrosoftDataverse() + source.catalogs = catalog + + streams = source.streams(config_mock) + + expected_streams_number = 1 + assert len(streams) == expected_streams_number + assert isinstance(streams[0], MicrosoftDataverseStream) + assert streams[0].name == "test" + + +@mock.patch("source_microsoft_dataverse.source.do_request") +def test_discover_incremental(mock_request): + result_json = json.loads(''' + { + "value": [ + { + "LogicalName": "stream", + "PrimaryIdAttribute": "primary", + "ChangeTrackingEnabled": true, + "CanChangeTrackingBeEnabled": { + "Value": true + }, + "Attributes": [ + { + "LogicalName": "test", + "AttributeType": "String" + }, + { + "LogicalName": "modifiedon", + "AttributeType": "DateTime" + } + ] + } + ] + } + ''') + + mock_request.return_value.status.return_value = 200 + mock_request.return_value.json.return_value = result_json + + source = SourceMicrosoftDataverse() + logger_mock, config_mock = MagicMock(), MagicMock() + + catalog = source.discover(logger_mock, config_mock) + + assert not {'modifiedon'} ^ set(catalog.streams[0].default_cursor_field) + assert not {SyncMode.full_refresh, SyncMode.incremental} ^ set(catalog.streams[0].supported_sync_modes) + assert not {'primary'} ^ set(catalog.streams[0].source_defined_primary_key[0]) + assert catalog.streams[0].json_schema["properties"]["test"] == AirbyteType.String.value + + 
+@mock.patch("source_microsoft_dataverse.source.do_request") +def test_discover_full_refresh(mock_request): + result_json = json.loads(''' + { + "value": [ + { + "LogicalName": "stream", + "PrimaryIdAttribute": "primary", + "ChangeTrackingEnabled": false, + "CanChangeTrackingBeEnabled": { + "Value": false + }, + "Attributes": [ + { + "LogicalName": "test", + "AttributeType": "String" + } + ] + } + ] + } + ''') + + mock_request.return_value.status.return_value = 200 + mock_request.return_value.json.return_value = result_json + + source = SourceMicrosoftDataverse() + logger_mock, config_mock = MagicMock(), MagicMock() + + catalog = source.discover(logger_mock, config_mock) + + assert catalog.streams[0].default_cursor_field is None or len(catalog.streams[0].default_cursor_field) == 0 + assert not {SyncMode.full_refresh} ^ set(catalog.streams[0].supported_sync_modes) + assert not {'primary'} ^ set(catalog.streams[0].source_defined_primary_key[0]) + assert catalog.streams[0].json_schema["properties"]["test"] == AirbyteType.String.value diff --git a/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_streams.py new file mode 100644 index 0000000000000..d58f2763229e6 --- /dev/null +++ b/airbyte-integrations/connectors/source-microsoft-dataverse/unit_tests/test_streams.py @@ -0,0 +1,113 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from pytest import fixture +from source_microsoft_dataverse.source import MicrosoftDataverseStream + + +@fixture +def incremental_config(): + return { + "url": "http://test-url", + "stream_name": "test_stream", + "stream_path": "test_path", + "primary_key": [["test_primary_key"]], + "schema": { + + }, + "odata_maxpagesize": 100, + "authenticator": MagicMock() + } + + +@pytest.mark.parametrize( + ("inputs", "expected_params"), + [ + ({"stream_slice": None, "stream_state": {}, "next_page_token": None}, {}), + ({"stream_slice": None, "stream_state": {}, "next_page_token": {"$skiptoken": "skiptoken"}}, {"$skiptoken": "skiptoken"}), + ({"stream_slice": None, "stream_state": {"$deltatoken": "delta"}, "next_page_token": None}, {"$deltatoken": "delta"}) + ], +) +def test_request_params(inputs, expected_params, incremental_config): + stream = MicrosoftDataverseStream(**incremental_config) + assert stream.request_params(**inputs) == expected_params + + +@pytest.mark.parametrize( + ("response_json", "next_page_token"), + [ + ({"@odata.nextLink": "https://url?$skiptoken=oEBwdSP6uehIAxQOWq_3Ksh_TLol6KIm3stvdc6hGhZRi1hQ7Spe__dpvm3U4zReE4CYXC2zOtaKdi7KHlUtC2CbRiBIUwOxPKLa"}, + {"$skiptoken": "oEBwdSP6uehIAxQOWq_3Ksh_TLol6KIm3stvdc6hGhZRi1hQ7Spe__dpvm3U4zReE4CYXC2zOtaKdi7KHlUtC2CbRiBIUwOxPKLa"}), + ({"value": []}, None), + ], +) +def test_next_page_token(response_json, next_page_token, incremental_config): + stream = MicrosoftDataverseStream(**incremental_config) + response = MagicMock() + response.json.return_value = response_json + inputs = {"response": response} + expected_token = next_page_token + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(incremental_config): + stream = MicrosoftDataverseStream(**incremental_config) + response = MagicMock() + response.json.return_value = { + "value": [ + { + "test-key": "test-value" + } + ] + } + inputs = {"response": response} + expected_parsed_object = { + "test-key": "test-value" + } + 
assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(incremental_config): + stream = MicrosoftDataverseStream(**incremental_config) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = { + "Cache-Control": "no-cache", + "OData-Version": "4.0", + "Content-Type": "application/json", + "Prefer": "odata.maxpagesize=100" + } + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(incremental_config): + stream = MicrosoftDataverseStream(**incremental_config) + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(incremental_config, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = MicrosoftDataverseStream(**incremental_config) + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(incremental_config): + response_mock = MagicMock() + stream = MicrosoftDataverseStream(**incremental_config) + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.source.mongodb/MongodbSourceStrictEncrypt.java b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.source.mongodb/MongodbSourceStrictEncrypt.java index 4d460be244830..5fb9f8a430741 100644 --- a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.source.mongodb/MongodbSourceStrictEncrypt.java +++ b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/main/java/io.airbyte.integrations.source.mongodb/MongodbSourceStrictEncrypt.java @@ -6,13 +6,14 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; +import io.airbyte.db.mongodb.MongoUtils; import io.airbyte.db.mongodb.MongoUtils.MongoInstanceType; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.spec_modification.SpecModifyingSource; import io.airbyte.protocol.models.AirbyteConnectionStatus; -import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.ConnectorSpecification; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,14 +28,12 @@ public MongodbSourceStrictEncrypt() { @Override public AirbyteConnectionStatus check(final JsonNode config) throws Exception { - final JsonNode instanceConfig = config.get(MongoDbSourceUtils.INSTANCE_TYPE); - final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(MongoDbSourceUtils.INSTANCE).asText()); + final JsonNode instanceConfig = config.get(MongoUtils.INSTANCE_TYPE); + final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(MongoUtils.INSTANCE).asText()); // If the MongoDb source connector is not set up to use a TLS connection, then we should fail the // check. 
- if (instance.equals(MongoInstanceType.STANDALONE) && !MongoDbSourceUtils.tlsEnabledForStandaloneInstance(config, instanceConfig)) { - return new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage("TLS connection must be used to read from MongoDB."); + if (instance.equals(MongoInstanceType.STANDALONE) && !MongoUtils.tlsEnabledForStandaloneInstance(config, instanceConfig)) { + throw new ConfigErrorException("TLS connection must be used to read from MongoDB."); } return super.check(config); diff --git a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java index 3a0516c45c5fc..462d700065728 100644 --- a/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mongodb-strict-encrypt/src/test-integration/java/io/airbyte/integrations/source/mongodb/MongodbSourceStrictEncryptAcceptanceTest.java @@ -4,6 +4,8 @@ package io.airbyte.integrations.source.mongodb; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.catchThrowable; import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.JsonNode; @@ -11,6 +13,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.mongodb.client.MongoCollection; +import io.airbyte.commons.exceptions.ConfigErrorException; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.resources.MoreResources; import io.airbyte.db.jdbc.JdbcUtils; @@ -18,8 +21,6 @@ import io.airbyte.db.mongodb.MongoUtils.MongoInstanceType; import io.airbyte.integrations.standardtest.source.SourceAcceptanceTest; import io.airbyte.integrations.standardtest.source.TestDestinationEnv; -import io.airbyte.protocol.models.AirbyteConnectionStatus; -import io.airbyte.protocol.models.AirbyteConnectionStatus.Status; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; @@ -160,13 +161,11 @@ void testCheck() throws Exception { ((ObjectNode) invalidStandaloneConfig).put(INSTANCE_TYPE, instanceConfig); - final AirbyteConnectionStatus actual = new MongodbSourceStrictEncrypt().check(invalidStandaloneConfig); - final AirbyteConnectionStatus expected = - new AirbyteConnectionStatus() - .withStatus(Status.FAILED) - .withMessage("TLS connection must be used to read from MongoDB."); - - assertEquals(expected, actual); + final Throwable throwable = catchThrowable(() -> new MongodbSourceStrictEncrypt().check(invalidStandaloneConfig)); + assertThat(throwable).isInstanceOf(ConfigErrorException.class); + assertThat(((ConfigErrorException) throwable) + .getDisplayMessage() + .contains("TLS connection must be used to read from MongoDB.")); } } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml new file mode 100644 index 0000000000000..6402c3beba5f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-config.yml @@ -0,0 +1,7 
@@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-mongodb-v2:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle index 6b776c6192dcf..ab5675cd3211f 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/build.gradle +++ b/airbyte-integrations/connectors/source-mongodb-v2/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java index 7ebfb9d36ef37..7491d47146be7 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSource.java @@ -56,8 +56,8 @@ public static void main(final String[] args) throws Exception { @Override public JsonNode toDatabaseConfig(final JsonNode config) { - final var credentials = config.has(MongoDbSourceUtils.USER) && config.has(JdbcUtils.PASSWORD_KEY) - ? String.format("%s:%s@", config.get(MongoDbSourceUtils.USER).asText(), config.get(JdbcUtils.PASSWORD_KEY).asText()) + final var credentials = config.has(MongoUtils.USER) && config.has(JdbcUtils.PASSWORD_KEY) + ? 
String.format("%s:%s@", config.get(MongoUtils.USER).asText(), config.get(JdbcUtils.PASSWORD_KEY).asText()) : StringUtils.EMPTY; return Jsons.jsonNode(ImmutableMap.builder() @@ -111,7 +111,7 @@ protected List>> discoverInternal(final MongoDat .nameSpace(database.getName()) .name(collectionName) .fields(fields) - .primaryKeys(List.of(MongoDbSourceUtils.PRIMARY_KEY)) + .primaryKeys(List.of(MongoUtils.PRIMARY_KEY)) .build(); tableInfos.add(tableInfo); @@ -192,7 +192,7 @@ public boolean isCursorType(final BsonType bsonType) { // when we have no cursor field here, at least id could be used as cursor here. // This logic will be used feather when we will implement part which will show only list of possible // cursor fields on UI - return MongoDbSourceUtils.ALLOWED_CURSOR_TYPES.contains(bsonType); + return MongoUtils.ALLOWED_CURSOR_TYPES.contains(bsonType); } private AutoCloseableIterator queryTable(final MongoDatabase database, @@ -212,30 +212,30 @@ private AutoCloseableIterator queryTable(final MongoDatabase database, private String buildConnectionString(final JsonNode config, final String credentials) { final StringBuilder connectionStrBuilder = new StringBuilder(); - final JsonNode instanceConfig = config.get(MongoDbSourceUtils.INSTANCE_TYPE); - final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(MongoDbSourceUtils.INSTANCE).asText()); + final JsonNode instanceConfig = config.get(MongoUtils.INSTANCE_TYPE); + final MongoInstanceType instance = MongoInstanceType.fromValue(instanceConfig.get(MongoUtils.INSTANCE).asText()); switch (instance) { case STANDALONE -> { connectionStrBuilder.append( - String.format(MongoDbSourceUtils.MONGODB_SERVER_URL, credentials, instanceConfig.get(JdbcUtils.HOST_KEY).asText(), + String.format(MongoUtils.MONGODB_SERVER_URL, credentials, instanceConfig.get(JdbcUtils.HOST_KEY).asText(), instanceConfig.get(JdbcUtils.PORT_KEY).asText(), config.get(JdbcUtils.DATABASE_KEY).asText(), - config.get(MongoDbSourceUtils.AUTH_SOURCE).asText(), MongoDbSourceUtils.tlsEnabledForStandaloneInstance(config, instanceConfig))); + config.get(MongoUtils.AUTH_SOURCE).asText(), MongoUtils.tlsEnabledForStandaloneInstance(config, instanceConfig))); } case REPLICA -> { connectionStrBuilder.append( - String.format(MongoDbSourceUtils.MONGODB_REPLICA_URL, credentials, instanceConfig.get(MongoDbSourceUtils.SERVER_ADDRESSES).asText(), + String.format(MongoUtils.MONGODB_REPLICA_URL, credentials, instanceConfig.get(MongoUtils.SERVER_ADDRESSES).asText(), config.get(JdbcUtils.DATABASE_KEY).asText(), - config.get(MongoDbSourceUtils.AUTH_SOURCE).asText())); - if (instanceConfig.has(MongoDbSourceUtils.REPLICA_SET)) { - connectionStrBuilder.append(String.format("&replicaSet=%s", instanceConfig.get(MongoDbSourceUtils.REPLICA_SET).asText())); + config.get(MongoUtils.AUTH_SOURCE).asText())); + if (instanceConfig.has(MongoUtils.REPLICA_SET)) { + connectionStrBuilder.append(String.format("&replicaSet=%s", instanceConfig.get(MongoUtils.REPLICA_SET).asText())); } } case ATLAS -> { connectionStrBuilder.append( - String.format(MongoDbSourceUtils.MONGODB_CLUSTER_URL, credentials, - instanceConfig.get(MongoDbSourceUtils.CLUSTER_URL).asText(), config.get(JdbcUtils.DATABASE_KEY).asText(), - config.get(MongoDbSourceUtils.AUTH_SOURCE).asText())); + String.format(MongoUtils.MONGODB_CLUSTER_URL, credentials, + instanceConfig.get(MongoUtils.CLUSTER_URL).asText(), config.get(JdbcUtils.DATABASE_KEY).asText(), + config.get(MongoUtils.AUTH_SOURCE).asText())); } default -> throw new 
IllegalArgumentException("Unsupported instance type: " + instance); } diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSourceUtils.java b/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSourceUtils.java deleted file mode 100644 index 6939ddf9b0ba0..0000000000000 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/java/io.airbyte.integrations.source.mongodb/MongoDbSourceUtils.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.integrations.source.mongodb; - -import static org.bson.BsonType.DATE_TIME; -import static org.bson.BsonType.DECIMAL128; -import static org.bson.BsonType.DOCUMENT; -import static org.bson.BsonType.DOUBLE; -import static org.bson.BsonType.INT32; -import static org.bson.BsonType.INT64; -import static org.bson.BsonType.OBJECT_ID; -import static org.bson.BsonType.STRING; -import static org.bson.BsonType.TIMESTAMP; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.db.jdbc.JdbcUtils; -import java.util.Set; -import org.bson.BsonType; - -public final class MongoDbSourceUtils { - - private MongoDbSourceUtils() {} - - public static final String MONGODB_SERVER_URL = "mongodb://%s%s:%s/%s?authSource=%s&ssl=%s"; - public static final String MONGODB_CLUSTER_URL = "mongodb+srv://%s%s/%s?authSource=%s&retryWrites=true&w=majority&tls=true"; - public static final String MONGODB_REPLICA_URL = "mongodb://%s%s/%s?authSource=%s&directConnection=false&ssl=true"; - public static final String USER = "user"; - public static final String INSTANCE_TYPE = "instance_type"; - public static final String INSTANCE = "instance"; - public static final String CLUSTER_URL = "cluster_url"; - public static final String SERVER_ADDRESSES = "server_addresses"; - public static final String REPLICA_SET = "replica_set"; - public static final String AUTH_SOURCE = "auth_source"; - public static final String PRIMARY_KEY = "_id"; - public static final Set ALLOWED_CURSOR_TYPES = Set.of(DOUBLE, STRING, DOCUMENT, OBJECT_ID, DATE_TIME, - INT32, TIMESTAMP, INT64, DECIMAL128); - - /** - * Determines whether TLS/SSL should be enabled for a standalone instance of MongoDB. - */ - public static boolean tlsEnabledForStandaloneInstance(final JsonNode config, final JsonNode instanceConfig) { - return config.has(JdbcUtils.TLS_KEY) ? config.get(JdbcUtils.TLS_KEY).asBoolean() - : (instanceConfig.has(JdbcUtils.TLS_KEY) ? 
instanceConfig.get(JdbcUtils.TLS_KEY).asBoolean() : true); - } - -} diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json index fc7959b42274f..2f535f07687a7 100644 --- a/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/main/resources/spec.json @@ -20,8 +20,7 @@ "properties": { "instance": { "type": "string", - "enum": ["standalone"], - "default": "standalone" + "const": "standalone" }, "host": { "title": "Host", @@ -54,8 +53,7 @@ "properties": { "instance": { "type": "string", - "enum": ["replica"], - "default": "replica" + "const": "replica" }, "server_addresses": { "title": "Server Addresses", @@ -74,13 +72,12 @@ }, { "title": "MongoDB Atlas", - "additionalProperties": false, + "additionalProperties": true, "required": ["instance", "cluster_url"], "properties": { "instance": { "type": "string", - "enum": ["atlas"], - "default": "atlas" + "const": "atlas" }, "cluster_url": { "title": "Cluster URL", diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..0216ddfd3c031 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/dummy_config.json @@ -0,0 +1,3 @@ +{ + "database": "default" +} diff --git a/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..b39746df263b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-mongodb-v2/src/test-integration/resources/expected_spec.json @@ -0,0 +1,124 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/mongodb-v2", + "changelogUrl": "https://docs.airbyte.com/integrations/sources/mongodb-v2", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MongoDb Source Spec", + "type": "object", + "required": ["database"], + "additionalProperties": true, + "properties": { + "instance_type": { + "type": "object", + "title": "MongoDb Instance Type", + "description": "The MongoDb instance to connect to. For MongoDB Atlas and Replica Set TLS connection is used by default.", + "order": 0, + "oneOf": [ + { + "title": "Standalone MongoDb Instance", + "required": ["instance", "host", "port"], + "properties": { + "instance": { + "type": "string", + "const": "standalone" + }, + "host": { + "title": "Host", + "type": "string", + "description": "The host name of the Mongo database.", + "order": 0 + }, + "port": { + "title": "Port", + "type": "integer", + "description": "The port of the Mongo database.", + "minimum": 0, + "maximum": 65536, + "default": 27017, + "examples": ["27017"], + "order": 1 + }, + "tls": { + "title": "TLS Connection", + "type": "boolean", + "description": "Indicates whether TLS encryption protocol will be used to connect to MongoDB. It is recommended to use TLS connection if possible. 
For more information see documentation.", + "default": false, + "order": 2 + } + } + }, + { + "title": "Replica Set", + "required": ["instance", "server_addresses"], + "properties": { + "instance": { + "type": "string", + "const": "replica" + }, + "server_addresses": { + "title": "Server Addresses", + "type": "string", + "description": "The members of a replica set. Please specify `host`:`port` of each member separated by comma.", + "examples": ["host1:27017,host2:27017,host3:27017"], + "order": 0 + }, + "replica_set": { + "title": "Replica Set", + "type": "string", + "description": "A replica set in MongoDB is a group of mongod processes that maintain the same data set.", + "order": 1 + } + } + }, + { + "title": "MongoDB Atlas", + "additionalProperties": true, + "required": ["instance", "cluster_url"], + "properties": { + "instance": { + "type": "string", + "const": "atlas" + }, + "cluster_url": { + "title": "Cluster URL", + "type": "string", + "description": "The URL of a cluster to connect to.", + "order": 0 + } + } + } + ] + }, + "database": { + "title": "Database Name", + "type": "string", + "description": "The database you want to replicate.", + "order": 1 + }, + "user": { + "title": "User", + "type": "string", + "description": "The username which is used to access the database.", + "order": 2 + }, + "password": { + "title": "Password", + "type": "string", + "description": "The password associated with this username.", + "airbyte_secret": true, + "order": 3 + }, + "auth_source": { + "title": "Authentication Source", + "type": "string", + "description": "The authentication source where the user information is stored.", + "default": "admin", + "examples": ["admin"], + "order": 4 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml index 5aa3dde87597b..2b3f94e1b3a85 100644 --- a/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-mssql/acceptance-test-config.yml @@ -3,4 +3,7 @@ connector_image: airbyte/source-mssql:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" + backward_compatibility_tests_config: + disable_for_version: "0.4.25" diff --git a/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-mssql/build.gradle b/airbyte-integrations/connectors/source-mssql/build.gradle index a1bceecbf7a08..55f5f5a313c3d 100644 --- a/airbyte-integrations/connectors/source-mssql/build.gradle +++ b/airbyte-integrations/connectors/source-mssql/build.gradle @@ -3,6 +3,7 @@ plugins { id 'airbyte-docker' id 'airbyte-integration-test-java' id 'airbyte-performance-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java index ed44070677c45..ee0e578b5d876 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java +++ b/airbyte-integrations/connectors/source-mssql/src/main/java/io/airbyte/integrations/source/mssql/MssqlSource.java @@ -54,7 +54,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Properties; import java.util.Set; import java.util.function.Supplier; import org.slf4j.Logger; diff --git a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json index 674f9342317de..35b192d2c4de7 100644 --- a/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mssql/src/main/resources/spec.json @@ -72,9 +72,7 @@ "properties": { "ssl_method": { "type": "string", - "const": "unencrypted", - "enum": ["unencrypted"], - "default": "unencrypted" + "const": "unencrypted" } } }, @@ -85,9 +83,7 @@ "properties": { "ssl_method": { "type": "string", - "const": "encrypted_trust_server_certificate", - "enum": ["encrypted_trust_server_certificate"], - "default": "encrypted_trust_server_certificate" + "const": "encrypted_trust_server_certificate" } } }, @@ -98,9 +94,7 @@ "properties": { "ssl_method": { "type": "string", - "const": "encrypted_verify_certificate", - "enum": ["encrypted_verify_certificate"], - "default": "encrypted_verify_certificate" + "const": "encrypted_verify_certificate" }, "hostNameInCertificate": { "title": "Host Name In Certificate", @@ -127,8 +121,6 @@ "method": { "type": "string", "const": "STANDARD", - "enum": 
["STANDARD"], - "default": "STANDARD", "order": 0 } } @@ -141,8 +133,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "data_to_sync": { @@ -160,15 +150,6 @@ "enum": ["Snapshot", "Read Committed"], "description": "Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the \"Snapshot\" level, you must enable the snapshot isolation mode on the database.", "order": 2 - }, - "initial_waiting_seconds": { - "type": "integer", - "title": "Initial Waiting Time in Seconds (Advanced)", - "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.", - "default": 300, - "min": 120, - "max": 1200, - "order": 3 } } } diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..560e553333780 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..4a5352b013bd8 --- /dev/null +++ b/airbyte-integrations/connectors/source-mssql/src/test-integration/resources/expected_spec.json @@ -0,0 +1,277 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/mssql", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MSSQL Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "description": "The hostname of the database.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "The port of the database.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "examples": ["1433"], + "order": 1 + }, + "database": { + "description": "The name of the database.", + "title": "Database", + "type": "string", + "examples": ["master"], + "order": 2 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas to sync from. Defaults to user. Case sensitive.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true, + "default": ["dbo"], + "order": 3 + }, + "username": { + "description": "The username which is used to access the database.", + "title": "Username", + "type": "string", + "order": 4 + }, + "password": { + "description": "The password associated with the username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 5 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 6 + }, + "ssl_method": { + "title": "SSL Method", + "type": "object", + "description": "The encryption method which is used when communicating with the database.", + "order": 7, + "oneOf": [ + { + "title": "Unencrypted", + "description": "Data transfer will not be encrypted.", + "required": ["ssl_method"], + "properties": { + "ssl_method": { + "type": "string", + "const": "unencrypted" + } + } + }, + { + "title": "Encrypted (trust server certificate)", + "description": "Use the certificate provided by the server without verification. (For testing purposes only!)", + "required": ["ssl_method"], + "properties": { + "ssl_method": { + "type": "string", + "const": "encrypted_trust_server_certificate" + } + } + }, + { + "title": "Encrypted (verify certificate)", + "description": "Verify and use the certificate provided by the server.", + "required": ["ssl_method", "trustStoreName", "trustStorePassword"], + "properties": { + "ssl_method": { + "type": "string", + "const": "encrypted_verify_certificate" + }, + "hostNameInCertificate": { + "title": "Host Name In Certificate", + "type": "string", + "description": "Specifies the host name of the server. The value of this property must match the subject property of the certificate.", + "order": 7 + } + } + } + ] + }, + "replication_method": { + "type": "object", + "title": "Replication Method", + "description": "The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.", + "default": "STANDARD", + "order": 8, + "oneOf": [ + { + "title": "Standard", + "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "STANDARD", + "order": 0 + } + } + }, + { + "title": "Logical Replication (CDC)", + "description": "CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "CDC", + "order": 0 + }, + "data_to_sync": { + "title": "Data to Sync", + "type": "string", + "default": "Existing and New", + "enum": ["Existing and New", "New Changes Only"], + "description": "What data should be synced under the CDC. \"Existing and New\" will read existing data as a snapshot, and sync new changes through CDC. \"New Changes Only\" will skip the initial snapshot, and only sync new changes through CDC.", + "order": 1 + }, + "snapshot_isolation": { + "title": "Initial Snapshot Isolation Level", + "type": "string", + "default": "Snapshot", + "enum": ["Snapshot", "Read Committed"], + "description": "Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. 
If you choose the \"Snapshot\" level, you must enable the snapshot isolation mode on the database.", + "order": 2 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java index ff4608d84ecf5..dc516470c2dd5 100644 --- a/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java +++ 
b/airbyte-integrations/connectors/source-mssql/src/test/java/io/airbyte/integrations/source/mssql/CdcMssqlSourceTest.java @@ -161,7 +161,7 @@ public String createSchemaQuery(final String schemaName) { return "CREATE SCHEMA " + schemaName; } - //TODO : Delete this Override when MSSQL supports individual table snapshot + // TODO : Delete this Override when MSSQL supports individual table snapshot @Override public void newTableSnapshotTest() throws Exception { // Do nothing diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile index 86a16ad9c7f96..367d4518cbace 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/Dockerfile @@ -16,6 +16,6 @@ ENV APPLICATION source-mysql-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.0.11 +LABEL io.airbyte.version=1.0.13 LABEL io.airbyte.name=airbyte/source-mysql-strict-encrypt diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java index eb58bd9c10c1c..b039d9fbc8fd0 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/java/io/airbyte/integrations/source/mysql_strict_encrypt/MySqlStrictEncryptJdbcSourceAcceptanceTest.java @@ -22,7 +22,9 @@ import io.airbyte.db.factory.DatabaseDriver; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.integrations.base.Source; +import io.airbyte.integrations.base.ssh.SshBastionContainer; import io.airbyte.integrations.base.ssh.SshHelpers; +import io.airbyte.integrations.base.ssh.SshTunnel; import io.airbyte.integrations.source.jdbc.test.JdbcSourceAcceptanceTest; import io.airbyte.integrations.source.mysql.MySqlSource; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; @@ -39,10 +41,7 @@ import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; +import java.util.*; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterAll; @@ -51,12 +50,15 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.testcontainers.containers.MySQLContainer; +import org.testcontainers.containers.Network; class MySqlStrictEncryptJdbcSourceAcceptanceTest extends JdbcSourceAcceptanceTest { protected static final String TEST_USER = "test"; protected static final String TEST_PASSWORD = "test"; protected static MySQLContainer container; + private static final SshBastionContainer bastion = new SshBastionContainer(); + private static final Network network = Network.newNetwork(); protected Database database; protected DSLContext dslContext; @@ -328,6 +330,32 @@ void testStrictSSLUnsecuredWithTunnel() throws Exception { assertTrue(actual.getMessage().contains("Could not connect with provided SSH configuration.")); } + @Test + void testCheckWithSSlModeDisabled() throws Exception { + try (final 
MySQLContainer db = new MySQLContainer<>("mysql:8.0").withNetwork(network)) { + bastion.initAndStartBastion(network); + db.start(); + final JsonNode configWithSSLModeDisabled = bastion.getTunnelConfig(SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH, ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, Objects.requireNonNull(db.getContainerInfo() + .getNetworkSettings() + .getNetworks() + .entrySet().stream() + .findFirst() + .get().getValue().getIpAddress())) + .put(JdbcUtils.PORT_KEY, db.getExposedPorts().get(0)) + .put(JdbcUtils.DATABASE_KEY, db.getDatabaseName()) + .put(JdbcUtils.SCHEMAS_KEY, List.of("public")) + .put(JdbcUtils.USERNAME_KEY, db.getUsername()) + .put(JdbcUtils.PASSWORD_KEY, db.getPassword()) + .put(JdbcUtils.SSL_MODE_KEY, Map.of(JdbcUtils.MODE_KEY, "disable"))); + + final AirbyteConnectionStatus actual = source.check(configWithSSLModeDisabled); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, actual.getStatus()); + } finally { + bastion.stopAndClose(); + } + } + @Override protected boolean supportsPerStream() { return true; diff --git a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/resources/expected_spec.json index 870f0a66f303f..c2287b4113154 100644 --- a/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-mysql-strict-encrypt/src/test/resources/expected_spec.json @@ -61,8 +61,6 @@ "mode": { "type": "string", "const": "preferred", - "enum": ["preferred"], - "default": "preferred", "order": 0 } } @@ -75,8 +73,6 @@ "mode": { "type": "string", "const": "required", - "enum": ["required"], - "default": "required", "order": 0 } } @@ -89,8 +85,6 @@ "mode": { "type": "string", "const": "verify_ca", - "enum": ["verify_ca"], - "default": "verify_ca", "order": 0 }, "ca_certificate": { @@ -134,8 +128,6 @@ "mode": { "type": "string", "const": "verify_identity", - "enum": ["verify_identity"], - "default": "verify_identity", "order": 0 }, "ca_certificate": { @@ -187,8 +179,6 @@ "method": { "type": "string", "const": "STANDARD", - "enum": ["STANDARD"], - "default": "STANDARD", "order": 0 } } @@ -201,8 +191,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "initial_waiting_seconds": { diff --git a/airbyte-integrations/connectors/source-mysql/Dockerfile b/airbyte-integrations/connectors/source-mysql/Dockerfile index b8186ad829520..6bdb2fa01373d 100644 --- a/airbyte-integrations/connectors/source-mysql/Dockerfile +++ b/airbyte-integrations/connectors/source-mysql/Dockerfile @@ -16,6 +16,6 @@ ENV APPLICATION source-mysql COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.0.11 +LABEL io.airbyte.version=1.0.13 LABEL io.airbyte.name=airbyte/source-mysql diff --git a/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml b/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml index cf854c42e505f..4653b50be581a 100644 --- a/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-mysql/acceptance-test-config.yml @@ -3,4 +3,5 @@ connector_image: airbyte/source-mysql:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git 
a/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-mysql/build.gradle b/airbyte-integrations/connectors/source-mysql/build.gradle index 568ccfd0eee04..d738922c47142 100644 --- a/airbyte-integrations/connectors/source-mysql/build.gradle +++ b/airbyte-integrations/connectors/source-mysql/build.gradle @@ -3,6 +3,7 @@ plugins { id 'airbyte-docker' id 'airbyte-integration-test-java' id 'airbyte-performance-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java index 2730b68f6b5f9..8590ea970bff8 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/MySqlSourceOperations.java @@ -89,7 +89,7 @@ public void setJsonField(final ResultSet resultSet, final int colIndex, final Ob } } case BOOLEAN -> putBoolean(json, columnName, resultSet, colIndex); - case TINYINT, TINYINT_UNSIGNED -> { + case TINYINT -> { if (field.getLength() == 1L) { // TINYINT(1) is boolean putBoolean(json, columnName, resultSet, colIndex); @@ -97,6 +97,7 @@ public void setJsonField(final ResultSet resultSet, final int colIndex, final Ob putShortInt(json, columnName, resultSet, colIndex); } } + case TINYINT_UNSIGNED -> putShortInt(json, columnName, resultSet, colIndex); case SMALLINT, SMALLINT_UNSIGNED, MEDIUMINT, MEDIUMINT_UNSIGNED -> putInteger(json, columnName, resultSet, colIndex); case INT, INT_UNSIGNED -> { if (field.isUnsigned()) { @@ -184,7 +185,7 @@ public MysqlType getFieldType(final JsonNode field) { switch (literalType) { // BIT(1) and TINYINT(1) are interpreted as boolean - case BIT, TINYINT, TINYINT_UNSIGNED -> { + case BIT, TINYINT -> { if (columnSize == 1) { return MysqlType.BOOLEAN; } diff --git a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java index a16176bc09f19..cd56c7fdae65c 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java +++ b/airbyte-integrations/connectors/source-mysql/src/main/java/io/airbyte/integrations/source/mysql/helpers/CdcConfigurationHelper.java @@ -7,7 +7,6 @@ import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.commons.functional.CheckedConsumer; import io.airbyte.db.jdbc.JdbcDatabase; -import java.time.Duration; import java.time.ZoneId; import java.util.List; import java.util.Optional; diff --git a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json index c1a6f48b4e87d..40373023020a5 100644 --- a/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-mysql/src/main/resources/spec.json @@ -68,8 +68,6 @@ "mode": { "type": "string", "const": "preferred", - "enum": ["preferred"], - "default": "preferred", "order": 0 } } @@ -82,8 +80,6 @@ "mode": { "type": "string", "const": "required", - "enum": ["required"], - "default": "required", "order": 0 } } @@ -96,8 +92,6 @@ "mode": { "type": "string", "const": "verify_ca", - "enum": ["verify_ca"], - "default": "verify_ca", "order": 0 }, "ca_certificate": { @@ -141,8 +135,6 @@ "mode": { "type": "string", "const": "verify_identity", - "enum": ["verify_identity"], - "default": "verify_identity", "order": 0 }, "ca_certificate": { @@ -194,8 +186,6 @@ "method": { "type": "string", "const": "STANDARD", - "enum": 
["STANDARD"], - "default": "STANDARD", "order": 0 } } @@ -208,8 +198,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "initial_waiting_seconds": { diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSslRequiredSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSslRequiredSourceAcceptanceTest.java index 02a8dc39fbc88..7c984d81ab1d0 100644 --- a/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSslRequiredSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/CdcMySqlSslRequiredSourceAcceptanceTest.java @@ -1,5 +1,14 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static io.airbyte.integrations.io.airbyte.integration_tests.sources.utils.TestConstants.INITIAL_CDC_WAITING_SECONDS; +import static io.airbyte.protocol.models.SyncMode.INCREMENTAL; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; + import com.fasterxml.jackson.databind.JsonNode; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; @@ -23,176 +32,172 @@ import io.airbyte.protocol.models.Field; import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; +import java.util.List; +import java.util.stream.Collectors; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.Test; import org.testcontainers.containers.MySQLContainer; -import java.util.List; -import java.util.stream.Collectors; - -import static io.airbyte.integrations.io.airbyte.integration_tests.sources.utils.TestConstants.INITIAL_CDC_WAITING_SECONDS; -import static io.airbyte.protocol.models.SyncMode.INCREMENTAL; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; - -public class CdcMySqlSslRequiredSourceAcceptanceTest extends SourceAcceptanceTest { - private static final String STREAM_NAME = "id_and_name"; - private static final String STREAM_NAME2 = "starships"; - private MySQLContainer container; - private JsonNode config; - - @Override - protected String getImageName() { - return "airbyte/source-mysql:dev"; - } - - @Override - protected ConnectorSpecification getSpec() throws Exception { - return SshHelpers.getSpecAndInjectSsh(); - } - - @Override - protected JsonNode getConfig() { - return config; - } - - @Override - protected ConfiguredAirbyteCatalog getConfiguredCatalog() { - return new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList( - new ConfiguredAirbyteStream() - .withSyncMode(INCREMENTAL) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME), - String.format("%s", config.get(JdbcUtils.DATABASE_KEY).asText()), - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)) - .withSourceDefinedCursor(true) - .withSourceDefinedPrimaryKey(List.of(List.of("id"))) - .withSupportedSyncModes( - Lists.newArrayList(SyncMode.FULL_REFRESH, 
INCREMENTAL))), - new ConfiguredAirbyteStream() - .withSyncMode(INCREMENTAL) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream(CatalogHelpers.createAirbyteStream( - String.format("%s", STREAM_NAME2), - String.format("%s", config.get(JdbcUtils.DATABASE_KEY).asText()), - Field.of("id", JsonSchemaType.NUMBER), - Field.of("name", JsonSchemaType.STRING)) - .withSourceDefinedCursor(true) - .withSourceDefinedPrimaryKey(List.of(List.of("id"))) - .withSupportedSyncModes( - Lists.newArrayList(SyncMode.FULL_REFRESH, INCREMENTAL))))); - } - - @Override - protected JsonNode getState() { - return null; - } - - @Override - protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { - container = new MySQLContainer<>("mysql:8.0"); - container.start(); - - final var sslMode = ImmutableMap.builder() - .put(JdbcUtils.MODE_KEY, "required") - .build(); - final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() - .put("method", "CDC") - .put("initial_waiting_seconds", INITIAL_CDC_WAITING_SECONDS) - .build()); - - config = Jsons.jsonNode(ImmutableMap.builder() - .put(JdbcUtils.HOST_KEY, container.getHost()) - .put(JdbcUtils.PORT_KEY, container.getFirstMappedPort()) - .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) - .put(JdbcUtils.USERNAME_KEY, container.getUsername()) - .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) - .put(JdbcUtils.SSL_KEY, true) - .put(JdbcUtils.SSL_MODE_KEY, sslMode) - .put("replication_method", replicationMethod) - .put("is_test", true) - .build()); - - revokeAllPermissions(); - grantCorrectPermissions(); - alterUserRequireSsl(); - createAndPopulateTables(); - } - - private void alterUserRequireSsl() { - executeQuery("ALTER USER " + container.getUsername() + " REQUIRE SSL;"); - } - - private void createAndPopulateTables() { - executeQuery("CREATE TABLE id_and_name(id INTEGER PRIMARY KEY, name VARCHAR(200));"); - executeQuery( - "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); - executeQuery("CREATE TABLE starships(id INTEGER PRIMARY KEY, name VARCHAR(200));"); - executeQuery( - "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); - } - - private void revokeAllPermissions() { - executeQuery("REVOKE ALL PRIVILEGES, GRANT OPTION FROM " + container.getUsername() + "@'%';"); - } - - private void grantCorrectPermissions() { - executeQuery( - "GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO " - + container.getUsername() + "@'%';"); +public class CdcMySqlSslRequiredSourceAcceptanceTest extends SourceAcceptanceTest { + + private static final String STREAM_NAME = "id_and_name"; + private static final String STREAM_NAME2 = "starships"; + private MySQLContainer container; + private JsonNode config; + + @Override + protected String getImageName() { + return "airbyte/source-mysql:dev"; + } + + @Override + protected ConnectorSpecification getSpec() throws Exception { + return SshHelpers.getSpecAndInjectSsh(); + } + + @Override + protected JsonNode getConfig() { + return config; + } + + @Override + protected ConfiguredAirbyteCatalog getConfiguredCatalog() { + return new ConfiguredAirbyteCatalog().withStreams(Lists.newArrayList( + new ConfiguredAirbyteStream() + .withSyncMode(INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream( + String.format("%s", STREAM_NAME), + String.format("%s", config.get(JdbcUtils.DATABASE_KEY).asText()), + Field.of("id", 
JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING)) + .withSourceDefinedCursor(true) + .withSourceDefinedPrimaryKey(List.of(List.of("id"))) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, INCREMENTAL))), + new ConfiguredAirbyteStream() + .withSyncMode(INCREMENTAL) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withStream(CatalogHelpers.createAirbyteStream( + String.format("%s", STREAM_NAME2), + String.format("%s", config.get(JdbcUtils.DATABASE_KEY).asText()), + Field.of("id", JsonSchemaType.NUMBER), + Field.of("name", JsonSchemaType.STRING)) + .withSourceDefinedCursor(true) + .withSourceDefinedPrimaryKey(List.of(List.of("id"))) + .withSupportedSyncModes( + Lists.newArrayList(SyncMode.FULL_REFRESH, INCREMENTAL))))); + } + + @Override + protected JsonNode getState() { + return null; + } + + @Override + protected void setupEnvironment(final TestDestinationEnv environment) throws Exception { + container = new MySQLContainer<>("mysql:8.0"); + container.start(); + + final var sslMode = ImmutableMap.builder() + .put(JdbcUtils.MODE_KEY, "required") + .build(); + final JsonNode replicationMethod = Jsons.jsonNode(ImmutableMap.builder() + .put("method", "CDC") + .put("initial_waiting_seconds", INITIAL_CDC_WAITING_SECONDS) + .build()); + + config = Jsons.jsonNode(ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, container.getHost()) + .put(JdbcUtils.PORT_KEY, container.getFirstMappedPort()) + .put(JdbcUtils.DATABASE_KEY, container.getDatabaseName()) + .put(JdbcUtils.USERNAME_KEY, container.getUsername()) + .put(JdbcUtils.PASSWORD_KEY, container.getPassword()) + .put(JdbcUtils.SSL_KEY, true) + .put(JdbcUtils.SSL_MODE_KEY, sslMode) + .put("replication_method", replicationMethod) + .put("is_test", true) + .build()); + + revokeAllPermissions(); + grantCorrectPermissions(); + alterUserRequireSsl(); + createAndPopulateTables(); + } + + private void alterUserRequireSsl() { + executeQuery("ALTER USER " + container.getUsername() + " REQUIRE SSL;"); + } + + private void createAndPopulateTables() { + executeQuery("CREATE TABLE id_and_name(id INTEGER PRIMARY KEY, name VARCHAR(200));"); + executeQuery( + "INSERT INTO id_and_name (id, name) VALUES (1,'picard'), (2, 'crusher'), (3, 'vash');"); + executeQuery("CREATE TABLE starships(id INTEGER PRIMARY KEY, name VARCHAR(200));"); + executeQuery( + "INSERT INTO starships (id, name) VALUES (1,'enterprise-d'), (2, 'defiant'), (3, 'yamato');"); + } + + private void revokeAllPermissions() { + executeQuery("REVOKE ALL PRIVILEGES, GRANT OPTION FROM " + container.getUsername() + "@'%';"); + } + + private void grantCorrectPermissions() { + executeQuery( + "GRANT SELECT, RELOAD, SHOW DATABASES, REPLICATION SLAVE, REPLICATION CLIENT ON *.* TO " + + container.getUsername() + "@'%';"); + } + + private void executeQuery(final String query) { + try (final DSLContext dslContext = DSLContextFactory.create( + "root", + "test", + DatabaseDriver.MYSQL.getDriverClassName(), + String.format(DatabaseDriver.MYSQL.getUrlFormatString(), + container.getHost(), + container.getFirstMappedPort(), + container.getDatabaseName()), + SQLDialect.MYSQL)) { + final Database database = new Database(dslContext); + database.query( + ctx -> ctx + .execute(query)); + } catch (final Exception e) { + throw new RuntimeException(e); } + } + + @Override + protected void tearDown(final TestDestinationEnv testEnv) { + container.close(); + } + + @Test + public void testIncrementalSyncShouldNotFailIfBinlogIsDeleted() throws Exception { + final ConfiguredAirbyteCatalog 
configuredCatalog = withSourceDefinedCursors(getConfiguredCatalog()); + // only sync incremental streams + configuredCatalog.setStreams( + configuredCatalog.getStreams().stream().filter(s -> s.getSyncMode() == INCREMENTAL).collect(Collectors.toList())); + + final List airbyteMessages = runRead(configuredCatalog, getState()); + final List recordMessages = filterRecords(airbyteMessages); + final List stateMessages = airbyteMessages + .stream() + .filter(m -> m.getType() == AirbyteMessage.Type.STATE) + .map(AirbyteMessage::getState) + .collect(Collectors.toList()); + assertFalse(recordMessages.isEmpty(), "Expected the first incremental sync to produce records"); + assertFalse(stateMessages.isEmpty(), "Expected incremental sync to produce STATE messages"); + + // when we run incremental sync again there should be no new records. Run a sync with the latest + // state message and assert no records were emitted. + final JsonNode latestState = Jsons.jsonNode(supportsPerStream() ? stateMessages : List.of(Iterables.getLast(stateMessages))); + // RESET MASTER removes all binary log files that are listed in the index file, + // leaving only a single, empty binary log file with a numeric suffix of .000001 + executeQuery("RESET MASTER;"); + + assertEquals(6, filterRecords(runRead(configuredCatalog, latestState)).size()); + } - private void executeQuery(final String query) { - try (final DSLContext dslContext = DSLContextFactory.create( - "root", - "test", - DatabaseDriver.MYSQL.getDriverClassName(), - String.format(DatabaseDriver.MYSQL.getUrlFormatString(), - container.getHost(), - container.getFirstMappedPort(), - container.getDatabaseName()), - SQLDialect.MYSQL)) { - final Database database = new Database(dslContext); - database.query( - ctx -> ctx - .execute(query)); - } catch (final Exception e) { - throw new RuntimeException(e); - } - } - - @Override - protected void tearDown(final TestDestinationEnv testEnv) { - container.close(); - } - - @Test - public void testIncrementalSyncShouldNotFailIfBinlogIsDeleted() throws Exception { - final ConfiguredAirbyteCatalog configuredCatalog = withSourceDefinedCursors(getConfiguredCatalog()); - // only sync incremental streams - configuredCatalog.setStreams( - configuredCatalog.getStreams().stream().filter(s -> s.getSyncMode() == INCREMENTAL).collect(Collectors.toList())); - - final List airbyteMessages = runRead(configuredCatalog, getState()); - final List recordMessages = filterRecords(airbyteMessages); - final List stateMessages = airbyteMessages - .stream() - .filter(m -> m.getType() == AirbyteMessage.Type.STATE) - .map(AirbyteMessage::getState) - .collect(Collectors.toList()); - assertFalse(recordMessages.isEmpty(), "Expected the first incremental sync to produce records"); - assertFalse(stateMessages.isEmpty(), "Expected incremental sync to produce STATE messages"); - - // when we run incremental sync again there should be no new records. Run a sync with the latest - // state message and assert no records were emitted. - final JsonNode latestState = Jsons.jsonNode(supportsPerStream() ? 
stateMessages : List.of(Iterables.getLast(stateMessages))); - // RESET MASTER removes all binary log files that are listed in the index file, - // leaving only a single, empty binary log file with a numeric suffix of .000001 - executeQuery("RESET MASTER;"); - - assertEquals(6, filterRecords(runRead(configuredCatalog, latestState)).size()); - } } diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..e17733f16b235 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/dummy_config.json @@ -0,0 +1,7 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default", + "replication_method": { "method": "STANDARD" } +} diff --git a/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..a1ab35b8c56c5 --- /dev/null +++ b/airbyte-integrations/connectors/source-mysql/src/test-integration/resources/expected_spec.json @@ -0,0 +1,341 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/mysql", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "MySql Source Spec", + "type": "object", + "required": ["host", "port", "database", "username", "replication_method"], + "properties": { + "host": { + "description": "The host name of the database.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "The port to connect to.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 3306, + "examples": ["3306"], + "order": 1 + }, + "database": { + "description": "The database name.", + "title": "Database", + "type": "string", + "order": 2 + }, + "username": { + "description": "The username which is used to access the database.", + "title": "Username", + "type": "string", + "order": 3 + }, + "password": { + "description": "The password associated with the username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "SSL Connection", + "description": "Encrypt data using SSL.", + "type": "boolean", + "default": true, + "order": 6 + }, + "ssl_mode": { + "title": "SSL modes", + "description": "SSL connection modes.
  • preferred - Automatically attempt SSL connection. If the MySQL server does not support SSL, continue with a regular connection.
  • required - Always connect with SSL. If the MySQL server doesn’t support SSL, the connection will not be established. Certificate Authority (CA) and Hostname are not verified.
  • verify-ca - Always connect with SSL. Verifies CA, but allows connection even if Hostname does not match.
  • Verify Identity - Always connect with SSL. Verify both CA and Hostname.
  • Read more in the docs.", + "type": "object", + "order": 7, + "oneOf": [ + { + "title": "preferred", + "description": "Preferred SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "preferred", + "order": 0 + } + } + }, + { + "title": "required", + "description": "Require SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "required", + "order": 0 + } + } + }, + { + "title": "Verify CA", + "description": "Verify CA SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify_ca", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client certificate", + "description": "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client key", + "description": "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Verify Identity", + "description": "Verify-full SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify_identity", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client certificate", + "description": "Client certificate (this is not a required field, but if you want to use it, you will need to add the Client key as well)", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client key", + "description": "Client key (this is not a required field, but if you want to use it, you will need to add the Client certificate as well)", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. This field is optional. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + }, + "replication_method": { + "type": "object", + "title": "Replication Method", + "description": "Replication method to use for extracting data from the database.", + "order": 8, + "oneOf": [ + { + "title": "Standard", + "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "STANDARD", + "order": 0 + } + } + }, + { + "title": "Logical Replication (CDC)", + "description": "CDC uses the Binlog to detect inserts, updates, and deletes. 
This needs to be configured on the source database itself.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "CDC", + "order": 0 + }, + "initial_waiting_seconds": { + "type": "integer", + "title": "Initial Waiting Time in Seconds (Advanced)", + "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time.", + "default": 300, + "min": 120, + "max": 1200, + "order": 1 + }, + "server_time_zone": { + "type": "string", + "title": "Configured server timezone for the MySQL source (Advanced)", + "description": "Enter the configured MySQL server timezone. This should only be done if the configured timezone in your MySQL instance does not conform to IANNA standard.", + "order": 2 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + 
}, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-n8n/.dockerignore b/airbyte-integrations/connectors/source-n8n/.dockerignore new file mode 100644 index 0000000000000..e6ff0bd71d01d --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_n8n +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-n8n/Dockerfile b/airbyte-integrations/connectors/source-n8n/Dockerfile new file mode 100644 index 0000000000000..ced481db962b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_n8n ./source_n8n + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-n8n diff --git a/airbyte-integrations/connectors/source-n8n/README.md b/airbyte-integrations/connectors/source-n8n/README.md new file mode 100644 index 0000000000000..cd06a03f5f498 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/README.md @@ -0,0 +1,79 @@ +# N8n Source + +This is the repository for the N8n configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/n8n). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-n8n:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/n8n) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_n8n/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source n8n test creds` +and place them into `secrets/config.json`. 
+ +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-n8n:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-n8n:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-n8n:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-n8n:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-n8n:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-n8n:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-n8n:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-n8n:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-n8n/__init__.py b/airbyte-integrations/connectors/source-n8n/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml b/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml new file mode 100644 index 0000000000000..d2f0d6e291026 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-n8n:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_n8n/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-n8n/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-n8n/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-n8n/build.gradle b/airbyte-integrations/connectors/source-n8n/build.gradle new file mode 100644 index 0000000000000..1d54f8ea835e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_n8n' +} diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/__init__.py b/airbyte-integrations/connectors/source-n8n/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-n8n/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-n8n/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-n8n/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..4ee230eb82025 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "executions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-n8n/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..98b09640648ed --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "n8n_api", + "host": "https://n8n.io" +} diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-n8n/integration_tests/sample_config.json new file mode 100644 index 0000000000000..98b09640648ed --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "n8n_api", + "host": "https://n8n.io" +} diff --git a/airbyte-integrations/connectors/source-n8n/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-n8n/integration_tests/sample_state.json new file mode 100644 index 0000000000000..443dfbff2717c --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/integration_tests/sample_state.json @@ -0,0 +1,11 @@ +{ + "id": 1007, + "finished": false, + "mode": "webhook", + "retryOf": null, + "retrySuccessId": null, + "startedAt": "2022-10-24T13:00:31.946Z", + "stoppedAt": "2022-10-24T13:01:32.612Z", + "workflowId": "2", + "waitTill": null +} diff --git a/airbyte-integrations/connectors/source-n8n/main.py b/airbyte-integrations/connectors/source-n8n/main.py new file mode 100644 index 0000000000000..1f2ea9e2acd68 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_n8n import SourceN8n + +if __name__ == "__main__": + source = SourceN8n() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-n8n/requirements.txt b/airbyte-integrations/connectors/source-n8n/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-n8n/setup.py b/airbyte-integrations/connectors/source-n8n/setup.py new file mode 100644 index 0000000000000..bbaadbab6b654 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_n8n", + description="Source implementation for N8n.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-n8n/source_n8n/__init__.py b/airbyte-integrations/connectors/source-n8n/source_n8n/__init__.py new file mode 100644 index 0000000000000..82484f22ffbed --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/source_n8n/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceN8n + +__all__ = ["SourceN8n"] diff --git a/airbyte-integrations/connectors/source-n8n/source_n8n/n8n.yaml b/airbyte-integrations/connectors/source-n8n/source_n8n/n8n.yaml new file mode 100644 index 0000000000000..6e8c2289fb43e --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/source_n8n/n8n.yaml @@ -0,0 +1,50 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data"] + requester: + url_base: "{{ config['host'] }}/api/v1" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: X-N8N-API-KEY + api_token: "{{ config['api_key'] }}" + request_options_provider: + request_parameters: + limit: "250" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "" + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response.nextCursor }}" + page_size: 250 + page_token_option: + field_name: "cursor" + inject_into: "request_parameter" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + executions_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "executions" + primary_key: "id" + path: "/executions" + +streams: + - "*ref(definitions.executions_stream)" + +check: + stream_names: + - "executions" diff --git a/airbyte-integrations/connectors/source-n8n/source_n8n/schemas/executions.json b/airbyte-integrations/connectors/source-n8n/source_n8n/schemas/executions.json new file mode 100644 index 0000000000000..075ea0568cc4c --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/source_n8n/schemas/executions.json @@ -0,0 +1,33 
@@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["integer"] + }, + "finished": { + "type": ["boolean"] + }, + "mode": { + "type": ["string"] + }, + "retryOf": { + "type": ["null", "string"] + }, + "retrySuccessId": { + "type": ["null", "integer"] + }, + "startedAt": { + "type": ["string"] + }, + "stoppedAt": { + "type": ["null", "string"] + }, + "workflowId": { + "type": ["null", "string"] + }, + "waitTill": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-n8n/source_n8n/source.py b/airbyte-integrations/connectors/source-n8n/source_n8n/source.py new file mode 100644 index 0000000000000..b4e3253134564 --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/source_n8n/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceN8n(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "n8n.yaml"}) diff --git a/airbyte-integrations/connectors/source-n8n/source_n8n/spec.yaml b/airbyte-integrations/connectors/source-n8n/source_n8n/spec.yaml new file mode 100644 index 0000000000000..28902c69b6cbc --- /dev/null +++ b/airbyte-integrations/connectors/source-n8n/source_n8n/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/n8n +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: N8n Spec + type: object + required: + - host + - api_key + additionalProperties: true + properties: + host: + type: string + description: Hostname of the n8n instance + api_key: + type: string + description: Your API KEY. See here diff --git a/airbyte-integrations/connectors/source-nasa/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-nasa/integration_tests/configured_catalog.json index caa03877beac0..e2b271256f8f6 100644 --- a/airbyte-integrations/connectors/source-nasa/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-nasa/integration_tests/configured_catalog.json @@ -4,23 +4,13 @@ "stream": { "name": "nasa_apod", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh", - "incremental" - ], + "supported_sync_modes": ["full_refresh", "incremental"], "source_defined_cursor": true, - "default_cursor_field": [ - "date" - ], - "source_defined_primary_key": [ - [ - "date" - ] - ] + "default_cursor_field": ["date"], + "source_defined_primary_key": [["date"]] }, "sync_mode": "incremental", "destination_sync_mode": "overwrite" } ] } - diff --git a/airbyte-integrations/connectors/source-nasa/source_nasa/spec.yaml b/airbyte-integrations/connectors/source-nasa/source_nasa/spec.yaml index ace88f9ff1112..a06e464838660 100644 --- a/airbyte-integrations/connectors/source-nasa/source_nasa/spec.yaml +++ b/airbyte-integrations/connectors/source-nasa/source_nasa/spec.yaml @@ -22,7 +22,7 @@ connectionSpecification: type: integer minimum: 1 maximum: 100 - description: >- + description: >- A positive integer, no greater than 100. If this is specified then `count` randomly chosen images will be returned in a JSON array. Cannot be used in conjunction with `date` or `start_date` and `end_date`. 
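For the declarative sources added in this diff (such as `source-n8n` above), a hedged sketch of how the generated class can be exercised locally without Docker, assuming the `airbyte-cdk` package and the connector module are installed and that `secrets/config.json` exists; it drives the same `launch` entrypoint that the connector's `main.py` uses, and the config path is a placeholder.

```
# Minimal sketch: run the declarative n8n source's `check` command through the
# Airbyte CDK entrypoint, mirroring what main.py does with sys.argv.
# Assumes airbyte-cdk and the source_n8n package from this diff are installed.
from airbyte_cdk.entrypoint import launch
from source_n8n import SourceN8n

if __name__ == "__main__":
    # Other verbs (spec, discover, read) take the same form as main.py's CLI arguments.
    launch(SourceN8n(), ["check", "--config", "secrets/config.json"])
```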
diff --git a/airbyte-integrations/connectors/source-newsdata/.dockerignore b/airbyte-integrations/connectors/source-newsdata/.dockerignore new file mode 100644 index 0000000000000..e53943a36e18b --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_newsdata +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-newsdata/Dockerfile b/airbyte-integrations/connectors/source-newsdata/Dockerfile new file mode 100644 index 0000000000000..5fe81e4686ede --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_newsdata ./source_newsdata + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-newsdata diff --git a/airbyte-integrations/connectors/source-newsdata/README.md b/airbyte-integrations/connectors/source-newsdata/README.md new file mode 100644 index 0000000000000..4a4e36cc6c3ab --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/README.md @@ -0,0 +1,79 @@ +# Newsdata Source + +This is the repository for the Newsdata configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/newsdata). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-newsdata:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/newsdata) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_newsdata/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source newsdata test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-newsdata:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-newsdata:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-newsdata:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-newsdata:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-newsdata:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-newsdata:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-newsdata:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-newsdata:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-newsdata/__init__.py b/airbyte-integrations/connectors/source-newsdata/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-newsdata/acceptance-test-config.yml b/airbyte-integrations/connectors/source-newsdata/acceptance-test-config.yml new file mode 100644 index 0000000000000..a333228b391b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/acceptance-test-config.yml @@ -0,0 +1,27 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-newsdata:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_newsdata/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-newsdata/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-newsdata/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-newsdata/build.gradle b/airbyte-integrations/connectors/source-newsdata/build.gradle new file mode 100644 index 0000000000000..8e930b32624ba --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_newsdata' +} diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/__init__.py b/airbyte-integrations/connectors/source-newsdata/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-newsdata/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-newsdata/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-newsdata/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..bf69b4357da57 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/configured_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "latest", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "sources", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-newsdata/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..9266d77f33fd7 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "ivalid_api_key", + "category": ["invalid_category_1", "invalid_category_2"] +} diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-newsdata/integration_tests/sample_config.json new file mode 100644 index 0000000000000..a216b34cceaad --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "TODO: set your api_key", + "query": "pizza" +} diff --git a/airbyte-integrations/connectors/source-newsdata/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-newsdata/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-newsdata/main.py b/airbyte-integrations/connectors/source-newsdata/main.py new file mode 100644 index 0000000000000..6141f19bc0f42 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_newsdata import SourceNewsdata + +if __name__ == "__main__": + source = SourceNewsdata() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-newsdata/requirements.txt b/airbyte-integrations/connectors/source-newsdata/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-newsdata/setup.py b/airbyte-integrations/connectors/source-newsdata/setup.py new file mode 100644 index 0000000000000..e37e1b508e463 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_newsdata", + description="Source implementation for Newsdata.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-newsdata/source_newsdata/__init__.py b/airbyte-integrations/connectors/source-newsdata/source_newsdata/__init__.py new file mode 100644 index 0000000000000..117cedb9b0e83 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/source_newsdata/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceNewsdata + +__all__ = ["SourceNewsdata"] diff --git a/airbyte-integrations/connectors/source-newsdata/source_newsdata/newsdata.yaml b/airbyte-integrations/connectors/source-newsdata/source_newsdata/newsdata.yaml new file mode 100644 index 0000000000000..911e4b3954fb5 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/source_newsdata/newsdata.yaml @@ -0,0 +1,79 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["results"] + base_requester: + url_base: "https://newsdata.io/api/1" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "X-ACCESS-KEY" + api_token: "{{ config['api_key'] }}" + base_retriever: + record_selector: + $ref: "*ref(definitions.selector)" + base_stream: + retriever: + $ref: "*ref(definitions.base_retriever)" + requester: + $ref: "*ref(definitions.base_requester)" + cursor_paginator: + type: "DefaultPaginator" + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response['nextPage'] }}" + page_size: 10 + # TODO: make page_size dynamic, depending on free or paid tier. 
See https://github.com/airbytehq/airbyte/issues/18783 + page_token_option: + field_name: "page" + inject_into: "request_parameter" + page_size_option: # This is useless, only there because it is required, but page sizes are managed automatically by API subscription type + field_name: "X-Pagination-Page-Size" + inject_into: "header" + url_base: "*ref(definitions.base_requester.url_base)" + latest_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "latest" + primary_key: "link" + path: "/news" + retriever: + $ref: "*ref(definitions.base_retriever)" + requester: + $ref: "*ref(definitions.base_requester)" + request_options_provider: + request_parameters: + country: "{{ ','.join(config['country']) }}" + language: "{{ ','.join(config['language']) }}" + category: "{{ ','.join(config['category']) }}" + q: "{{ config['query'] | urlencode }}" + qInTitle: "{{ config['query_in_title'] | urlencode }}" + domain: "{{ ','.join(config['domain']) }}" + paginator: + $ref: "*ref(definitions.cursor_paginator)" + sources_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "sources" + primary_key: "id" + path: "/sources" + retriever: + $ref: "*ref(definitions.base_retriever)" + requester: + $ref: "*ref(definitions.base_requester)" + request_options_provider: + request_parameters: + country: "{{ config['country'][0] }}" + language: "{{ config['language'][0] }}" + category: "{{ config['category'][0] }}" + +streams: + - "*ref(definitions.latest_stream)" + - "*ref(definitions.sources_stream)" + +check: + stream_names: + - "latest" + - "sources" diff --git a/airbyte-integrations/connectors/source-newsdata/source_newsdata/schemas/latest.json b/airbyte-integrations/connectors/source-newsdata/source_newsdata/schemas/latest.json new file mode 100644 index 0000000000000..42aa64c9db687 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/source_newsdata/schemas/latest.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "title": { + "type": ["null", "string"] + }, + "link": { + "type": ["null", "string"] + }, + "source_id": { + "type": ["null", "string"] + }, + "keywords": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "creator": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "image_url": { + "type": ["null", "string"] + }, + "video_url": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "pubDate": { + "type": ["null", "string"] + }, + "content": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "category": { + "type": ["null", "array"], + "items": { + "type": "string" + } + }, + "language": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-newsdata/source_newsdata/schemas/sources.json b/airbyte-integrations/connectors/source-newsdata/source_newsdata/schemas/sources.json new file mode 100644 index 0000000000000..be59b5b8420fb --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/source_newsdata/schemas/sources.json @@ -0,0 +1,33 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + 
"language": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + }, + "country": { + "type": ["null", "array"], + "items": { + "type": ["null", "string"] + } + } + } +} diff --git a/airbyte-integrations/connectors/source-newsdata/source_newsdata/source.py b/airbyte-integrations/connectors/source-newsdata/source_newsdata/source.py new file mode 100644 index 0000000000000..bba2fc1793660 --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/source_newsdata/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceNewsdata(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "newsdata.yaml"}) diff --git a/airbyte-integrations/connectors/source-newsdata/source_newsdata/spec.yaml b/airbyte-integrations/connectors/source-newsdata/source_newsdata/spec.yaml new file mode 100644 index 0000000000000..dd186f2a5c96b --- /dev/null +++ b/airbyte-integrations/connectors/source-newsdata/source_newsdata/spec.yaml @@ -0,0 +1,198 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/newsdata +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Newsdata Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + type: string + description: API Key + airbyte_secret: true + order: 0 + OneOf: + query: + type: string + description: >- + Keywords or phrases to search for in the news title and content. + Advanced Search options: + - Search `Social`: query = "social" + - Search `Social Pizza`: query = "social pizza" + - Search `Social` but not with `pizza`: query = "social -pizza" + - Search `Social` but not with `pizza` and `wildfire`: query = "social -pizza -wildfire" + - Search `Social` and `pizza`: query = "social AND pizza" + - Search `Social` and `pizza` and `pasta`: query = "social AND pizza AND pasta" + - Search `Social` or `pizza`: query = "social OR pizza" + - Search `Social` or `pizza` but not `pasta`: query = "social OR pizza -pasta" + - Search `Social` or `pizza` or `pasta`: query = "social OR pizza OR pasta" + Note: You can't use AND and OR in the same query. + order: 1 + query_in_title: + type: string + description: Same as `query`, but restricting the search to only the news title. It cannot be used along with `query`. + order: 1 + domain: + type: array + description: Domains (maximum 5) to restrict the search to. Use the sources stream to find top sources id. + maxitems: 5 + items: + type: string + order: 2 + country: + type: array + description: 2-letter ISO 3166-1 countries (maximum 5) to restrict the search to. 
+ maxitems: 5 + order: 3 + items: + type: string + enum: + - ar + - au + - at + - bd + - by + - be + - br + - bg + - ca + - cl + - cn + - co + - cr + - cu + - cz + - dk + - do + - ec + - eg + - ee + - et + - fi + - fr + - de + - gr + - hk + - hu + - in + - id + - iq + - ie + - il + - it + - jp + - kz + - kw + - lv + - lb + - lt + - my + - mx + - ma + - mm + - nl + - nz + - ng + - kp + - "no" + - pk + - pe + - ph + - pl + - pt + - pr + - ro + - ru + - sa + - rs + - sg + - sk + - si + - za + - kr + - es + - se + - ch + - tw + - tz + - th + - tr + - ua + - ae + - gb + - us + - ve + - vi + category: + type: array + description: Categories (maximum 5) to restrict the search to. + maxitems: 5 + order: 4 + items: + type: string + enum: + - business + - entertainment + - environment + - food + - health + - politics + - science + - sports + - technology + - top + - world + language: + type: array + description: Languages (maximum 5) to restrict the search to. + maxitems: 5 + order: 5 + items: + type: string + enum: + - be + - am + - ar + - bn + - bs + - bg + - my + - ckb + - zh + - hr + - cs + - da + - nl + - en + - et + - fi + - fr + - de + - el + - he + - hi + - hu + - in + - it + - jp + - ko + - lv + - lt + - ms + - "no" + - pl + - pt + - ro + - ru + - sr + - sk + - sl + - es + - sw + - sv + - th + - tr + - uk + - ur + - vi diff --git a/airbyte-integrations/connectors/source-nytimes/.dockerignore b/airbyte-integrations/connectors/source-nytimes/.dockerignore new file mode 100644 index 0000000000000..33cefa728fb13 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_nytimes +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-nytimes/Dockerfile b/airbyte-integrations/connectors/source-nytimes/Dockerfile new file mode 100644 index 0000000000000..d8e2580d82ccb --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_nytimes ./source_nytimes + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-nytimes diff --git a/airbyte-integrations/connectors/source-nytimes/README.md b/airbyte-integrations/connectors/source-nytimes/README.md new file mode 100644 index 0000000000000..a2a6f85001943 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/README.md @@ -0,0 +1,79 @@ +# Nytimes Source + +This is the repository for the Nytimes configuration based source connector. 
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/nytimes). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-nytimes:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/nytimes) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_nytimes/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source nytimes test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-nytimes:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-nytimes:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-nytimes:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-nytimes:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-nytimes:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-nytimes:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-nytimes:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-nytimes:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. 
+* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-nytimes/__init__.py b/airbyte-integrations/connectors/source-nytimes/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-nytimes/acceptance-test-config.yml b/airbyte-integrations/connectors/source-nytimes/acceptance-test-config.yml new file mode 100644 index 0000000000000..d0e1d6a5606bd --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-nytimes:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_nytimes/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-nytimes/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-nytimes/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-nytimes/build.gradle b/airbyte-integrations/connectors/source-nytimes/build.gradle new file mode 100644 index 0000000000000..e1410f09c0483 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_nytimes' +} diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/__init__.py b/airbyte-integrations/connectors/source-nytimes/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-nytimes/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..856143d7913fe --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "archive": { + "pub_date": "2999-12-31T23:59:59+0000" + } +} diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-nytimes/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-nytimes/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..145e307a0bbb4 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "archive", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "most_popular_emailed", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "most_popular_shared", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "most_popular_viewed", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-nytimes/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..fea0c792182e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "My API Key", + "year": "2000", + "month": "april", + "period": 14 +} diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-nytimes/integration_tests/sample_config.json new file mode 100644 index 0000000000000..ba1248d9cf6b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/sample_config.json @@ -0,0 +1,7 @@ +{ + "api_key": "My API Key", + "year": 2022, + "month": 6, + "period": 7, + "shared_type": "facebook" +} diff --git a/airbyte-integrations/connectors/source-nytimes/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-nytimes/integration_tests/sample_state.json new file mode 100644 index 0000000000000..ef2c459f763d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "archive": { + "pub_date": "2022-11-02T10:00:09+0000" + } +} diff --git a/airbyte-integrations/connectors/source-nytimes/main.py b/airbyte-integrations/connectors/source-nytimes/main.py new file mode 100644 index 0000000000000..7eeb2e6c83844 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_nytimes import SourceNytimes + +if __name__ == "__main__": + source = SourceNytimes() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-nytimes/requirements.txt b/airbyte-integrations/connectors/source-nytimes/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
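As a quick sanity check outside Docker, a small script along the following lines can exercise the declarative Nytimes source directly. This is only a sketch, not part of the connector: the `smoke_test.py` name is made up, and it assumes the package has been installed locally (e.g. `pip install -e .`, matching the `-e .` entry in requirements.txt) and that a `secrets/config.json` with a valid API key exists. It simply mirrors what `main.py` does through the CDK entrypoint.

```python
# smoke_test.py — illustrative sketch only, not shipped with the connector
import json
import logging

from source_nytimes import SourceNytimes

logger = logging.getLogger("airbyte")
source = SourceNytimes()

# The connector specification is read from source_nytimes/spec.yaml
print(source.spec(logger))

# check() validates the connection; the streams it probes are the ones
# listed under the `check` block of nytimes.yaml
with open("secrets/config.json") as config_file:
    config = json.load(config_file)
print(source.check(logger, config))
```

For end-to-end verification, the Docker and Gradle commands described in the README above remain the canonical path.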
diff --git a/airbyte-integrations/connectors/source-nytimes/setup.py b/airbyte-integrations/connectors/source-nytimes/setup.py new file mode 100644 index 0000000000000..babec603a1d63 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_nytimes", + description="Source implementation for Nytimes.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/__init__.py b/airbyte-integrations/connectors/source-nytimes/source_nytimes/__init__.py new file mode 100644 index 0000000000000..026d0c73eecf4 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceNytimes + +__all__ = ["SourceNytimes"] diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/nytimes.yaml b/airbyte-integrations/connectors/source-nytimes/source_nytimes/nytimes.yaml new file mode 100644 index 0000000000000..a2c008b48750a --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/nytimes.yaml @@ -0,0 +1,79 @@ +version: "0.1.0" + +definitions: + requester: + url_base: "https://api.nytimes.com/svc" + http_method: "GET" + request_options_provider: + request_parameters: + api-key: "{{ config['api_key'] }}" + retriever: + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + archive_stream: + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + extractor: + field_pointer: ["response", "docs"] + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m" + end_datetime: + datetime: "{{ config['end_date'] or today_utc().strftime('%Y-%m') }}" + datetime_format: "%Y-%m" + step: "1m" + datetime_format: "%Y-%m-%dT%H:%M:%S%z" + cursor_field: "{{ options['stream_cursor_field'] }}" + $options: + name: "archive" + primary_key: "_id" + path: "/archive/v1/{{ stream_slice['start_time'].split('-')[0] | int }}/{{ stream_slice['start_time'].split('-')[1] | int }}.json" + stream_cursor_field: "pub_date" + most_popular_emailed_stream: + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + extractor: + field_pointer: ["results"] + $options: + name: "most_popular_emailed" + primary_key: "id" + path: "/mostpopular/v2/emailed/{{ config['period'] }}.json" + most_popular_shared_stream: + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + extractor: + field_pointer: ["results"] + $options: + name: "most_popular_shared" + primary_key: "id" + path: "/mostpopular/v2/shared/{{ config['period'] }}{% if 'share_type' in config %}/{{ config['share_type'] }}{% endif %}.json" + most_popular_viewed_stream: + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + extractor: + field_pointer: ["results"] + $options: + name: "most_popular_viewed" + primary_key: "id" + path: "/mostpopular/v2/viewed/{{ config['period'] 
}}.json" + +streams: + - "*ref(definitions.archive_stream)" + - "*ref(definitions.most_popular_emailed_stream)" + - "*ref(definitions.most_popular_shared_stream)" + - "*ref(definitions.most_popular_viewed_stream)" + +check: + stream_names: + - "archive" + - "most_popular_emailed" + - "most_popular_shared" + - "most_popular_viewed" diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/archive.json b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/archive.json new file mode 100644 index 0000000000000..ae41270a09059 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/archive.json @@ -0,0 +1,193 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "web_url": { + "type": ["null", "string"], + "description": "Article URL." + }, + "snippet": { + "type": ["null", "string"] + }, + "print_page": { + "type": ["null", "string"], + "description": "Page in print (e.g. 1)." + }, + "print_section": { + "type": ["null", "string"], + "description": "Section in print (e.g. A)." + }, + "source": { + "type": ["null", "string"] + }, + "multimedia": { + "type": "array", + "items": { + "type": "object", + "properties": { + "rank": { + "type": ["null", "integer"] + }, + "subtype": { + "type": ["null", "string"] + }, + "caption": { + "type": ["null", "string"] + }, + "credit": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + }, + "height": { + "type": ["null", "integer"] + }, + "width": { + "type": ["null", "integer"] + }, + "legacy": { + "type": "object", + "properties": { + "xlarge": { + "type": ["null", "string"] + }, + "xlargewidth": { + "type": ["null", "integer"] + }, + "xlargeheight": { + "type": ["null", "integer"] + } + } + }, + "crop_name": { + "type": ["null", "string"] + } + } + } + }, + "headline": { + "type": "object", + "properties": { + "main": { + "type": ["null", "string"] + }, + "kicker": { + "type": ["null", "string"] + }, + "content_kicker": { + "type": ["null", "string"] + }, + "print_headline": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "seo": { + "type": ["null", "string"] + }, + "sub": { + "type": ["null", "string"] + } + } + }, + "keywords": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": ["null", "string"] + }, + "value": { + "type": ["null", "string"] + }, + "rank": { + "type": ["null", "integer"] + }, + "major": { + "type": ["null", "string"] + } + } + } + }, + "pub_date": { + "type": ["null", "string"], + "description": "Publication date." + }, + "document_type": { + "type": ["null", "string"], + "description": "Document type (article, multimedia)." + }, + "news_desk": { + "type": ["null", "string"], + "description": "Desk in the newsroom that worked on the story (Foreign, Metro, Sports, ...)." + }, + "section_name": { + "type": ["null", "string"], + "description": "Section that the article appeared in (New York, Sports, World, ...)." 
+ }, + "byline": { + "type": "object", + "properties": { + "original": { + "type": ["null", "string"] + }, + "person": { + "type": "array", + "items": { + "type": "object", + "properties": { + "firstname": { + "type": ["null", "string"] + }, + "middlename": { + "type": ["null", "string"] + }, + "lastname": { + "type": ["null", "string"] + }, + "qualifier": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "role": { + "type": ["null", "string"] + }, + "organization": { + "type": ["null", "string"] + }, + "rank": { + "type": ["null", "integer"] + } + } + } + }, + "organization": { + "type": ["null", "string"] + } + } + }, + "type_of_material": { + "type": ["null", "string"], + "description": "Type of asset (Correction, News, Op-Ed, Review, Video, ...)." + }, + "_id": { + "type": ["null", "string"] + }, + "word_count": { + "type": ["null", "integer"], + "description": "Number of words in the article." + }, + "uri": { + "type": ["null", "string"], + "description": "Uniquely identifies an asset." + } + } +} diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_emailed.json b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_emailed.json new file mode 100644 index 0000000000000..1b09c2228ce6c --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_emailed.json @@ -0,0 +1,156 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "url": { + "type": ["null", "string"], + "description": "Article's URL." + }, + "adx_keywords": { + "type": ["null", "string"], + "description": "Semicolon separated list of keywords." + }, + "subsection": { + "type": ["null", "string"], + "description": "Article's subsection (e.g. Politics). Can be empty string." + }, + "column": { + "type": ["null", "string"], + "description": "Deprecated. Set to null." + }, + "eta_id": { + "type": ["null", "integer"], + "description": "Deprecated. Set to 0." + }, + "section": { + "type": ["null", "string"], + "description": "Article's section (e.g. Sports)." + }, + "id": { + "type": ["null", "integer"], + "description": "Asset ID number (e.g. 100000007772696)." + }, + "asset_id": { + "type": ["null", "integer"], + "description": "Asset ID number (e.g. 100000007772696)." + }, + "nytdsection": { + "type": ["null", "string"], + "description": "Article's section (e.g. sports)." + }, + "byline": { + "type": ["null", "string"], + "description": "Article's byline (e.g. By Thomas L. Friedman)." + }, + "type": { + "type": ["null", "string"], + "description": "Asset type (e.g. Article, Interactive, ...)." + }, + "title": { + "type": ["null", "string"], + "description": "Article's headline (e.g. When the Cellos Play, the Cows Come Home)." + }, + "abstract": { + "type": ["null", "string"], + "description": "Brief summary of the article." + }, + "published_date": { + "type": ["null", "string"], + "description": "When the article was published on the web (e.g. 2021-04-19)." + }, + "source": { + "type": ["null", "string"], + "description": "Publisher (e.g. New York Times)." + }, + "updated": { + "type": ["null", "string"], + "description": "When the article was last updated (e.g. 2021-05-12 06:32:03)." + }, + "des_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of description facets (e.g. Quarantine (Life and Culture))." 
+ }, + "org_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of organization facets (e.g. Sullivan Street Bakery)." + }, + "per_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of person facets (e.g. Bittman, Mark)." + }, + "geo_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of geographic facets (e.g. Canada)." + }, + "media": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": ["null", "string"], + "description": "Asset type (e.g. image)." + }, + "subtype": { + "type": ["null", "string"], + "description": "Asset subtype (e.g. photo)." + }, + "caption": { + "type": ["null", "string"], + "description": "Media caption." + }, + "copyright": { + "type": ["null", "string"], + "description": "Media credit." + }, + "approved_for_syndication": { + "type": ["null", "integer"], + "description": "Whether media is approved for syndication." + }, + "media-metadata": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": ["null", "string"], + "description": "Image's URL." + }, + "format": { + "type": ["null", "string"], + "description": "Image's crop name." + }, + "height": { + "type": ["null", "integer"], + "description": "Image's height (e.g. 293)." + }, + "width": { + "type": ["null", "integer"], + "description": "Image's width (e.g. 440)." + } + } + }, + "description": "Media metadata (url, width, height, ...)." + } + } + }, + "description": "Array of images." + }, + "uri": { + "type": ["null", "string"], + "description": "An article's globally unique identifier." + } + } +} diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_shared.json b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_shared.json new file mode 100644 index 0000000000000..1b09c2228ce6c --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_shared.json @@ -0,0 +1,156 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "url": { + "type": ["null", "string"], + "description": "Article's URL." + }, + "adx_keywords": { + "type": ["null", "string"], + "description": "Semicolon separated list of keywords." + }, + "subsection": { + "type": ["null", "string"], + "description": "Article's subsection (e.g. Politics). Can be empty string." + }, + "column": { + "type": ["null", "string"], + "description": "Deprecated. Set to null." + }, + "eta_id": { + "type": ["null", "integer"], + "description": "Deprecated. Set to 0." + }, + "section": { + "type": ["null", "string"], + "description": "Article's section (e.g. Sports)." + }, + "id": { + "type": ["null", "integer"], + "description": "Asset ID number (e.g. 100000007772696)." + }, + "asset_id": { + "type": ["null", "integer"], + "description": "Asset ID number (e.g. 100000007772696)." + }, + "nytdsection": { + "type": ["null", "string"], + "description": "Article's section (e.g. sports)." + }, + "byline": { + "type": ["null", "string"], + "description": "Article's byline (e.g. By Thomas L. Friedman)." + }, + "type": { + "type": ["null", "string"], + "description": "Asset type (e.g. Article, Interactive, ...)." + }, + "title": { + "type": ["null", "string"], + "description": "Article's headline (e.g. When the Cellos Play, the Cows Come Home)." 
+ }, + "abstract": { + "type": ["null", "string"], + "description": "Brief summary of the article." + }, + "published_date": { + "type": ["null", "string"], + "description": "When the article was published on the web (e.g. 2021-04-19)." + }, + "source": { + "type": ["null", "string"], + "description": "Publisher (e.g. New York Times)." + }, + "updated": { + "type": ["null", "string"], + "description": "When the article was last updated (e.g. 2021-05-12 06:32:03)." + }, + "des_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of description facets (e.g. Quarantine (Life and Culture))." + }, + "org_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of organization facets (e.g. Sullivan Street Bakery)." + }, + "per_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of person facets (e.g. Bittman, Mark)." + }, + "geo_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of geographic facets (e.g. Canada)." + }, + "media": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": ["null", "string"], + "description": "Asset type (e.g. image)." + }, + "subtype": { + "type": ["null", "string"], + "description": "Asset subtype (e.g. photo)." + }, + "caption": { + "type": ["null", "string"], + "description": "Media caption." + }, + "copyright": { + "type": ["null", "string"], + "description": "Media credit." + }, + "approved_for_syndication": { + "type": ["null", "integer"], + "description": "Whether media is approved for syndication." + }, + "media-metadata": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": ["null", "string"], + "description": "Image's URL." + }, + "format": { + "type": ["null", "string"], + "description": "Image's crop name." + }, + "height": { + "type": ["null", "integer"], + "description": "Image's height (e.g. 293)." + }, + "width": { + "type": ["null", "integer"], + "description": "Image's width (e.g. 440)." + } + } + }, + "description": "Media metadata (url, width, height, ...)." + } + } + }, + "description": "Array of images." + }, + "uri": { + "type": ["null", "string"], + "description": "An article's globally unique identifier." + } + } +} diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_viewed.json b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_viewed.json new file mode 100644 index 0000000000000..01283446f09df --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/schemas/most_popular_viewed.json @@ -0,0 +1,140 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "url": { + "type": ["null", "string"], + "description": "Article's URL." + }, + "adx_keywords": { + "type": ["null", "string"], + "description": "Semicolon separated list of keywords." + }, + "column": { + "type": ["null", "string"], + "description": "Deprecated. Set to null." + }, + "section": { + "type": ["null", "string"], + "description": "Article's section (e.g. Sports)." + }, + "byline": { + "type": ["null", "string"], + "description": "Article's byline (e.g. By Thomas L. Friedman)." + }, + "type": { + "type": ["null", "string"], + "description": "Asset type (e.g. Article, Interactive, ...)." 
+ }, + "title": { + "type": ["null", "string"], + "description": "Article's headline (e.g. When the Cellos Play, the Cows Come Home)." + }, + "abstract": { + "type": ["null", "string"], + "description": "Brief summary of the article." + }, + "published_date": { + "type": ["null", "string"], + "description": "When the article was published on the web (e.g. 2021-04-19)." + }, + "source": { + "type": ["null", "string"], + "description": "Publisher (e.g. New York Times)." + }, + "id": { + "type": ["null", "integer"], + "description": "Asset ID number (e.g. 100000007772696)." + }, + "asset_id": { + "type": ["null", "integer"], + "description": "Asset ID number (e.g. 100000007772696)." + }, + "des_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of description facets (e.g. Quarantine (Life and Culture))." + }, + "org_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of organization facets (e.g. Sullivan Street Bakery)." + }, + "per_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of person facets (e.g. Bittman, Mark)." + }, + "geo_facet": { + "type": "array", + "items": { + "type": ["null", "string"] + }, + "description": "Array of geographic facets (e.g. Canada)." + }, + "media": { + "type": "array", + "items": { + "type": "object", + "properties": { + "type": { + "type": ["null", "string"], + "description": "Asset type (e.g. image)." + }, + "subtype": { + "type": ["null", "string"], + "description": "Asset subtype (e.g. photo)." + }, + "caption": { + "type": ["null", "string"], + "description": "Media caption." + }, + "copyright": { + "type": ["null", "string"], + "description": "Media credit." + }, + "approved_for_syndication": { + "type": ["null", "integer"], + "description": "Whether media is approved for syndication." + }, + "media-metadata": { + "type": "array", + "items": { + "type": "object", + "properties": { + "url": { + "type": ["null", "string"], + "description": "Image's URL." + }, + "format": { + "type": ["null", "string"], + "description": "Image's crop name." + }, + "height": { + "type": ["null", "integer"], + "description": "Image's height (e.g. 293)." + }, + "width": { + "type": ["null", "integer"], + "description": "Image's width (e.g. 440)." + } + } + }, + "description": "Media metadata (url, width, height, ...)." + } + } + }, + "description": "Array of images." + }, + "uri": { + "type": ["null", "string"], + "description": "An article's globally unique identifier." + } + } +} diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/source.py b/airbyte-integrations/connectors/source-nytimes/source_nytimes/source.py new file mode 100644 index 0000000000000..e67605caf16d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceNytimes(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "nytimes.yaml"}) diff --git a/airbyte-integrations/connectors/source-nytimes/source_nytimes/spec.yaml b/airbyte-integrations/connectors/source-nytimes/source_nytimes/spec.yaml new file mode 100644 index 0000000000000..b8df768cdb52f --- /dev/null +++ b/airbyte-integrations/connectors/source-nytimes/source_nytimes/spec.yaml @@ -0,0 +1,51 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/nytimes +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Nytimes Spec + type: object + required: + - api_key + - start_date + - period + additionalProperties: true + properties: + api_key: + type: string + title: API Key + description: API Key + airbyte_secret: true + order: 0 + start_date: + type: string + title: Start Date + description: Start date to begin the article retrieval (format YYYY-MM) + pattern: ^[0-9]{4}-[0-9]{2}$ + examples: + - 2022-08 + - 1851-01 + order: 1 + end_date: + type: string + title: End Date + description: End date to stop the article retrieval (format YYYY-MM) + pattern: ^[0-9]{4}-[0-9]{2}$ + examples: + - 2022-08 + - 1851-01 + order: 2 + period: + type: integer + title: Period (used for Most Popular streams) + description: Period of time (in days) + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: string + title: Share Type (used for Most Popular Shared stream) + description: Share Type + order: 4 + enum: + - "facebook" diff --git a/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml b/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml index 37c40cb71f16e..fc86a52ddd77e 100644 --- a/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-oracle/acceptance-test-config.yml @@ -3,4 +3,6 @@ connector_image: airbyte/source-oracle:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" + timeout_seconds: 300 diff --git a/airbyte-integrations/connectors/source-oracle/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-oracle/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-oracle/build.gradle b/airbyte-integrations/connectors/source-oracle/build.gradle index 435b7625e749b..32cc9db422dfa 100644 --- a/airbyte-integrations/connectors/source-oracle/build.gradle +++ b/airbyte-integrations/connectors/source-oracle/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-oracle/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-oracle/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json b/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json index 4f1c7b04e01a0..56a6cc0b28242 100644 --- a/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-oracle/src/main/resources/spec.json @@ -35,7 +35,6 @@ "connection_type": { "type": "string", "const": "service_name", - "default": "service_name", "order": 0 }, "service_name": { @@ -53,7 +52,6 @@ "connection_type": { "type": "string", "const": "sid", - "default": "sid", "order": 0 }, "sid": { @@ -108,9 +106,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "unencrypted", - "enum": ["unencrypted"], - "default": "unencrypted" + "const": "unencrypted" } } }, @@ -121,9 +117,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "client_nne", - "enum": ["client_nne"], - "default": "client_nne" + "const": "client_nne" }, "encryption_algorithm": { "type": "string", @@ -141,9 +135,7 @@ "properties": { "encryption_method": { "type": "string", - "const": "encrypted_verify_certificate", - "enum": ["encrypted_verify_certificate"], - "default": "encrypted_verify_certificate" + "const": "encrypted_verify_certificate" }, "ssl_certificate": { "title": "SSL PEM File", diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..4bd783a80420f --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "host": "default", + "port": 5555, + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/expected_spec.json 
b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..94d3ab92ca4a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-oracle/src/test-integration/resources/expected_spec.json @@ -0,0 +1,271 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/oracle", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Oracle Source Spec", + "type": "object", + "required": ["host", "port", "username"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 1 + }, + "port": { + "title": "Port", + "description": "Port of the database.\nOracle Corporations recommends the following port numbers:\n1521 - Default listening port for client connections to the listener. \n2484 - Recommended and officially registered listening port for client connections to the listener using TCP/IP with SSL", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 1521, + "order": 2 + }, + "connection_data": { + "title": "Connect by", + "type": "object", + "description": "Connect data that will be used for DB connection", + "order": 3, + "oneOf": [ + { + "title": "Service name", + "description": "Use service name", + "required": ["service_name"], + "properties": { + "connection_type": { + "type": "string", + "const": "service_name", + "order": 0 + }, + "service_name": { + "title": "Service name", + "type": "string", + "order": 1 + } + } + }, + { + "title": "System ID (SID)", + "description": "Use SID (Oracle System Identifier)", + "required": ["sid"], + "properties": { + "connection_type": { + "type": "string", + "const": "sid", + "order": 0 + }, + "sid": { + "title": "System ID (SID)", + "type": "string", + "order": 1 + } + } + } + ] + }, + "username": { + "title": "User", + "description": "The username which is used to access the database.", + "type": "string", + "order": 4 + }, + "password": { + "title": "Password", + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 5 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas to sync from. Defaults to user. Case sensitive.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true, + "order": 6 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 7 + }, + "encryption": { + "title": "Encryption", + "type": "object", + "description": "The encryption method with is used when communicating with the database.", + "order": 8, + "oneOf": [ + { + "title": "Unencrypted", + "description": "Data transfer will not be encrypted.", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "type": "string", + "const": "unencrypted" + } + } + }, + { + "title": "Native Network Encryption (NNE)", + "description": "The native network encryption gives you the ability to encrypt database connections, without the configuration overhead of TCP/IP and SSL/TLS and without the need to open and listen on different ports.", + "required": ["encryption_method"], + "properties": { + "encryption_method": { + "type": "string", + "const": "client_nne" + }, + "encryption_algorithm": { + "type": "string", + "description": "This parameter defines what encryption algorithm is used.", + "title": "Encryption Algorithm", + "default": "AES256", + "enum": ["AES256", "RC4_56", "3DES168"] + } + } + }, + { + "title": "TLS Encrypted (verify certificate)", + "description": "Verify and use the certificate provided by the server.", + "required": ["encryption_method", "ssl_certificate"], + "properties": { + "encryption_method": { + "type": "string", + "const": "encrypted_verify_certificate" + }, + "ssl_certificate": { + "title": "SSL PEM File", + "description": "Privacy Enhanced Mail (PEM) files are concatenated certificate containers frequently used in certificate installations.", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + 
"properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-partnerstack/.dockerignore b/airbyte-integrations/connectors/source-partnerstack/.dockerignore new file mode 100644 index 0000000000000..92734c506d7de --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_partnerstack +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-partnerstack/Dockerfile b/airbyte-integrations/connectors/source-partnerstack/Dockerfile new file mode 100644 index 0000000000000..3c43983b269d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_partnerstack ./source_partnerstack + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-partnerstack diff --git a/airbyte-integrations/connectors/source-partnerstack/README.md b/airbyte-integrations/connectors/source-partnerstack/README.md new file mode 100644 index 0000000000000..c2d61c9435fd4 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/README.md @@ -0,0 +1,79 @@ +# Partnerstack Source + +This is the repository for the Partnerstack configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/partnerstack). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. 
This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-partnerstack:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/partnerstack) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_partnerstack/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source partnerstack test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-partnerstack:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-partnerstack:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-partnerstack:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-partnerstack:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-partnerstack:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-partnerstack:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-partnerstack:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-partnerstack:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. 
Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-partnerstack/__init__.py b/airbyte-integrations/connectors/source-partnerstack/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-partnerstack/acceptance-test-config.yml b/airbyte-integrations/connectors/source-partnerstack/acceptance-test-config.yml new file mode 100644 index 0000000000000..adc1da7691b45 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-partnerstack:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_partnerstack/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/simple_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/simple_incremental_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/simple_catalog.json" diff --git a/airbyte-integrations/connectors/source-partnerstack/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-partnerstack/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-partnerstack/build.gradle b/airbyte-integrations/connectors/source-partnerstack/build.gradle new file mode 100644 index 0000000000000..61d7a10468a4f --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_partnerstack' +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/__init__.py b/airbyte-integrations/connectors/source-partnerstack/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..072dfd6892316 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "customers": { + "updated_at": "11654622676000" + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-partnerstack/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..f86aa59251d9d --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "deals", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "groups", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "leads", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "partnerships", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "rewards", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "transactions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..ec176d99961a9 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "public_key": "", + "private_key": "", + "start_date": "2022-10-25" +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/sample_config.json new file mode 100644 index 0000000000000..9b24d9bcd386e --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "public_key": "", + "private_key": "" +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/sample_state.json new file mode 100644 index 0000000000000..8535902fe849c --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "customers": { + "updated_at": "1654622676000" + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/simple_catalog.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/simple_catalog.json new file mode 100644 index 0000000000000..21d1624149f5d --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/simple_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "customers", + "json_schema": {}, + 
"supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-partnerstack/integration_tests/simple_incremental_catalog.json b/airbyte-integrations/connectors/source-partnerstack/integration_tests/simple_incremental_catalog.json new file mode 100644 index 0000000000000..e662b9017a9a5 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/integration_tests/simple_incremental_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-partnerstack/main.py b/airbyte-integrations/connectors/source-partnerstack/main.py new file mode 100644 index 0000000000000..aa7176881f622 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_partnerstack import SourcePartnerstack + +if __name__ == "__main__": + source = SourcePartnerstack() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-partnerstack/requirements.txt b/airbyte-integrations/connectors/source-partnerstack/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-partnerstack/setup.py b/airbyte-integrations/connectors/source-partnerstack/setup.py new file mode 100644 index 0000000000000..64bade243e1ae --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_partnerstack", + description="Source implementation for Partnerstack.", + author="Elliot Trabac", + author_email="elliot.trabac1@gmail.com", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/__init__.py b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/__init__.py new file mode 100644 index 0000000000000..30492d64a767a --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourcePartnerstack + +__all__ = ["SourcePartnerstack"] diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/components.py b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/components.py new file mode 100644 index 0000000000000..7b97596998b43 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/components.py @@ -0,0 +1,60 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from dataclasses import InitVar, dataclass +from typing import Any, Iterable, Mapping, MutableMapping, Optional, Union + +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources.declarative.stream_slicers import StreamSlicer +from airbyte_cdk.sources.declarative.types import Record, StreamSlice, StreamState + + +@dataclass +class PartnerstackSlicer(StreamSlicer): + options: InitVar[Mapping[str, Any]] + cursor_field: str + request_cursor_field: str + + def __post_init__(self, options: Mapping[str, Any]): + self._state = {} + + def stream_slices(self, sync_mode: SyncMode, stream_state: StreamState, *args, **kwargs) -> Iterable[StreamSlice]: + yield {self.request_cursor_field: stream_state.get(self.cursor_field, 0)} + + def _max_dt_str(self, *args: str) -> Optional[str]: + new_state_candidates = list(map(lambda x: int(x), filter(None, args))) + if not new_state_candidates: + return + max_dt = max(new_state_candidates) + return max_dt + + def update_cursor(self, stream_slice: StreamSlice, last_record: Optional[Record] = None): + slice_state = stream_slice.get(self.cursor_field) + current_state = self._state.get(self.cursor_field) + last_cursor = last_record and last_record[self.cursor_field] + max_dt = self._max_dt_str(slice_state, current_state, last_cursor) + if not max_dt: + return + self._state[self.cursor_field] = max_dt + + def get_stream_state(self) -> StreamState: + return self._state + + def get_request_params( + self, + *, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> MutableMapping[str, Any]: + return stream_slice or {} + + def get_request_headers(self, *args, **kwargs) -> Mapping[str, Any]: + return {} + + def get_request_body_data(self, *args, **kwargs) -> Optional[Union[Mapping, str]]: + return {} + + def get_request_body_json(self, *args, **kwargs) -> Optional[Mapping]: + return {} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/partnerstack.yaml b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/partnerstack.yaml new file mode 100644 index 0000000000000..230485f29e467 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/partnerstack.yaml @@ -0,0 +1,105 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data", "items"] + requester: + url_base: "https://api.partnerstack.com/api/v2/" + http_method: "GET" + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['public_key'] }}" + password: "{{ config['private_key'] }}" + request_options_provider: + request_parameters: + min_created: "{{ timestamp(config['start_date']) * 1000 }}" + stream_slicer: + request_cursor_field: "min_updated" + cursor_field: "updated_at" + class_name: source_partnerstack.components.PartnerstackSlicer + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ last_records[-1]['key'] }}" + stop_condition: "{{ response.data.has_more is false }}" + page_size: 250 + page_size_option: + field_name: "limit" + inject_into: "request_parameter" + page_token_option: + field_name: "starting_after" + inject_into: "request_parameter" + url_base: + $ref: "*ref(definitions.requester.url_base)" + requester: + $ref: "*ref(definitions.requester)" + stream_slicer: + $ref: "*ref(definitions.stream_slicer)" + + # base stream + base_stream: + 
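  # Shared template: every stream defined below $refs this base_stream and only
  # overrides name, primary_key and path (plus an optional cursor field) via $options.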
retriever: + $ref: "*ref(definitions.retriever)" + + # stream definitions + customers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "customers" + primary_key: "key" + path: "/customers" + stream_cursor_field: "updated_at" + deals_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "deals" + primary_key: "key" + path: "/deals" + groups_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "groups" + primary_key: "key" + path: "/groups" + leads_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "leads" + primary_key: "key" + path: "/leads" + partnerships_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "partnerships" + primary_key: "key" + path: "/partnerships" + rewards_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "rewards" + primary_key: "key" + path: "/rewards" + transactions_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "transactions" + primary_key: "key" + path: "/transactions" + +streams: + - "*ref(definitions.customers_stream)" + - "*ref(definitions.deals_stream)" + - "*ref(definitions.groups_stream)" + - "*ref(definitions.leads_stream)" + - "*ref(definitions.partnerships_stream)" + - "*ref(definitions.rewards_stream)" + - "*ref(definitions.transactions_stream)" + +check: + stream_names: + - "groups" diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/customers.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/customers.json new file mode 100644 index 0000000000000..762ece7fa09a0 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/customers.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "created_at": { + "type": ["null", "integer"] + }, + "customer_key": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "field_data": { + "type": ["null", "object"] + }, + "fields": { + "type": ["null", "array"] + }, + "key": { + "type": ["null", "string"] + }, + "meta": { + "type": ["null", "object"] + }, + "name": { + "type": ["null", "string"] + }, + "partner_key": { + "type": ["null", "string"] + }, + "partnership_key": { + "type": ["null", "string"] + }, + "provider_key": { + "type": ["null", "string"] + }, + "source_key": { + "type": ["null", "string"] + }, + "source_type": { + "type": ["null", "string"] + }, + "test": { + "type": ["null", "boolean"] + }, + "updated_at": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/deals.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/deals.json new file mode 100644 index 0000000000000..1c5366a328c90 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/deals.json @@ -0,0 +1,66 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "created_at": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "integer"] + }, + "account_name": { + "type": ["null", "object"] + }, + "amount": { + "type": ["null", "integer"] + }, + "close_date": { + "type": ["null", "string"] + }, + "contact_first_name": { + "type": ["null", "string"] + }, + "contact_last_name": { + "type": ["null", "string"] + }, + "external_key": { + "type": ["null", 
"string"] + }, + "field_data": { + "type": ["null", "object"] + }, + "fields": { + "type": ["null", "array"] + }, + "group_key": { + "type": ["null", "string"] + }, + "lost_reason": { + "type": ["null", "string"] + }, + "meta": { + "type": ["null", "object"] + }, + "mold_key": { + "type": ["null", "string"] + }, + "partner_key": { + "type": ["null", "string"] + }, + "source": { + "type": ["null", "string"] + }, + "stage": { + "type": ["null", "string"] + }, + "team": { + "type": ["null", "object"] + }, + "team_member": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/groups.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/groups.json new file mode 100644 index 0000000000000..3b92990e307f0 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/groups.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "archived": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "integer"] + }, + "default": { + "type": ["null", "boolean"] + }, + "features": { + "type": ["null", "object"] + }, + "key": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "slug": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/leads.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/leads.json new file mode 100644 index 0000000000000..d917c20a0670f --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/leads.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "amount": { + "type": ["null", "integer"] + }, + "approved": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "integer"] + }, + "external_key": { + "type": ["null", "string"] + }, + "field_data": { + "type": ["null", "object"] + }, + "fields": { + "type": ["null", "array"] + }, + "group_key": { + "type": ["null", "string"] + }, + "key": { + "type": ["null", "string"] + }, + "meta": { + "type": ["null", "object"] + }, + "mold_key": { + "type": ["null", "string"] + }, + "partner_key": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/partnerships.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/partnerships.json new file mode 100644 index 0000000000000..63488aa0d686a --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/partnerships.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "created_at": { + "type": ["null", "integer"] + }, + "email": { + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "group": { + "type": ["null", "object"] + }, + "joined_at": { + "type": ["null", "integer"] + }, + "key": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "manager": { + "type": ["null", "object"] + }, + "m_data": { + "type": ["null", "object"] + }, + "partner_key": { + 
"type": ["null", "string"] + }, + "stats": { + "type": ["null", "object"] + }, + "tags": { + "type": ["null", "array"] + }, + "team": { + "type": ["null", "object"] + }, + "updated_at": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/rewards.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/rewards.json new file mode 100644 index 0000000000000..5ceb041635960 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/rewards.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "action": { + "type": ["null", "object"] + }, + "amount": { + "type": ["null", "integer"] + }, + "created_at": { + "type": ["null", "integer"] + }, + "customer": { + "type": ["null", "object"] + }, + "description": { + "type": ["null", "string"] + }, + "invoice": { + "type": ["null", "object"] + }, + "key": { + "type": ["null", "string"] + }, + "offer": { + "type": ["null", "object"] + }, + "partnership": { + "type": ["null", "object"] + }, + "reward_status": { + "type": ["null", "string"] + }, + "transaction": { + "type": ["null", "object"] + }, + "trigger": { + "type": ["null", "object"] + }, + "updated_at": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/transactions.json b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/transactions.json new file mode 100644 index 0000000000000..8a903c76ce564 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/schemas/transactions.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "amount": { + "type": ["null", "integer"] + }, + "amount_usd": { + "type": ["null", "integer"] + }, + "approved": { + "type": ["null", "boolean"] + }, + "category_key": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "integer"] + }, + "currency": { + "type": ["null", "string"] + }, + "customer": { + "type": ["null", "object"] + }, + "customer_key": { + "type": ["null", "string"] + }, + "extension": { + "type": ["null", "object"] + }, + "key": { + "type": ["null", "string"] + }, + "product_key": { + "type": ["null", "object"] + }, + "updated_at": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/source.py b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/source.py new file mode 100644 index 0000000000000..6584be3472c77 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourcePartnerstack(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "partnerstack.yaml"}) diff --git a/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/spec.yaml b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/spec.yaml new file mode 100644 index 0000000000000..475a1b42ab692 --- /dev/null +++ b/airbyte-integrations/connectors/source-partnerstack/source_partnerstack/spec.yaml @@ -0,0 +1,29 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/partnerstack +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Partnerstack Spec + type: object + required: + - public_key + - private_key + additionalProperties: true + properties: + public_key: + type: string + title: Partnerstack Public key + description: The Live Public Key for a Partnerstack account. + airbyte_secret: true + private_key: + type: string + title: Partnerstack Private key + description: The Live Private Key for a Partnerstack account. + airbyte_secret: true + start_date: + type: string + title: Start date + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$ + description: >- + UTC date and time in the format 2017-01-25T00:00:00Z. Any data before + this date will not be replicated. + examples: + - "2017-01-25T00:00:00Z" diff --git a/airbyte-integrations/connectors/source-pinterest/Dockerfile b/airbyte-integrations/connectors/source-pinterest/Dockerfile index 5b53c149a4441..adc1f9a7b8a52 100644 --- a/airbyte-integrations/connectors/source-pinterest/Dockerfile +++ b/airbyte-integrations/connectors/source-pinterest/Dockerfile @@ -34,5 +34,5 @@ COPY source_pinterest ./source_pinterest ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.8 +LABEL io.airbyte.version=0.1.9 LABEL io.airbyte.name=airbyte/source-pinterest diff --git a/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_account_analytics.json b/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_account_analytics.json index 12ede7b5c54ce..aa2bb01f96bd1 100644 --- a/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_account_analytics.json +++ b/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_account_analytics.json @@ -7,7 +7,7 @@ "format": "date" }, "AD_ACCOUNT_ID": { - "type": ["null", "string"] + "type": ["string"] }, "AD_ID": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_group_analytics.json b/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_group_analytics.json index 17fa3968a8026..25cb790dfa3b5 100644 --- a/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_group_analytics.json +++ b/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/ad_group_analytics.json @@ -7,7 +7,7 @@ "format": "date" }, "AD_ACCOUNT_ID": { - "type": ["integer"] + "type": ["string"] }, "AD_ID": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/campaign_analytics.json b/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/campaign_analytics.json index 25ee8aeea80ee..6bae1d1596506 100644 --- 
a/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/campaign_analytics.json +++ b/airbyte-integrations/connectors/source-pinterest/source_pinterest/schemas/campaign_analytics.json @@ -7,7 +7,7 @@ "format": "date" }, "AD_ACCOUNT_ID": { - "type": ["null", "integer"] + "type": ["string"] }, "AD_ID": { "type": ["null", "string"] diff --git a/airbyte-integrations/connectors/source-pinterest/source_pinterest/source.py b/airbyte-integrations/connectors/source-pinterest/source_pinterest/source.py index 4d97641c766c6..5ac895977f9df 100644 --- a/airbyte-integrations/connectors/source-pinterest/source_pinterest/source.py +++ b/airbyte-integrations/connectors/source-pinterest/source_pinterest/source.py @@ -257,8 +257,13 @@ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: class Campaigns(ServerSideFilterStream): + def __init__(self, parent: HttpStream, with_data_slices: bool = True, status_filter: str = "", **kwargs): + super().__init__(parent, with_data_slices, **kwargs) + self.status_filter = status_filter + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"ad_accounts/{stream_slice['parent']['id']}/campaigns" + params = f"?entity_statuses={self.status_filter}" if self.status_filter else "" + return f"ad_accounts/{stream_slice['parent']['id']}/campaigns{params}" class CampaignAnalytics(PinterestAnalyticsStream): @@ -269,8 +274,13 @@ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: class AdGroups(ServerSideFilterStream): + def __init__(self, parent: HttpStream, with_data_slices: bool = True, status_filter: str = "", **kwargs): + super().__init__(parent, with_data_slices, **kwargs) + self.status_filter = status_filter + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"ad_accounts/{stream_slice['parent']['id']}/ad_groups" + params = f"?entity_statuses={self.status_filter}" if self.status_filter else "" + return f"ad_accounts/{stream_slice['parent']['id']}/ad_groups{params}" class AdGroupAnalytics(PinterestAnalyticsStream): @@ -281,8 +291,13 @@ def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: class Ads(ServerSideFilterStream): + def __init__(self, parent: HttpStream, with_data_slices: bool = True, status_filter: str = "", **kwargs): + super().__init__(parent, with_data_slices, **kwargs) + self.status_filter = status_filter + def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str: - return f"ad_accounts/{stream_slice['parent']['id']}/ads" + params = f"?entity_statuses={self.status_filter}" if self.status_filter else "" + return f"ad_accounts/{stream_slice['parent']['id']}/ads{params}" class AdAnalytics(PinterestAnalyticsStream): @@ -338,18 +353,19 @@ def check_connection(self, logger, config) -> Tuple[bool, any]: def streams(self, config: Mapping[str, Any]) -> List[Stream]: config = self._validate_and_transform(config) config["authenticator"] = self.get_authenticator(config) + status = ",".join(config.get("status")) if config.get("status") else None return [ AdAccountAnalytics(AdAccounts(config), config=config), AdAccounts(config), AdAnalytics(Ads(AdAccounts(config), with_data_slices=False, config=config), config=config), AdGroupAnalytics(AdGroups(AdAccounts(config), with_data_slices=False, config=config), config=config), - AdGroups(AdAccounts(config), config=config), - Ads(AdAccounts(config), config=config), + AdGroups(AdAccounts(config), status_filter=status, config=config), + Ads(AdAccounts(config), status_filter=status, 
config=config), BoardPins(Boards(config), config=config), BoardSectionPins(BoardSections(Boards(config), config=config), config=config), BoardSections(Boards(config), config=config), Boards(config), CampaignAnalytics(Campaigns(AdAccounts(config), with_data_slices=False, config=config), config=config), - Campaigns(AdAccounts(config), config=config), + Campaigns(AdAccounts(config), status_filter=status, config=config), UserAccountAnalytics(None, config=config), ] diff --git a/airbyte-integrations/connectors/source-pinterest/source_pinterest/spec.json b/airbyte-integrations/connectors/source-pinterest/source_pinterest/spec.json index 2c8440ec43b91..1d7da627fe7a0 100644 --- a/airbyte-integrations/connectors/source-pinterest/source_pinterest/spec.json +++ b/airbyte-integrations/connectors/source-pinterest/source_pinterest/spec.json @@ -13,6 +13,16 @@ "description": "A date in the format YYYY-MM-DD. If you have not set a date, it would be defaulted to latest allowed date by api (914 days from today).", "examples": ["2022-07-28"] }, + "status": { + "title": "Status", + "description": "Entity statuses based off of campaigns, ad_groups, and ads. If you do not have a status set, it will be ignored completely.", + "type": ["array", "null"], + "items": { + "type": "string", + "enum": ["ACTIVE", "PAUSED", "ARCHIVED"] + }, + "uniqueItems": true + }, "credentials": { "title": "Authorization Method", "type": "object", diff --git a/airbyte-integrations/connectors/source-plausible/.dockerignore b/airbyte-integrations/connectors/source-plausible/.dockerignore new file mode 100644 index 0000000000000..1c872e09e6981 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_plausible +!setup.py +!secrets \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/BOOTSTRAP.md b/airbyte-integrations/connectors/source-plausible/BOOTSTRAP.md new file mode 100644 index 0000000000000..bf25537a11948 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/BOOTSTRAP.md @@ -0,0 +1,8 @@ +# Plausible + +Plausible is a privacy-first, subscription-only website analytics service. Link to their stats API is [here](https://plausible.io/docs/stats-api). + +## How to get an API key +- [Sign up for Plausible](https://plausible.io/register). There is a 30-day free trial but beyond that it is a paid subscription. +- [Add a website](https://plausible.io/docs/plausible-script). +- Generate an API key from the [Settings page](https://plausible.io/settings). diff --git a/airbyte-integrations/connectors/source-plausible/Dockerfile b/airbyte-integrations/connectors/source-plausible/Dockerfile new file mode 100644 index 0000000000000..2ea7e48ce8006 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
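# Packages are staged under /install here so the clean runtime stage below can
# COPY them into /usr/local without carrying the build tooling along.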
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_plausible ./source_plausible + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-plausible \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/README.md b/airbyte-integrations/connectors/source-plausible/README.md new file mode 100644 index 0000000000000..9e057fdd58637 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/README.md @@ -0,0 +1,79 @@ +# Plausible Source + +This is the repository for the Plausible configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/plausible). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-plausible:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/plausible) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_plausible/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source plausible test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-plausible:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-plausible:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-plausible:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-plausible:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-plausible:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-plausible:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-plausible:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-plausible:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/__init__.py b/airbyte-integrations/connectors/source-plausible/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-plausible/acceptance-test-config.yml b/airbyte-integrations/connectors/source-plausible/acceptance-test-config.yml new file mode 100644 index 0000000000000..07581487d5c4e --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/acceptance-test-config.yml @@ -0,0 +1,27 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-plausible:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_plausible/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-plausible/acceptance-test-docker.sh new file mode 100755 index 0000000000000..fa680528f222c --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/build.gradle b/airbyte-integrations/connectors/source-plausible/build.gradle new file mode 100644 index 0000000000000..a0e0655b430f9 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_plausible' +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/integration_tests/__init__.py b/airbyte-integrations/connectors/source-plausible/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-plausible/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-plausible/integration_tests/acceptance.py new file mode 100644 index 0000000000000..e2a8f1a4cb74d --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + pass diff --git a/airbyte-integrations/connectors/source-plausible/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-plausible/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..168e61d240033 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "stats", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-plausible/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..b54b652088176 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "", + "site_id": "https://airbyte.com", + "start_date": "20220101" +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-plausible/integration_tests/sample_config.json new file mode 100644 index 0000000000000..2e59a8bcd3cf1 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "your_api_key", + "site_id": "airbyte.com", + "start_date": "2022-01-01" +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/main.py b/airbyte-integrations/connectors/source-plausible/main.py new file mode 100644 index 0000000000000..a41667d297b4a --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_plausible import SourcePlausible + +if __name__ == "__main__": + source = SourcePlausible() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-plausible/requirements.txt b/airbyte-integrations/connectors/source-plausible/requirements.txt new file mode 100644 index 0000000000000..78140e52009f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/setup.py b/airbyte-integrations/connectors/source-plausible/setup.py new file mode 100644 index 0000000000000..f10c9c2264ceb --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_plausible", + description="Source implementation for Plausible.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-plausible/source_plausible/__init__.py b/airbyte-integrations/connectors/source-plausible/source_plausible/__init__.py new file mode 100644 index 0000000000000..64d2a5b4bd243 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/source_plausible/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourcePlausible + +__all__ = ["SourcePlausible"] diff --git a/airbyte-integrations/connectors/source-plausible/source_plausible/plausible.yaml b/airbyte-integrations/connectors/source-plausible/source_plausible/plausible.yaml new file mode 100644 index 0000000000000..9562bf3f656f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/source_plausible/plausible.yaml @@ -0,0 +1,48 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [ "results" ] + record_filter: + condition: "{{ record['bounce_rate'] is not none }}" + requester: + url_base: "https://plausible.io/api/v1/stats" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + request_options_provider: + request_parameters: + site_id: "{{ config['site_id'] }}" + period: "custom" + # Retrieves all available metrics. + # See https://plausible.io/docs/stats-api#metrics + metrics: "visitors,pageviews,bounce_rate,visit_duration,visits" + # Plausible does not provide a way for "all time" data retrieval. + # Default start date: Plausible was launched in January 2019. + # See (https://plausible.io/about) + date: "{{ config['start_date'] or '2019-01-01' }},{{ today_utc() }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + stats_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "stats" + primary_key: "date" + path: "/timeseries" + +streams: + - "*ref(definitions.stats_stream)" + +check: + stream_names: + - "stats" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/source_plausible/schemas/stats.json b/airbyte-integrations/connectors/source-plausible/source_plausible/schemas/stats.json new file mode 100644 index 0000000000000..dedba0ebe26bc --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/source_plausible/schemas/stats.json @@ -0,0 +1,37 @@ +{ + "type": "object", + "title": "Stats", + "description": "Timeseries data over a given time period.", + "properties": { + "bounce_rate": { + "type": ["integer", "null"], + "title": "Bounce Rate", + "description": "Bounce rate percentage." + }, + "date": { + "type": "string", + "title": "Date", + "description": "The date, in your site's time zone, in ISO 8601 format." + }, + "pageviews": { + "type": "integer", + "title": "Pageviews", + "description": "The number of pageview events." 
+ }, + "visit_duration": { + "type": ["integer", "null"], + "title": "Visit Duration", + "description": "Visit duration in seconds." + }, + "visitors": { + "type": "integer", + "title": "Visitors", + "description": "The number of unique visitors." + }, + "visits": { + "type": "integer", + "title": "Visits", + "description": "The number of visits/sessions." + } + } +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-plausible/source_plausible/source.py b/airbyte-integrations/connectors/source-plausible/source_plausible/source.py new file mode 100644 index 0000000000000..b3109208e8e34 --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/source_plausible/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourcePlausible(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "plausible.yaml"}) diff --git a/airbyte-integrations/connectors/source-plausible/source_plausible/spec.yaml b/airbyte-integrations/connectors/source-plausible/source_plausible/spec.yaml new file mode 100644 index 0000000000000..f2044a651727a --- /dev/null +++ b/airbyte-integrations/connectors/source-plausible/source_plausible/spec.yaml @@ -0,0 +1,39 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/plausible +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Plausible Spec + type: object + required: + - api_key + - site_id + additionalProperties: true + properties: + api_key: + type: string + title: Plausible API key + description: >- + Plausible API Key. See the docs + for information on how to generate this key. + airbyte_secret: true + site_id: + type: string + title: Target website domain + description: >- + The domain of the site you want to retrieve data for. + Enter the name of your site as configured on Plausible, + i.e., excluding "https://" and "www". Can be retrieved from + the 'domain' field in your Plausible site settings. + # Alphanumeric, '-', or '.' characters separated by a period + pattern: ^[A-Za-z0-9-.]+\.[A-Z-a-z0-9-.]+ + examples: + - airbyte.com + - docs.airbyte.com + start_date: + type: string + title: Data start date + description: >- + Start date for data to retrieve, in ISO-8601 format. 
+ pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + examples: + - YYYY-MM-DD \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-pocket/.dockerignore b/airbyte-integrations/connectors/source-pocket/.dockerignore new file mode 100644 index 0000000000000..adfbada17e5bc --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_pocket +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-pocket/Dockerfile b/airbyte-integrations/connectors/source-pocket/Dockerfile new file mode 100644 index 0000000000000..2633f4e461e92 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_pocket ./source_pocket + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-pocket diff --git a/airbyte-integrations/connectors/source-pocket/README.md b/airbyte-integrations/connectors/source-pocket/README.md new file mode 100644 index 0000000000000..c7d6e71624c34 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/README.md @@ -0,0 +1,79 @@ +# Pocket Source + +This is the repository for the Pocket configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/pocket). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-pocket:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/pocket) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_pocket/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source pocket test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-pocket:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-pocket:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-pocket:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-pocket:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-pocket:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-pocket:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-pocket:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-pocket:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-pocket/__init__.py b/airbyte-integrations/connectors/source-pocket/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-pocket/acceptance-test-config.yml b/airbyte-integrations/connectors/source-pocket/acceptance-test-config.yml new file mode 100644 index 0000000000000..393ad8f1553b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/acceptance-test-config.yml @@ -0,0 +1,27 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-pocket:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_pocket/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-pocket/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-pocket/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c416fd1440557 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-pocket/bootstrap.md b/airbyte-integrations/connectors/source-pocket/bootstrap.md new file mode 100644 index 0000000000000..a817be1dc7a28 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/bootstrap.md @@ -0,0 +1,19 @@ +# Pocket API + +Pocket's /v3/get endpoint is a single call that is incredibly versatile. A few examples of the types of requests you can make: + +- Retrieve a user’s list of unread items +- Sync data that has changed since the last time your app checked +- Retrieve paged results sorted by the most recent saves +- Retrieve just videos that the user has saved +- Search for a given keyword in item’s title and url +- Retrieve all items for a given domain +- and more + +## Required Permissions + +In order to use the /v3/get endpoint, your consumer key must have the "Retrieve" permission. 
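
## Request shape (illustrative sketch)

For orientation, the snippet below shows roughly what the declarative requester in `source_pocket/pocket.yaml` ends up sending: a POST to `/v3/get` with the consumer key, access token and optional filters as request parameters, paged via `count`/`offset`. It is a stand-alone illustration, not part of the connector; the credentials are placeholders and the parameter names mirror the keys in `integration_tests/sample_config.json`.

```python
# Illustrative only -- mirrors the parameters declared in pocket.yaml.
import requests

params = {
    "consumer_key": "<consumer_key>",  # placeholder credential
    "access_token": "<access_token>",  # placeholder credential
    "detailType": "complete",          # matches detail_type in sample_config.json
    "count": 10,                       # page size used by the OffsetIncrement paginator
    "offset": 0,                       # incremented by the paginator on each page
}
resp = requests.post("https://getpocket.com/v3/get", params=params)
resp.raise_for_status()

# Pocket nests records under "list", keyed by item ID:
#   {"list": {"<item_id>": {"item_id": "...", "given_url": "...", ...}}}
# which is why this connector ships a custom PocketExtractor to flatten that
# mapping; an empty result may come back as an empty list instead of a dict.
items = resp.json().get("list") or {}
if isinstance(items, dict):
    for item_id, record in items.items():
        print(item_id, record.get("given_url"))
```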
+ +## Secret generation + +In order to generate both needed secrets to authenticate (consumer key and access token), you can follow the steps described in [https://docs.airbyte.com/integrations/sources/pocket](https://docs.airbyte.com/integrations/sources/pocket) \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-pocket/build.gradle b/airbyte-integrations/connectors/source-pocket/build.gradle new file mode 100644 index 0000000000000..c47ba950c3baa --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_pocket' +} diff --git a/airbyte-integrations/connectors/source-pocket/integration_tests/__init__.py b/airbyte-integrations/connectors/source-pocket/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-pocket/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-pocket/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-pocket/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-pocket/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..f9236112e703e --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "retrieve", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-pocket/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-pocket/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..c458243c3d965 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "consumer_key": "9999999999999999999999999999999999999999", + "access_token": "9999999999999999999999999999999999999999", + "detail_type": "invalid" +} diff --git a/airbyte-integrations/connectors/source-pocket/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-pocket/integration_tests/sample_config.json new file mode 100644 index 0000000000000..e36abfc93a845 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "consumer_key": "consumer_key", + "access_token": "access_token", + "detail_type": "complete" +} diff --git a/airbyte-integrations/connectors/source-pocket/main.py b/airbyte-integrations/connectors/source-pocket/main.py new file mode 100644 index 0000000000000..25bb68be28dec --- /dev/null +++ 
b/airbyte-integrations/connectors/source-pocket/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_pocket import SourcePocket + +if __name__ == "__main__": + source = SourcePocket() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-pocket/requirements.txt b/airbyte-integrations/connectors/source-pocket/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-pocket/setup.py b/airbyte-integrations/connectors/source-pocket/setup.py new file mode 100644 index 0000000000000..805434106f157 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_pocket", + description="Source implementation for Pocket.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-pocket/source_pocket/__init__.py b/airbyte-integrations/connectors/source-pocket/source_pocket/__init__.py new file mode 100644 index 0000000000000..5f1542ab8a482 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/source_pocket/__init__.py @@ -0,0 +1,9 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .extractor import PocketExtractor +from .source import SourcePocket + +__all__ = ["PocketExtractor", "SourcePocket"] diff --git a/airbyte-integrations/connectors/source-pocket/source_pocket/extractor.py b/airbyte-integrations/connectors/source-pocket/source_pocket/extractor.py new file mode 100644 index 0000000000000..24050dea14d8a --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/source_pocket/extractor.py @@ -0,0 +1,39 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from dataclasses import InitVar, dataclass +from typing import Any, List, Mapping + +import requests +from airbyte_cdk.sources.declarative.decoders.decoder import Decoder +from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder +from airbyte_cdk.sources.declarative.extractors.record_extractor import RecordExtractor +from airbyte_cdk.sources.declarative.types import Record + + +@dataclass +class PocketExtractor(RecordExtractor): + """ + Record extractor that extracts record of the form: + + { "list": { "ID_1": record_1, "ID_2": record_2, ... 
} } + + Attributes: + options (Mapping[str, Any]): Additional runtime parameters to be used for string interpolation + decoder (Decoder): The decoder responsible to transfom the response in a Mapping + field_pointer (str): The field defining record Mapping + """ + + options: InitVar[Mapping[str, Any]] + decoder: Decoder = JsonDecoder(options={}) + field_pointer: str = "list" + + def extract_records(self, response: requests.Response) -> List[Record]: + response_body = self.decoder.decode(response) + if self.field_pointer not in response_body: + return [] + elif type(response_body[self.field_pointer]) is list: + return response_body[self.field_pointer] + else: + return [record for _, record in response_body[self.field_pointer].items()] diff --git a/airbyte-integrations/connectors/source-pocket/source_pocket/pocket.yaml b/airbyte-integrations/connectors/source-pocket/source_pocket/pocket.yaml new file mode 100644 index 0000000000000..b10d6c9d5d07c --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/source_pocket/pocket.yaml @@ -0,0 +1,68 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + class_name: "source_pocket.PocketExtractor" + requester: + url_base: "https://getpocket.com/v3" + http_method: "POST" + request_options_provider: + request_parameters: + consumer_key: "{{ config['consumer_key'] }}" + access_token: "{{ config['access_token'] }}" + state: "{{ config['state'] }}" + favorite: "{% if config['favorite'] %}{{ 1 }}{% else %}{{ 0 }}{% endif %}" + tag: "{{ config['tag'] }}" + contentType: "{{ config['content_type'] }}" + sort: "{{ config['sort'] }}" + detailType: "{{ config['detail_type'] }}" + search: "{{ config['search'] }}" + domain: "{{ config['domain'] }}" + since: "{{ timestamp(config['since']) }}" + error_handler: + type: "DefaultErrorHandler" + error_handlers: + - response_filters: + - http_codes: [401] # User authentication error + action: FAIL + - http_codes: [403] # Access denied due to lack of permission or rate limiting + action: RETRY + backoff_strategies: + - type: "WaitTimeFromHeader" + header: "X-Limit-User-Reset" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "count" + pagination_strategy: + type: "OffsetIncrement" + page_size: 10 + page_token_option: + inject_into: "request_parameter" + field_name: "offset" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + schema_loader: + type: "JsonSchema" + retrieve_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "retrieve" + primary_key: "item_id" + path: "/get" + +streams: + - "*ref(definitions.retrieve_stream)" + +check: + stream_names: + - "retrieve" diff --git a/airbyte-integrations/connectors/source-pocket/source_pocket/schemas/retrieve.json b/airbyte-integrations/connectors/source-pocket/source_pocket/schemas/retrieve.json new file mode 100644 index 0000000000000..a9c2f18c8799d --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/source_pocket/schemas/retrieve.json @@ -0,0 +1,236 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "item_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "resolved_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "given_url": { + "type": ["null", "string"] + }, + "given_title": { + "type": ["null", 
"string"] + }, + "favorite": { + "type": ["null", "string"], + "pattern": "0|1" + }, + "status": { + "type": ["null", "string"], + "pattern": "0|1|2" + }, + "time_added": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "time_updated": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "time_read": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "time_favorited": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "sort_id": { + "type": ["null", "number"] + }, + "resolved_title": { + "type": ["null", "string"] + }, + "resolved_url": { + "type": ["null", "string"] + }, + "excerpt": { + "type": ["null", "string"] + }, + "is_article": { + "type": ["null", "string"], + "pattern": "0|1" + }, + "is_index": { + "type": ["null", "string"], + "pattern": "0|1" + }, + "has_image": { + "type": ["null", "string"], + "pattern": "0|1|2" + }, + "has_video": { + "type": ["null", "string"], + "pattern": "0|1|2" + }, + "word_count": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "lang": { + "type": ["null", "string"] + }, + "time_to_read": { + "type": ["null", "number"] + }, + "top_image_url": { + "type": ["null", "string"] + }, + "tags": { + "type": ["null", "object"], + "patternProperties": { + ".+": { + "type": ["null", "object"], + "properties": { + "item_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "tag": { + "type": ["null", "string"] + } + } + } + } + }, + "authors": { + "type": ["null", "object"], + "patternProperties": { + "[0-9]+": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "item_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "author_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "name": { + "type": ["null", "string"] + }, + "url": { + "type": ["null", "string"] + } + } + } + } + }, + "image": { + "type": ["null", "object"], + "properties": { + "item_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "src": { + "type": ["null", "string"] + }, + "width": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "height": { + "type": ["null", "string"], + "pattern": "[0-9]+" + } + } + }, + "images": { + "type": ["null", "object"], + "patternProperties": { + "[0-9]+": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "item_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "image_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "src": { + "type": ["null", "string"] + }, + "width": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "height": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "credit": { + "type": ["null", "string"] + }, + "caption": { + "type": ["null", "string"] + } + } + } + } + }, + "videos": { + "type": ["null", "object"], + "patternProperties": { + "[0-9]+": { + "type": ["null", "object"], + "additionalProperties": true, + "properties": { + "item_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "video_id": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "src": { + "type": ["null", "string"] + }, + "width": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "height": { + "type": ["null", "string"], + "pattern": "[0-9]+" + }, + "type": { + "type": ["null", "string"] + }, + "vid": { + "type": ["null", "string"] + } + } + } + } + }, + "domain_metadata": { + "type": ["null", "object"], + "properties": { + "name": { + "type": ["null", "string"] + }, + "logo": { + "type": ["null", "string"] 
+ }, + "greyscale_logo": { + "type": ["null", "string"] + } + } + }, + "listen_duration_estimate": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-pocket/source_pocket/source.py b/airbyte-integrations/connectors/source-pocket/source_pocket/source.py new file mode 100644 index 0000000000000..0fc77fa979127 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/source_pocket/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourcePocket(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "pocket.yaml"}) diff --git a/airbyte-integrations/connectors/source-pocket/source_pocket/spec.yaml b/airbyte-integrations/connectors/source-pocket/source_pocket/spec.yaml new file mode 100644 index 0000000000000..fb4e5a71fd1f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/source_pocket/spec.yaml @@ -0,0 +1,87 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/pocket +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Pocket Spec + type: object + required: + - consumer_key + - access_token + additionalProperties: true + properties: + consumer_key: + type: string + title: Consumer Key + description: Your application's Consumer Key. + airbyte_secret: true + order: 0 + access_token: + type: string + title: Access Token + description: The user's Pocket access token. + airbyte_secret: true + order: 1 + state: + type: string + title: State + description: Select the state of the items to retrieve. + order: 2 + enum: + - unread + - archive + - all + favorite: + type: boolean + title: Is Favorite? + description: Retrieve only favorited items. + default: false + order: 3 + tag: + type: string + title: Tag Name + description: Return only items tagged with this tag name. Use _untagged_ for retrieving only untagged items. + order: 4 + content_type: + type: string + title: Content Type + description: Select the content type of the items to retrieve. + order: 5 + enum: + - article + - video + - image + sort: + type: string + title: Sort By + description: Sort retrieved items by the given criteria. + order: 6 + enum: + - newest + - oldest + - title + - site + detail_type: + type: string + title: Detail Type + description: Select the granularity of the information about each item. + order: 7 + enum: + - simple + - complete + search: + type: string + title: Search Query + description: Only return items whose title or url contain the `search` string. + order: 8 + domain: + type: string + title: Domain + description: Only return items from a particular `domain`. + order: 9 + since: + type: string + title: Since + description: Only return items modified since the given timestamp. 
+ pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 diff --git a/airbyte-integrations/connectors/source-pocket/unit_tests/__init__.py b/airbyte-integrations/connectors/source-pocket/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-pocket/unit_tests/test_extractor.py b/airbyte-integrations/connectors/source-pocket/unit_tests/test_extractor.py new file mode 100644 index 0000000000000..379c53d27776e --- /dev/null +++ b/airbyte-integrations/connectors/source-pocket/unit_tests/test_extractor.py @@ -0,0 +1,36 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import json + +import pytest +import requests +from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder +from source_pocket.extractor import PocketExtractor + +options = {"options_field": "record_array"} +decoder = JsonDecoder(options={}) + + +@pytest.mark.parametrize( + "test_name, body, expected_records", + [ + ("test_extract_successfully", {"list": {"record_one": {"id": 1}, "record_two": {"id": 2}}}, [{"id": 1}, {"id": 2}]), + ("test_extract_empty_list", {"list": []}, []), + ("test_field_pointer_does_not_exist", {"id": 1}, []), + ], +) +def test_pocket_extractor(test_name, body, expected_records): + extractor = PocketExtractor(decoder=decoder, options=options) + + response = create_response(body) + actual_records = extractor.extract_records(response) + + assert actual_records == expected_records + + +def create_response(body): + response = requests.Response() + response._content = json.dumps(body).encode("utf-8") + return response diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/.dockerignore b/airbyte-integrations/connectors/source-polygon-stock-api/.dockerignore new file mode 100644 index 0000000000000..d9b098c972093 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_polygon_stock_api +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/Dockerfile b/airbyte-integrations/connectors/source-polygon-stock-api/Dockerfile new file mode 100644 index 0000000000000..3b1dd791a1163 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_polygon_stock_api ./source_polygon_stock_api + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-polygon-stock-api diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/README.md b/airbyte-integrations/connectors/source-polygon-stock-api/README.md new file mode 100644 index 0000000000000..b32a733b8f96e --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/README.md @@ -0,0 +1,79 @@ +# Polygon Stock Api Source + +This is the repository for the Polygon Stock Api configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/polygon-stock-api). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-polygon-stock-api:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/polygon-stock-api) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_polygon_stock_api/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source polygon-stock-api test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-polygon-stock-api:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-polygon-stock-api:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-polygon-stock-api:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-polygon-stock-api:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-polygon-stock-api:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-polygon-stock-api:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
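As a sketch of what such a fixture can look like (the setup and teardown steps here are purely hypothetical placeholders, not part of this connector):

```python
# integration_tests/acceptance.py -- illustrative sketch only
import pytest

pytest_plugins = ("source_acceptance_test.plugin",)


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    """Provision external resources the acceptance tests need, then tear them down."""
    # Hypothetical setup, e.g. seeding a sandbox account with a few test records.
    seeded_records = ["example-record-1", "example-record-2"]
    yield
    # Hypothetical teardown, e.g. removing whatever was seeded above.
    seeded_records.clear()
```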
+ +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-polygon-stock-api:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-polygon-stock-api:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/__init__.py b/airbyte-integrations/connectors/source-polygon-stock-api/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml new file mode 100644 index 0000000000000..6ef9bf701dd90 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-polygon-stock-api:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_polygon_stock_api/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/build.gradle b/airbyte-integrations/connectors/source-polygon-stock-api/build.gradle new file mode 100644 index 0000000000000..41db8677ed5eb --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_polygon_stock_api' +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/__init__.py b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..6f49f72327e60 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/abnormal_state.json @@ -0,0 +1,8 @@ +{ + "ticker": "microsoft", + "queryCount": 0, + "resultsCount": 0, + "adjusted": true, + "status": "OK", + "request_id": "2c243c1c9bc396cad059cd18253f3ab2" +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..2686d60ea9841 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "stock_api", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..0a4c9862b45cd --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/invalid_config.json @@ -0,0 +1,11 @@ +{ + "apiKey": "INVALID_API_KEY", + "limit": "120", + "sort": "asc", + "adjusted": "true", + "stocksTicker": "MSFT", + "multiplier": "1", + "timespan": "day", + "start_date": "2021-07-22", + "end_date": "2021-07-22" +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/sample_config.json new file mode 100644 index 0000000000000..113b8801448a8 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/sample_config.json @@ -0,0 +1,11 @@ +{ + "apiKey": "YOUR API KEY", + "limit": "120", + "sort": "asc", + "adjusted": "true", + "stocksTicker": "AAPL", + "multiplier": "1", + "timespan": "day", + "start_date": "2021-07-22", + "end_date": "2021-07-22" +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3cd8779e34e8e --- /dev/null +++ 
b/airbyte-integrations/connectors/source-polygon-stock-api/integration_tests/sample_state.json @@ -0,0 +1,21 @@ +{ + "ticker": "AAPL", + "queryCount": 1, + "resultsCount": 1, + "adjusted": true, + "results": [ + { + "v": 77287356, + "vw": 146.991, + "o": 145.935, + "c": 146.8, + "h": 148.195, + "l": 145.81, + "t": 1626926400000, + "n": 480209 + } + ], + "status": "OK", + "request_id": "6aae8b7d3d4d2cd896085a840d2e3ed1", + "count": 1 +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/main.py b/airbyte-integrations/connectors/source-polygon-stock-api/main.py new file mode 100644 index 0000000000000..f5a2cac9ecec1 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_polygon_stock_api import SourcePolygonStockApi + +if __name__ == "__main__": + source = SourcePolygonStockApi() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/requirements.txt b/airbyte-integrations/connectors/source-polygon-stock-api/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/setup.py b/airbyte-integrations/connectors/source-polygon-stock-api/setup.py new file mode 100644 index 0000000000000..0164c42e4b514 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_polygon_stock_api", + description="Source implementation for Polygon Stock Api.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/__init__.py b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/__init__.py new file mode 100644 index 0000000000000..6a0b849243429 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourcePolygonStockApi + +__all__ = ["SourcePolygonStockApi"] diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/polygon_stock_api.yaml b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/polygon_stock_api.yaml new file mode 100644 index 0000000000000..85a1415b09b10 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/polygon_stock_api.yaml @@ -0,0 +1,41 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["results"] + requester: + url_base: "https://api.polygon.io" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "X-CoinAPI-Key" + api_token: "{{ config['access_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + stream_slicer: + start_datetime: "{{ config['start_date'] }}T00:00:00.000000+0000" + end_datetime: "{{ config['start_date'] }}T00:00:00.000000+0000" + step: "1d" + + stock_api_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "stock_api" + primary_key: "t" + path: "/v2/aggs/ticker/{{ config['stocksTicker'] }}/range/{{ config['multiplier'] }}/{{ config['timespan'] }}/{{ config['start_date'] }}/{{ config['end_date'] }}?adjusted={{ config['adjusted'] }}&sort={{ config['sort'] }}&limit=120&apiKey={{ config['apiKey'] }}" +streams: + - "*ref(definitions.stock_api_stream)" + +check: + stream_names: + - "stock_api" diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/schemas/stock_api.json b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/schemas/stock_api.json new file mode 100644 index 0000000000000..0cf3fc66168ee --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/schemas/stock_api.json @@ -0,0 +1,32 @@ +{ + "type": "object", + "properties": { + "v": { + "type": ["null", "number"] + }, + "vw": { + "type": ["null", "number"] + }, + "otc": { + "type": ["null", "boolean"] + }, + "o": { + "type": ["null", "number"] + }, + "c": { + "type": ["null", "number"] + }, + "h": { + "type": ["null", "number"] + }, + "l": { + "type": ["null", "number"] + }, + "t": { + "type": ["null", "integer"] + }, + "n": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/source.py b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/source.py new file mode 100644 index 0000000000000..e57f2f436ebb9 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourcePolygonStockApi(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "polygon_stock_api.yaml"}) diff --git a/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/spec.yaml b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/spec.yaml new file mode 100644 index 0000000000000..5c2e5f9ea4589 --- /dev/null +++ b/airbyte-integrations/connectors/source-polygon-stock-api/source_polygon_stock_api/spec.yaml @@ -0,0 +1,74 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/airtable +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Weather API Spec + type: object + required: + - apiKey + - stocksTicker + - multiplier + - timespan + - start_date + - end_date + additionalProperties: true + properties: + apiKey: + title: API Key + type: string + description: Your API ACCESS Key + airbyte_secret: true + stocksTicker: + title: Stock Ticker + type: string + description: The exchange symbol that this item is traded under. + examples: + - IBM + - MSFT + multiplier: + title: Multiplier + type: integer + description: The size of the timespan multiplier. + examples: + - 1 + - 2 + timespan: + title: Timespan + type: string + description: The size of the time window. + examples: + - day + start_date: + title: Start Date + type: string + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: The beginning date for the aggregate window. + examples: + - "2020-10-14" + end_date: + title: End Date + type: string + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: The target date for the aggregate window. + examples: + - "2020-10-14" + adjusted: + title: Adjusted + type: string + description: Determines whether or not the results are adjusted for splits. By default, results are adjusted and set to true. Set this to false to get results that are NOT adjusted for splits. + examples: + - "true" + - "false" + sort: + title: Sort + type: string + description: Sort the results by timestamp. asc will return results in ascending order (oldest at the top), desc will return results in descending order (newest at the top). + examples: + - "asc" + - "desc" + limit: + title: Limit + type: integer + description: The target date for the aggregate window. 
+ examples: + - 100 + - 120 diff --git a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile index 8c43729edb768..2ce87b9be8f5a 100644 --- a/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres-strict-encrypt/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres-strict-encrypt COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.0.22 +LABEL io.airbyte.version=1.0.25 LABEL io.airbyte.name=airbyte/source-postgres-strict-encrypt diff --git a/airbyte-integrations/connectors/source-postgres/Dockerfile b/airbyte-integrations/connectors/source-postgres/Dockerfile index 77a80d05f0b80..7dfbacc0d6ba4 100644 --- a/airbyte-integrations/connectors/source-postgres/Dockerfile +++ b/airbyte-integrations/connectors/source-postgres/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-postgres COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=1.0.22 +LABEL io.airbyte.version=1.0.25 LABEL io.airbyte.name=airbyte/source-postgres diff --git a/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml b/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml index beaef784e60c2..1236f38994229 100644 --- a/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-postgres/acceptance-test-config.yml @@ -3,4 +3,5 @@ connector_image: airbyte/source-postgres:dev tests: spec: - - spec_path: "src/main/resources/spec.json" + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-postgres/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-postgres/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-postgres/build.gradle b/airbyte-integrations/connectors/source-postgres/build.gradle index 535b2209088df..5971ba1794da8 100644 --- a/airbyte-integrations/connectors/source-postgres/build.gradle +++ b/airbyte-integrations/connectors/source-postgres/build.gradle @@ -3,6 +3,7 @@ plugins { id 'airbyte-docker' id 'airbyte-integration-test-java' id 'airbyte-performance-test-java' + id 'airbyte-source-acceptance-test' } application { @@ -12,6 +13,8 @@ application { dependencies { implementation project(':airbyte-db:db-lib') + implementation 'io.debezium:debezium-api:1.9.6.Final' + implementation 'io.debezium:debezium-embedded:1.9.6.Final' implementation project(':airbyte-integrations:bases:base-java') implementation project(':airbyte-integrations:bases:debezium-v1-9-6') implementation project(':airbyte-protocol:protocol-models') diff --git a/airbyte-integrations/connectors/source-postgres/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-postgres/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcCatalogHelper.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcCatalogHelper.java index c7b67e28c9d48..21f082a5ad08c 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcCatalogHelper.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcCatalogHelper.java @@ -10,9 +10,9 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; import io.airbyte.db.jdbc.JdbcDatabase; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.debezium.internals.DebeziumEventUtils; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.SyncMode; import java.sql.SQLException; import java.util.ArrayList; diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java index 37c2d6ab7b37e..85b18cc01fe8b 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcProperties.java @@ -16,12 +16,14 @@ import io.airbyte.integrations.source.jdbc.AbstractJdbcSource.SslMode; import java.net.URI; import java.nio.file.Path; +import java.time.Duration; import java.util.Properties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PostgresCdcProperties { + private static final int HEARTBEAT_FREQUENCY_SEC = 10; private static final Logger LOGGER = LoggerFactory.getLogger(PostgresCdcProperties.class); static Properties getDebeziumDefaultProperties(final JdbcDatabase database) { @@ -54,6 +56,7 @@ private static Properties commonProperties(final JdbcDatabase database) { props.setProperty("converters", "datetime"); props.setProperty("datetime.type", PostgresConverter.class.getName()); props.setProperty("include.unknown.datatypes", "true"); + props.setProperty("heartbeat.interval.ms", Long.toString(Duration.ofSeconds(HEARTBEAT_FREQUENCY_SEC).toMillis())); // Check params for SSL connection in config and add properties for CDC SSL connection // https://debezium.io/documentation/reference/stable/connectors/postgresql.html#postgresql-property-database-sslmode diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcTargetPosition.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcTargetPosition.java index 1de2d0cf65608..d69e0426ca1c6 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcTargetPosition.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresCdcTargetPosition.java @@ -5,21 +5,26 @@ package io.airbyte.integrations.source.postgres; import 
com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.db.PgLsn; import io.airbyte.db.PostgresUtils; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.debezium.CdcTargetPosition; import io.airbyte.integrations.debezium.internals.SnapshotMetadata; +import io.debezium.engine.ChangeEvent; +import java.lang.reflect.Field; import java.sql.SQLException; import java.util.Objects; import java.util.Optional; +import org.apache.kafka.connect.source.SourceRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class PostgresCdcTargetPosition implements CdcTargetPosition { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresCdcTargetPosition.class); - private final PgLsn targetLsn; + @VisibleForTesting + final PgLsn targetLsn; public PostgresCdcTargetPosition(final PgLsn targetLsn) { this.targetLsn = targetLsn; @@ -62,6 +67,32 @@ public boolean reachedTargetPosition(final JsonNode valueAsJson) { } } + private boolean isHeartbeatEvent(final ChangeEvent event) { + return Objects.nonNull(event) && !event.value().contains("source"); + } + + @Override + public Long getHeartbeatPosition(final ChangeEvent heartbeatEvent) { + if (isHeartbeatEvent(heartbeatEvent)) { + try { + final Field f = heartbeatEvent.getClass().getDeclaredField("sourceRecord"); + f.setAccessible(true); + final SourceRecord sr = (SourceRecord) f.get(heartbeatEvent); + final Long hbLsn = (Long) sr.sourceOffset().get("lsn"); + LOGGER.debug("Found heartbeat lsn: {}", hbLsn); + return hbLsn; + } catch (final NoSuchFieldException | IllegalAccessException e) { + LOGGER.info("failed to get heartbeat lsn"); + } + } + return null; + } + + @Override + public boolean reachedTargetPosition(final Long lsn) { + return (lsn == null) ? 
false : lsn.compareTo(targetLsn.asLong()) >= 0; + } + private PgLsn extractLsn(final JsonNode valueAsJson) { return Optional.ofNullable(valueAsJson.get("source")) .flatMap(source -> Optional.ofNullable(source.get("lsn").asText())) @@ -70,4 +101,9 @@ private PgLsn extractLsn(final JsonNode valueAsJson) { .orElseThrow(() -> new IllegalStateException("Could not find LSN")); } + @Override + public boolean isHeartbeatSupported() { + return true; + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java index f20bab26ff498..321c075f9e924 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSource.java @@ -28,7 +28,6 @@ import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.ssh.SshWrappedSource; @@ -50,6 +49,7 @@ import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; diff --git a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java index 8bc58ee3dbd49..aa9ad53c58fbc 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java +++ b/airbyte-integrations/connectors/source-postgres/src/main/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncrypt.java @@ -4,6 +4,9 @@ package io.airbyte.integrations.source.postgres; +import static io.airbyte.protocol.models.AirbyteConnectionStatus.Status; + +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.commons.json.Jsons; @@ -11,7 +14,9 @@ import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.base.spec_modification.SpecModifyingSource; +import io.airbyte.protocol.models.AirbyteConnectionStatus; import io.airbyte.protocol.models.ConnectorSpecification; +import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -22,6 +27,13 @@ public class PostgresSourceStrictEncrypt extends SpecModifyingSource implements Source { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresSourceStrictEncrypt.class); + public static final String TUNNEL_METHOD = "tunnel_method"; + public static final String NO_TUNNEL = "NO_TUNNEL"; + public static final String SSL_MODE = "ssl_mode"; + public static final 
String MODE = "mode"; + public static final String SSL_MODE_ALLOW = "allow"; + public static final String SSL_MODE_PREFER = "prefer"; + public static final String SSL_MODE_DISABLE = "disable"; public PostgresSourceStrictEncrypt() { super(PostgresSource.sshWrappedSource()); @@ -39,6 +51,27 @@ public ConnectorSpecification modifySpec(final ConnectorSpecification originalSp return spec; } + @Override + public AirbyteConnectionStatus check(final JsonNode config) throws Exception { + // #15808 Disallow connecting to db with disable, prefer or allow SSL mode when connecting directly + // and not over SSH tunnel + if (config.has(TUNNEL_METHOD) + && config.get(TUNNEL_METHOD).has(TUNNEL_METHOD) + && config.get(TUNNEL_METHOD).get(TUNNEL_METHOD).asText().equals(NO_TUNNEL)) { + // If no SSH tunnel + if (config.has(SSL_MODE) && config.get(SSL_MODE).has(MODE)) { + if (Set.of(SSL_MODE_DISABLE, SSL_MODE_ALLOW, SSL_MODE_PREFER).contains(config.get(SSL_MODE).get(MODE).asText())) { + // Fail in case SSL mode is disable, allow or prefer + return new AirbyteConnectionStatus() + .withStatus(Status.FAILED) + .withMessage( + "Unsecured connection not allowed. If no SSH Tunnel set up, please use one of the following SSL modes: require, verify-ca, verify-full"); + } + } + } + return super.check(config); + } + public static void main(final String[] args) throws Exception { final Source source = new PostgresSourceStrictEncrypt(); LOGGER.info("starting source: {}", PostgresSourceStrictEncrypt.class); diff --git a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json index ca14aa473aff9..4dfdfab3fb6e5 100644 --- a/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/main/resources/spec.json @@ -74,75 +74,65 @@ "oneOf": [ { "title": "disable", - "additionalProperties": false, + "additionalProperties": true, "description": "Disable SSL.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "disable", - "enum": ["disable"], - "default": "disable", "order": 0 } } }, { "title": "allow", - "additionalProperties": false, + "additionalProperties": true, "description": "Allow SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "allow", - "enum": ["allow"], - "default": "allow", "order": 0 } } }, { "title": "prefer", - "additionalProperties": false, + "additionalProperties": true, "description": "Prefer SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "prefer", - "enum": ["prefer"], - "default": "prefer", "order": 0 } } }, { "title": "require", - "additionalProperties": false, + "additionalProperties": true, "description": "Require SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "require", - "enum": ["require"], - "default": "require", "order": 0 } } }, { "title": "verify-ca", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-ca SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-ca", - "enum": ["verify-ca"], - "default": "verify-ca", "order": 0 }, "ca_certificate": { @@ -180,15 +170,13 @@ }, { "title": "verify-full", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-full SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": 
"verify-full", - "enum": ["verify-full"], - "default": "verify-full", "order": 0 }, "ca_certificate": { @@ -240,8 +228,6 @@ "method": { "type": "string", "const": "Standard", - "enum": ["Standard"], - "default": "Standard", "order": 0 } } @@ -254,8 +240,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "plugin": { @@ -263,7 +247,7 @@ "title": "Plugin", "description": "A logical decoding plugin installed on the PostgreSQL server. The `pgoutput` plugin is used by default. If the replication table contains a lot of big jsonb values it is recommended to use `wal2json` plugin. Read more about selecting replication plugins.", "enum": ["pgoutput", "wal2json"], - "default": "pgoutput", + "const": "pgoutput", "order": 1 }, "replication_slot": { diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..560e553333780 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..5875205e32ebb --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/test-integration/resources/expected_spec.json @@ -0,0 +1,397 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/postgres", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Postgres Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "title": "Host", + "description": "Hostname of the database.", + "type": "string", + "order": 0 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 5432, + "examples": ["5432"], + "order": 1 + }, + "database": { + "title": "Database Name", + "description": "Name of the database.", + "type": "string", + "order": 2 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas (case sensitive) to sync from. Defaults to public.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true, + "default": ["public"], + "order": 3 + }, + "username": { + "title": "Username", + "description": "Username to access the database.", + "type": "string", + "order": 4 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 5 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters.", + "title": "JDBC URL Parameters (Advanced)", + "type": "string", + "order": 6 + }, + "ssl": { + "title": "Connect using SSL", + "description": "Encrypt data using SSL. 
When activating SSL, please select one of the connection modes.", + "type": "boolean", + "default": false, + "order": 7 + }, + "ssl_mode": { + "title": "SSL Modes", + "description": "SSL connection modes. \n
    • disable - Disables encryption of communication between Airbyte and source database
    • allow - Enables encryption only when required by the source database
    • prefer - allows unencrypted connection only if the source database does not support encryption
    • require - Always require encryption. If the source database server does not support encryption, connection will fail
    • verify-ca - Always require encryption and verifies that the source database server has a valid SSL certificate
    • verify-full - This is the most secure mode. Always require encryption and verifies the identity of the source database server
    \n Read more in the docs.", + "type": "object", + "order": 7, + "oneOf": [ + { + "title": "disable", + "additionalProperties": true, + "description": "Disable SSL.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "disable", + "order": 0 + } + } + }, + { + "title": "allow", + "additionalProperties": true, + "description": "Allow SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "allow", + "order": 0 + } + } + }, + { + "title": "prefer", + "additionalProperties": true, + "description": "Prefer SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "prefer", + "order": 0 + } + } + }, + { + "title": "require", + "additionalProperties": true, + "description": "Require SSL mode.", + "required": ["mode"], + "properties": { + "mode": { + "type": "string", + "const": "require", + "order": 0 + } + } + }, + { + "title": "verify-ca", + "additionalProperties": true, + "description": "Verify-ca SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify-ca", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client Certificate", + "description": "Client certificate", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client Key", + "description": "Client key", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "verify-full", + "additionalProperties": true, + "description": "Verify-full SSL mode.", + "required": ["mode", "ca_certificate"], + "properties": { + "mode": { + "type": "string", + "const": "verify-full", + "order": 0 + }, + "ca_certificate": { + "type": "string", + "title": "CA Certificate", + "description": "CA certificate", + "airbyte_secret": true, + "multiline": true, + "order": 1 + }, + "client_certificate": { + "type": "string", + "title": "Client Certificate", + "description": "Client certificate", + "airbyte_secret": true, + "multiline": true, + "order": 2 + }, + "client_key": { + "type": "string", + "title": "Client Key", + "description": "Client key", + "airbyte_secret": true, + "multiline": true, + "order": 3 + }, + "client_key_password": { + "type": "string", + "title": "Client key password", + "description": "Password for keystorage. 
If you do not add it - the password will be generated automatically.", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + }, + "replication_method": { + "type": "object", + "title": "Replication Method", + "description": "Replication method for extracting data from the database.", + "order": 8, + "oneOf": [ + { + "title": "Standard", + "description": "Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.", + "required": ["method"], + "properties": { + "method": { + "type": "string", + "const": "Standard", + "order": 0 + } + } + }, + { + "title": "Logical Replication (CDC)", + "description": "Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.", + "required": ["method", "replication_slot", "publication"], + "properties": { + "method": { + "type": "string", + "const": "CDC", + "order": 0 + }, + "plugin": { + "type": "string", + "title": "Plugin", + "description": "A logical decoding plugin installed on the PostgreSQL server. The `pgoutput` plugin is used by default. If the replication table contains a lot of big jsonb values it is recommended to use `wal2json` plugin. Read more about selecting replication plugins.", + "enum": ["pgoutput", "wal2json"], + "const": "pgoutput", + "order": 1 + }, + "replication_slot": { + "type": "string", + "title": "Replication Slot", + "description": "A plugin logical replication slot. Read about replication slots.", + "order": 2 + }, + "publication": { + "type": "string", + "title": "Publication", + "description": "A Postgres publication used for consuming changes. Read about publications and replication identities.", + "order": 3 + }, + "initial_waiting_seconds": { + "type": "integer", + "title": "Initial Waiting Time in Seconds (Advanced)", + "description": "The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. 
Read about initial waiting time.", + "default": 300, + "order": 4, + "min": 120, + "max": 1200 + } + } + } + ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java index 719ccf147e8fc..8712be9b2903f 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java +++ 
b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/CdcPostgresSourceTest.java @@ -49,10 +49,13 @@ import io.airbyte.protocol.models.JsonSchemaType; import io.airbyte.protocol.models.SyncMode; import io.airbyte.test.utils.PostgreSQLContainerHelper; +import io.debezium.engine.ChangeEvent; import java.sql.SQLException; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; +import org.apache.kafka.connect.source.SourceRecord; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; @@ -383,4 +386,49 @@ protected void syncShouldHandlePurgedLogsGracefully() throws Exception { assertEquals(MODEL_RECORDS.size() + recordsToCreate + 1, recordsFromThirdBatch.size()); } + @Test + void testReachedTargetPosition() { + final CdcTargetPosition ctp = cdcLatestTargetPosition(); + final PostgresCdcTargetPosition pctp = (PostgresCdcTargetPosition) ctp; + final PgLsn target = pctp.targetLsn; + assertTrue(ctp.reachedTargetPosition(target.asLong() + 1)); + assertTrue(ctp.reachedTargetPosition(target.asLong())); + assertFalse(ctp.reachedTargetPosition(target.asLong() - 1)); + assertFalse(ctp.reachedTargetPosition((Long) null)); + } + + @Test + void testGetHeartbeatPosition() { + final CdcTargetPosition ctp = cdcLatestTargetPosition(); + final PostgresCdcTargetPosition pctp = (PostgresCdcTargetPosition) ctp; + final Long lsn = pctp.getHeartbeatPosition(new ChangeEvent() { + + private final SourceRecord sourceRecord = new SourceRecord(null, Collections.singletonMap("lsn", 358824993496L), null, null, null); + + @Override + public String key() { + return null; + } + + @Override + public String value() { + return "{\"ts_ms\":1667616934701}"; + } + + @Override + public String destination() { + return null; + } + + public SourceRecord sourceRecord() { + return sourceRecord; + } + + }); + + assertEquals(lsn, 358824993496L); + + assertNull(pctp.getHeartbeatPosition(null)); + } + } diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java index e3c44bca9e63f..90c64b9cc9423 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresCdcGetPublicizedTablesTest.java @@ -18,7 +18,7 @@ import io.airbyte.db.jdbc.DefaultJdbcDatabase; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.db.jdbc.JdbcUtils; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.test.utils.PostgreSQLContainerHelper; import java.sql.SQLException; import java.util.List; diff --git a/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncryptTest.java b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncryptTest.java new file mode 100644 index 0000000000000..dc27b8a5db330 --- /dev/null +++ b/airbyte-integrations/connectors/source-postgres/src/test/java/io/airbyte/integrations/source/postgres/PostgresSourceStrictEncryptTest.java 
@@ -0,0 +1,146 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.source.postgres; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ImmutableMap; +import io.airbyte.commons.json.Jsons; +import io.airbyte.db.jdbc.JdbcUtils; +import io.airbyte.integrations.base.ssh.SshBastionContainer; +import io.airbyte.integrations.base.ssh.SshTunnel; +import io.airbyte.protocol.models.AirbyteConnectionStatus; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.PostgreSQLContainer; +import org.testcontainers.utility.DockerImageName; + +public class PostgresSourceStrictEncryptTest { + + private static final SshBastionContainer bastion = new SshBastionContainer(); + private static final Network network = Network.newNetwork(); + + @Test + void testCheckWithSSlModeDisable() throws Exception { + + try (PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine").withNetwork(network)) { + bastion.initAndStartBastion(network); + db.start(); + + // stop to enforce ssl for ssl_mode disable + final ImmutableMap.Builder builderWithSSLModeDisable = getDatabaseConfigBuilderWithSSLMode(db, "disable"); + final JsonNode configWithSSLModeDisable = bastion.getTunnelConfig(SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH, builderWithSSLModeDisable); + final AirbyteConnectionStatus connectionStatusForDisabledMode = new PostgresSourceStrictEncrypt().check(configWithSSLModeDisable); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatusForDisabledMode.getStatus()); + + } finally { + bastion.stopAndClose(); + } + } + + @Test + void testCheckWithSSlModePrefer() throws Exception { + + try (PostgreSQLContainer db = new PostgreSQLContainer<>("postgres:13-alpine").withNetwork(network)) { + bastion.initAndStartBastion(network); + db.start(); + // continue to enforce ssl because ssl mode is prefer + final ImmutableMap.Builder builderWithSSLModePrefer = getDatabaseConfigBuilderWithSSLMode(db, "prefer"); + final JsonNode configWithSSLModePrefer = bastion.getTunnelConfig(SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH, builderWithSSLModePrefer); + final AirbyteConnectionStatus connectionStatusForPreferredMode = new PostgresSourceStrictEncrypt().check(configWithSSLModePrefer); + assertEquals(AirbyteConnectionStatus.Status.FAILED, connectionStatusForPreferredMode.getStatus()); + assertEquals("State code: 08004; Message: The server does not support SSL.", connectionStatusForPreferredMode.getMessage()); + + } finally { + bastion.stopAndClose(); + } + } + + private ImmutableMap.Builder getDatabaseConfigBuilderWithSSLMode(PostgreSQLContainer db, String sslMode) { + return ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, Objects.requireNonNull(db.getContainerInfo() + .getNetworkSettings() + .getNetworks() + .entrySet().stream() + .findFirst() + .get().getValue().getIpAddress())) + .put(JdbcUtils.PORT_KEY, db.getExposedPorts().get(0)) + .put(JdbcUtils.DATABASE_KEY, db.getDatabaseName()) + .put(JdbcUtils.SCHEMAS_KEY, List.of("public")) + .put(JdbcUtils.USERNAME_KEY, db.getUsername()) + .put(JdbcUtils.PASSWORD_KEY, db.getPassword()) + .put(JdbcUtils.SSL_MODE_KEY, Map.of(JdbcUtils.MODE_KEY, sslMode)); + } + + private JsonNode 
getMockedSSLConfig(String sslMode) { + return Jsons.jsonNode(ImmutableMap.builder() + .put(JdbcUtils.HOST_KEY, "test_host") + .put(JdbcUtils.PORT_KEY, 777) + .put(JdbcUtils.DATABASE_KEY, "test_db") + .put(JdbcUtils.USERNAME_KEY, "test_user") + .put(JdbcUtils.PASSWORD_KEY, "test_password") + .put(JdbcUtils.SSL_KEY, true) + .put(JdbcUtils.SSL_MODE_KEY, Map.of(JdbcUtils.MODE_KEY, sslMode)) + .build()); + } + + @Test + void testSslModesUnsecuredNoTunnel() throws Exception { + for (String sslMode : List.of("disable", "allow", "prefer")) { + final JsonNode config = getMockedSSLConfig(sslMode); + ((ObjectNode) config).putIfAbsent("tunnel_method", Jsons.jsonNode(ImmutableMap.builder() + .put("tunnel_method", "NO_TUNNEL") + .build())); + + final AirbyteConnectionStatus actual = new PostgresSourceStrictEncrypt().check(config); + assertEquals(AirbyteConnectionStatus.Status.FAILED, actual.getStatus()); + assertTrue(actual.getMessage().contains("Unsecured connection not allowed")); + } + } + + @Test + void testSslModeRequiredNoTunnel() throws Exception { + + try (PostgreSQLContainer db = + new PostgreSQLContainer<>(DockerImageName.parse("marcosmarxm/postgres-ssl:dev").asCompatibleSubstituteFor("postgres")) + .withCommand("postgres -c ssl=on -c ssl_cert_file=/var/lib/postgresql/server.crt -c ssl_key_file=/var/lib/postgresql/server.key")) { + db.start(); + + final ImmutableMap configBuilderWithSslModeRequire = getDatabaseConfigBuilderWithSSLMode(db, "require").build(); + final JsonNode config = Jsons.jsonNode(configBuilderWithSslModeRequire); + ((ObjectNode) config).putIfAbsent("tunnel_method", Jsons.jsonNode(ImmutableMap.builder() + .put("tunnel_method", "NO_TUNNEL") + .build())); + final AirbyteConnectionStatus connectionStatusForPreferredMode = new PostgresSourceStrictEncrypt().check(config); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatusForPreferredMode.getStatus()); + } + } + + @Test + void testStrictSSLSecuredWithTunnel() throws Exception { + try (PostgreSQLContainer db = + new PostgreSQLContainer<>(DockerImageName.parse("marcosmarxm/postgres-ssl:dev").asCompatibleSubstituteFor("postgres")) + .withCommand("postgres -c ssl=on -c ssl_cert_file=/var/lib/postgresql/server.crt -c ssl_key_file=/var/lib/postgresql/server.key") + .withNetwork(network)) { + + bastion.initAndStartBastion(network); + db.start(); + + final ImmutableMap.Builder builderWithSSLModePrefer = getDatabaseConfigBuilderWithSSLMode(db, "require"); + final JsonNode configWithSslAndSsh = bastion.getTunnelConfig(SshTunnel.TunnelMethod.SSH_PASSWORD_AUTH, builderWithSSLModePrefer); + final AirbyteConnectionStatus connectionStatusForPreferredMode = new PostgresSourceStrictEncrypt().check(configWithSslAndSsh); + assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, connectionStatusForPreferredMode.getStatus()); + } finally { + bastion.stopAndClose(); + } + } + +} diff --git a/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json b/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json index 9e0f63090e8e8..52865891ce86a 100644 --- a/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json +++ b/airbyte-integrations/connectors/source-postgres/src/test/resources/expected_spec.json @@ -67,60 +67,52 @@ "oneOf": [ { "title": "allow", - "additionalProperties": false, + "additionalProperties": true, "description": "Allow SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "allow", - "enum": 
["allow"], - "default": "allow", "order": 0 } } }, { "title": "prefer", - "additionalProperties": false, + "additionalProperties": true, "description": "Prefer SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "prefer", - "enum": ["prefer"], - "default": "prefer", "order": 0 } } }, { "title": "require", - "additionalProperties": false, + "additionalProperties": true, "description": "Require SSL mode.", "required": ["mode"], "properties": { "mode": { "type": "string", "const": "require", - "enum": ["require"], - "default": "require", "order": 0 } } }, { "title": "verify-ca", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-ca SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-ca", - "enum": ["verify-ca"], - "default": "verify-ca", "order": 0 }, "ca_certificate": { @@ -158,15 +150,13 @@ }, { "title": "verify-full", - "additionalProperties": false, + "additionalProperties": true, "description": "Verify-full SSL mode.", "required": ["mode", "ca_certificate"], "properties": { "mode": { "type": "string", "const": "verify-full", - "enum": ["verify-full"], - "default": "verify-full", "order": 0 }, "ca_certificate": { @@ -218,8 +208,6 @@ "method": { "type": "string", "const": "Standard", - "enum": ["Standard"], - "default": "Standard", "order": 0 } } @@ -232,8 +220,6 @@ "method": { "type": "string", "const": "CDC", - "enum": ["CDC"], - "default": "CDC", "order": 0 }, "plugin": { @@ -241,7 +227,7 @@ "title": "Plugin", "description": "A logical decoding plugin installed on the PostgreSQL server. The `pgoutput` plugin is used by default. If the replication table contains a lot of big jsonb values it is recommended to use `wal2json` plugin. 
Read more about selecting replication plugins.", "enum": ["pgoutput", "wal2json"], - "default": "pgoutput", + "const": "pgoutput", "order": 1 }, "replication_slot": { @@ -268,7 +254,124 @@ } } ] + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] } } - } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] } diff --git a/airbyte-integrations/connectors/source-postmarkapp/.dockerignore b/airbyte-integrations/connectors/source-postmarkapp/.dockerignore new file mode 100644 index 0000000000000..5122f90d02056 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_postmarkapp +!setup.py +!secrets diff --git 
a/airbyte-integrations/connectors/source-postmarkapp/Dockerfile b/airbyte-integrations/connectors/source-postmarkapp/Dockerfile new file mode 100644 index 0000000000000..c9662a9ddb090 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_postmarkapp ./source_postmarkapp + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-postmarkapp diff --git a/airbyte-integrations/connectors/source-postmarkapp/README.md b/airbyte-integrations/connectors/source-postmarkapp/README.md new file mode 100644 index 0000000000000..c44654ce8ddf1 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/README.md @@ -0,0 +1,79 @@ +# Postmarkapp Source + +This is the repository for the Postmarkapp configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/postmarkapp). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-postmarkapp:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/postmarkapp) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_postmarkapp/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source postmarkapp test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-postmarkapp:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-postmarkapp:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
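For example, you can confirm the built image and the labels that drive its naming locally (a quick sketch; the `dev` tag assumes the manual `docker build` step above):

```
# List the locally built connector image
docker images airbyte/source-postmarkapp

# Inspect the version label that the Gradle build uses as the image tag
docker inspect --format '{{ index .Config.Labels "io.airbyte.version" }}' airbyte/source-postmarkapp:dev
```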
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-postmarkapp:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-postmarkapp:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-postmarkapp:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-postmarkapp:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, use the `acceptance-test-docker.sh` script in this connector's directory. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-postmarkapp:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-postmarkapp:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-postmarkapp/__init__.py b/airbyte-integrations/connectors/source-postmarkapp/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml new file mode 100644 index 0000000000000..389620d838b46 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-config.yml @@ -0,0 +1,40 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-postmarkapp:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_postmarkapp/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: bounces + bypass_reason: "Is empty in test data" + - name: messages + bypass_reason: "Is empty in test data" + - name: deliverystats + bypass_reason: "Is empty in test data" + - name: servers + bypass_reason: "Is empty in test data" + - name: domains + bypass_reason: "Is empty in test data" + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: no + exact_order: no + extra_records: yes + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-docker.sh new file mode 100755 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-postmarkapp/build.gradle b/airbyte-integrations/connectors/source-postmarkapp/build.gradle new file mode 100644 index 0000000000000..cb04abfc4ae72 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_postmarkapp' +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/__init__.py b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..792ca62f03aab --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/configured_catalog.json @@ -0,0 +1,64 @@ +{ + "streams": [ + { + "stream": { + "name": "deliverystats", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["Name"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "message-streams", + "json_schema": {}, + "source_defined_primary_key": [["ID"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "servers", + "json_schema": {}, + "source_defined_primary_key": [["ID"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "messages", + "json_schema": {}, + "source_defined_primary_key": [["MessageID"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "domains", + "json_schema": {}, + "source_defined_primary_key": [["ID"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "bounces", + "json_schema": {}, + "source_defined_primary_key": [["ID"]], + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..f54b79d2f917e --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/expected_records.txt @@ -0,0 +1,4 @@ +{"stream": "deliverystats", "data": {"Name": "All", "Count": 0}, "emitted_at": 1668012739230} +{"stream": "message-streams", "data": {"ID": 
"broadcast", "ServerID": 9708911, "Name": "Default Broadcast Stream", "Description": "Default stream used for sending broadcast messages", "MessageStreamType": "Broadcasts", "CreatedAt": "2022-11-01T12:48:26-04:00", "UpdatedAt": null, "ArchivedAt": null, "ExpectedPurgeDate": null, "SubscriptionManagementConfiguration": {"UnsubscribeHandlingType": "Postmark"}}, "emitted_at": 1668012739882} +{"stream": "message-streams", "data": {"ID": "inbound", "ServerID": 9708911, "Name": "Default Inbound Stream", "Description": "Default stream used for receiving inbound messages", "MessageStreamType": "Inbound", "CreatedAt": "2022-11-01T12:48:26-04:00", "UpdatedAt": null, "ArchivedAt": null, "ExpectedPurgeDate": null, "SubscriptionManagementConfiguration": {"UnsubscribeHandlingType": "None"}}, "emitted_at": 1668012739885} +{"stream": "message-streams", "data": {"ID": "outbound", "ServerID": 9708911, "Name": "Default Transactional Stream", "Description": "Default stream used for sending transactional messages", "MessageStreamType": "Transactional", "CreatedAt": "2022-11-01T12:48:26-04:00", "UpdatedAt": null, "ArchivedAt": null, "ExpectedPurgeDate": null, "SubscriptionManagementConfiguration": {"UnsubscribeHandlingType": "None"}}, "emitted_at": 1668012739886} diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..4ae0ceadeae69 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "X-Postmark-Server-Token": "wrong_value" +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/sample_config.json new file mode 100644 index 0000000000000..3efed19b653ec --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "X-Postmark-Server-Token": "test8616-1337-44ee-te33-235467d79901b" +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/sample_state.json new file mode 100644 index 0000000000000..22f8fcf41fe91 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "message-streams": { + "ServerID": 12 + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/main.py b/airbyte-integrations/connectors/source-postmarkapp/main.py new file mode 100644 index 0000000000000..937988c5c3932 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_postmarkapp import SourcePostmarkapp + +if __name__ == "__main__": + source = SourcePostmarkapp() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-postmarkapp/requirements.txt b/airbyte-integrations/connectors/source-postmarkapp/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
diff --git a/airbyte-integrations/connectors/source-postmarkapp/setup.py b/airbyte-integrations/connectors/source-postmarkapp/setup.py new file mode 100644 index 0000000000000..a29c3ebdad3d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.4", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_postmarkapp", + description="Source implementation for Postmarkapp.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/__init__.py b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/__init__.py new file mode 100644 index 0000000000000..0ecd9826b1a2b --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourcePostmarkapp + +__all__ = ["SourcePostmarkapp"] diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/postmarkapp.yaml b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/postmarkapp.yaml new file mode 100644 index 0000000000000..ef5426aeb8558 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/postmarkapp.yaml @@ -0,0 +1,158 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + selector_bounces: + extractor: + field_pointer: ["Bounces"] + selector_domains: + extractor: + field_pointer: ["Domains"] + selector_servers: + extractor: + field_pointer: ["Servers"] + selector_message_streams: + extractor: + field_pointer: ["MessageStreams"] + selector_messages: + extractor: + field_pointer: ["Messages"] + requester: + url_base: "https://api.postmarkapp.com" + http_method: "GET" + request_options_provider: + request_body_json: + Accept: "application/json" + request_parameters: + count: "500" + authenticator: + type: "ApiKeyAuthenticator" + header: "X-Postmark-Server-Token" + api_token: "{{ config['X-Postmark-Server-Token'] }}" + requester_account: + url_base: "https://api.postmarkapp.com" + http_method: "GET" + request_options_provider: + request_body_json: + Accept: "application/json" + request_parameters: + count: "500" + authenticator: + type: "ApiKeyAuthenticator" + header: "X-Postmark-Account-Token" + api_token: "{{ config['X-Postmark-Account-Token'] }}" + retriever_account: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 500 + page_token_option: + inject_into: "request_parameter" + field_name: "offset" + page_size_option: + inject_into: "request_parameter" + field_name: "offset" + requester: + $ref: "*ref(definitions.requester_account)" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 500 + 
page_token_option: + inject_into: "request_parameter" + field_name: "offset" + page_size_option: + inject_into: "request_parameter" + field_name: "offset" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + base_stream_account: + retriever: + $ref: "*ref(definitions.retriever_account)" + deliverystats: + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_bounces)" + $options: + name: "deliverystats" + primary_key: "Name" + path: "/deliverystats" + message-streams: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_message_streams)" + $options: + name: "message-streams" + primary_key: "ID" + path: "/message-streams" + bounces: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_bounces)" + $options: + name: "bounces" + primary_key: "ID" + path: "/bounces" + + servers: + $ref: "*ref(definitions.base_stream_account)" + retriever: + $ref: "*ref(definitions.retriever_account)" + record_selector: + $ref: "*ref(definitions.selector_servers)" + $options: + name: "servers" + primary_key: "ID" + path: "/servers" + messages: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector_messages)" + $options: + name: "messages" + primary_key: "MessageID" + path: "/messages/outbound" + domains: + $ref: "*ref(definitions.base_stream_account)" + $options: + name: "domains" + primary_key: "ID" + path: "/domains" + retriever: + $ref: "*ref(definitions.retriever_account)" + record_selector: + $ref: "*ref(definitions.selector_domains)" +streams: + - "*ref(definitions.deliverystats)" + - "*ref(definitions.message-streams)" + - "*ref(definitions.domains)" + - "*ref(definitions.messages)" + - "*ref(definitions.bounces)" + - "*ref(definitions.servers)" + +check: + stream_names: + - "message-streams" diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/bounces.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/bounces.json new file mode 100644 index 0000000000000..5e072c87c2ed2 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/bounces.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ID": { + "type": "integer" + }, + "Type": { + "type": "string" + }, + "TypeCode": { + "type": "integer" + }, + "Name": { + "type": "string" + }, + "Tag": { + "type": "string" + }, + "MessageID": { + "type": "string" + }, + "ServerID": { + "type": "integer" + }, + "MessageStream": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "Details": { + "type": "string" + }, + "Email": { + "type": "string" + }, + "From": { + "type": "string" + }, + "BouncedAt": { + "type": "string" + }, + "DumpAvailable": { + "type": "boolean" + }, + "Inactive": { + "type": "boolean" + }, + "CanActivate": { + "type": "boolean" + }, + "Subject": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/deliverystats.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/deliverystats.json new file mode 100644 index 0000000000000..627bfbc67816b --- /dev/null +++ 
b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/deliverystats.json @@ -0,0 +1,12 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "Name": { + "type": "string" + }, + "Count": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/domains.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/domains.json new file mode 100644 index 0000000000000..8a956f39635a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/domains.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "Name": { + "type": "string" + }, + "SPFVerified": { + "type": "boolean" + }, + "DKIMVerified": { + "type": "boolean" + }, + "WeakDKIM": { + "type": "boolean" + }, + "ReturnPathDomainVerified": { + "type": "boolean" + }, + "ID": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/message-streams.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/message-streams.json new file mode 100644 index 0000000000000..48cff6d9a4999 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/message-streams.json @@ -0,0 +1,41 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ID": { + "type": "string" + }, + "ServerID": { + "type": "integer" + }, + "Name": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "MessageStreamType": { + "type": "string" + }, + "CreatedAt": { + "type": "string" + }, + "UpdatedAt": { + "type": "null" + }, + "ArchivedAt": { + "type": "null" + }, + "ExpectedPurgeDate": { + "type": "null" + }, + "SubscriptionManagementConfiguration": { + "type": "object", + "properties": { + "UnsubscribeHandlingType": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/messages.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/messages.json new file mode 100644 index 0000000000000..5583a378f950f --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/messages.json @@ -0,0 +1,79 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "Tag": { + "type": "string" + }, + "MessageID": { + "type": "string" + }, + "MessageStream": { + "type": "string" + }, + "To": { + "type": "array", + "items": { + "type": "object", + "properties": { + "Email": { + "type": "string" + }, + "Name": { + "type": "null" + } + } + } + }, + "Cc": { + "type": "array", + "items": {} + }, + "Bcc": { + "type": "array", + "items": {} + }, + "Recipients": { + "type": "array", + "items": { + "type": "string" + } + }, + "ReceivedAt": { + "type": "string" + }, + "From": { + "type": "string" + }, + "Subject": { + "type": "string" + }, + "Attachments": { + "type": "array", + "items": {} + }, + "Status": { + "type": "string" + }, + "TrackOpens": { + "type": "boolean" + }, + "TrackLinks": { + "type": "string" + }, + "Metadata": { + "type": "object", + "properties": { + "color": { + "type": "string" + }, + "client-id": { + "type": "string" + } + } + }, + "Sandboxed": { + "type": "boolean" + } + } +} diff --git 
a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/outbound-stats.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/outbound-stats.json new file mode 100644 index 0000000000000..2d597f73e2782 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/outbound-stats.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "Sent": { + "type": "integer" + }, + "Bounced": { + "type": "integer" + }, + "SMTPApiErrors": { + "type": "integer" + }, + "BounceRate": { + "type": "number" + }, + "SpamComplaints": { + "type": "integer" + }, + "SpamComplaintsRate": { + "type": "number" + }, + "Tracked": { + "type": "integer" + }, + "Opens": { + "type": "integer" + }, + "UniqueOpens": { + "type": "integer" + }, + "TotalClicks": { + "type": "integer" + }, + "UniqueLinksClicked": { + "type": "integer" + }, + "WithClientRecorded": { + "type": "integer" + }, + "WithPlatformRecorded": { + "type": "integer" + }, + "WithReadTimeRecorded": { + "type": "integer" + }, + "WithLinkTracking": { + "type": "integer" + }, + "WithOpenTracking": { + "type": "integer" + }, + "TotalTrackedLinksSent": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/server.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/server.json new file mode 100644 index 0000000000000..5e9cd760674a7 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/server.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ID": { + "type": "integer" + }, + "Name": { + "type": "string" + }, + "ApiTokens": { + "type": "array", + "items": { + "type": "string" + } + }, + "Color": { + "type": "string" + }, + "SmtpApiActivated": { + "type": "boolean" + }, + "RawEmailEnabled": { + "type": "boolean" + }, + "DeliveryType": { + "type": "string" + }, + "ServerLink": { + "type": "string" + }, + "InboundAddress": { + "type": "string" + }, + "InboundHookUrl": { + "type": "string" + }, + "BounceHookUrl": { + "type": "string" + }, + "OpenHookUrl": { + "type": "string" + }, + "DeliveryHookUrl": { + "type": "string" + }, + "PostFirstOpenOnly": { + "type": "boolean" + }, + "InboundDomain": { + "type": "string" + }, + "InboundHash": { + "type": "string" + }, + "InboundSpamThreshold": { + "type": "integer" + }, + "TrackOpens": { + "type": "boolean" + }, + "TrackLinks": { + "type": "string" + }, + "IncludeBounceContentInHook": { + "type": "boolean" + }, + "ClickHookUrl": { + "type": "string" + }, + "EnableSmtpApiErrorHooks": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/servers.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/servers.json new file mode 100644 index 0000000000000..5e9cd760674a7 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/servers.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ID": { + "type": "integer" + }, + "Name": { + "type": "string" + }, + "ApiTokens": { + "type": "array", + "items": { + "type": "string" + } + }, + "Color": { + "type": "string" + }, + "SmtpApiActivated": { + "type": "boolean" + }, + "RawEmailEnabled": { + "type": "boolean" + }, + 
"DeliveryType": { + "type": "string" + }, + "ServerLink": { + "type": "string" + }, + "InboundAddress": { + "type": "string" + }, + "InboundHookUrl": { + "type": "string" + }, + "BounceHookUrl": { + "type": "string" + }, + "OpenHookUrl": { + "type": "string" + }, + "DeliveryHookUrl": { + "type": "string" + }, + "PostFirstOpenOnly": { + "type": "boolean" + }, + "InboundDomain": { + "type": "string" + }, + "InboundHash": { + "type": "string" + }, + "InboundSpamThreshold": { + "type": "integer" + }, + "TrackOpens": { + "type": "boolean" + }, + "TrackLinks": { + "type": "string" + }, + "IncludeBounceContentInHook": { + "type": "boolean" + }, + "ClickHookUrl": { + "type": "string" + }, + "EnableSmtpApiErrorHooks": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/templates.json b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/templates.json new file mode 100644 index 0000000000000..ce2e437b1c0da --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/schemas/templates.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "TotalCount": { + "type": "integer" + }, + "Templates": { + "type": "object", + "properties": { + "Active": { + "type": "boolean" + }, + "TemplateId": { + "type": "integer" + }, + "Name": { + "type": "string" + }, + "Alias": { + "type": "string" + }, + "TemplateType": { + "type": "string" + }, + "LayoutTemplate": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/source.py b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/source.py new file mode 100644 index 0000000000000..1b110964ac1db --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourcePostmarkapp(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "postmarkapp.yaml"}) diff --git a/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/spec.yaml b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/spec.yaml new file mode 100644 index 0000000000000..969662f1e1442 --- /dev/null +++ b/airbyte-integrations/connectors/source-postmarkapp/source_postmarkapp/spec.yaml @@ -0,0 +1,20 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Postmarkapp Spec + type: object + required: + - X-Postmark-Server-Token + - X-Postmark-Account-Token + additionalProperties: true + properties: + X-Postmark-Server-Token: + title: X-Postmark-Server-Token + type: string + description: API Key for server + airbyte_secret: true + X-Postmark-Account-Token: + title: X-Postmark-Account-Token + type: string + description: API Key for account + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-prestashop/Dockerfile b/airbyte-integrations/connectors/source-prestashop/Dockerfile index 4059ce343928c..7c96e805671c2 100644 --- a/airbyte-integrations/connectors/source-prestashop/Dockerfile +++ b/airbyte-integrations/connectors/source-prestashop/Dockerfile @@ -12,5 +12,5 @@ RUN pip install . ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.0 +LABEL io.airbyte.version=0.3.0 LABEL io.airbyte.name=airbyte/source-prestashop diff --git a/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml b/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml index b6a4efe2fbf58..bb4b0f42d7ab8 100644 --- a/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-prestashop/acceptance-test-config.yml @@ -1,25 +1,39 @@ # See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-prestashop:dev -tests: +acceptance_tests: spec: - - spec_path: "source_prestashop/spec.json" - # unfortunately timeout plugin takes into account setup code as well (docker setup) - timeout_seconds: 300 + tests: + - spec_path: "source_prestashop/spec.yaml" + # unfortunately timeout plugin takes into account setup code as well (docker setup) + timeout_seconds: 300 + backward_compatibility_tests_config: + disable_for_version: "0.2.0" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" discovery: - - config_path: "secrets/config.json" + tests: + - config_path: "secrets/config.json" basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: no + exact_order: no + extra_records: yes incremental: - - config_path: "secrets/config.json" - configured_catalog_path: 
"integration_tests/configured_catalog.json" - future_state_path: "integration_tests/future_state.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state: + future_state_path: "integration_tests/abnormal_state.json" full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-prestashop/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-prestashop/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..b86e0ee4b5364 --- /dev/null +++ b/airbyte-integrations/connectors/source-prestashop/integration_tests/abnormal_state.json @@ -0,0 +1,255 @@ +[ + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "addresses" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "cart_rules" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "carts" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "categories" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "configurations" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "customer_messages" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "customers" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "customer_threads" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "groups" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "manufacturers" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "messages" + }, + "stream_state": { + "date_add": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "order_carriers" + }, + "stream_state": { + "date_add": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "order_histories" + }, + "stream_state": { + "date_add": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "order_invoices" + }, + "stream_state": { + "date_add": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "order_payments" + }, + "stream_state": { + "date_add": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "orders" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + 
"stream_descriptor": { + "name": "order_slip" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "products" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "stock_movement_reasons" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "stock_movements" + }, + "stream_state": { + "date_add": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "stores" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "suppliers" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "tax_rule_groups" + }, + "stream_state": { + "date_upd": "2121-06-16 14:13:26" + } + } + } +] diff --git a/airbyte-integrations/connectors/source-prestashop/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-prestashop/integration_tests/acceptance.py index 1410925e0bc1a..2290f4a170fc6 100644 --- a/airbyte-integrations/connectors/source-prestashop/integration_tests/acceptance.py +++ b/airbyte-integrations/connectors/source-prestashop/integration_tests/acceptance.py @@ -21,7 +21,12 @@ def create_config_fixture(): secrets_path.mkdir(exist_ok=True) config_filename = str(secrets_path / "config.json") - config = {"url": "http://localhost:8080", "access_key": "59662QEPFNCJ3KFL3VCT5VNQ4NHVUF4Y", "_allow_http": True} + config = { + "url": "http://localhost:8080", + "_allow_http": True, + "access_key": "59662QEPFNCJ3KFL3VCT5VNQ4NHVUF4Y", + "start_date": "2021-05-25", + } with open(config_filename, "w+") as fp: json.dump(obj=config, fp=fp) diff --git a/airbyte-integrations/connectors/source-prestashop/integration_tests/catalog.json b/airbyte-integrations/connectors/source-prestashop/integration_tests/catalog.json deleted file mode 100644 index a4b450674fb9c..0000000000000 --- a/airbyte-integrations/connectors/source-prestashop/integration_tests/catalog.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "addresses", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": "integer" - }, - "id_customer": { - "type": ["null", "string"] - }, - "id_manufacturer": { - "type": ["null", "string"] - }, - "id_supplier": { - "type": ["null", "string"] - }, - "id_warehouse": { - "type": ["null", "string"] - }, - "id_country": { - "type": "string" - }, - "id_state": { - "type": ["null", "string"] - }, - "alias": { - "type": "string" - }, - "company": { - "type": ["null", "string"] - }, - "lastname": { - "type": "string" - }, - "firstname": { - "type": "string" - }, - "vat_number": { - "type": ["null", "string"] - }, - "address1": { - "type": "string" - }, - "address2": { - "type": ["null", "string"] - }, - "postcode": { - "type": ["null", "string"] - }, - "city": { - "type": "string" - }, - "other": { - "type": ["null", "string"] - }, - "phone": { - "type": ["null", "string"] - }, - "phone_mobile": { - "type": ["null", "string"] - }, - "dni": { - "type": ["null", "string"] - }, - "deleted": { - "type": "string" - }, - "date_add": { - "type": "string", - "format": "date-time" - }, - "date_upd": { - 
"type": "string", - "format": "date-time" - } - } - }, - "supported_sync_modes": ["incremental"], - "source_defined_cursor": true, - "default_cursor_field": ["date_upd"] - }, - "sync_mode": "incremental", - "cursor_field": ["date_upd"], - "destination_sync_mode": "append" - } - ] -} diff --git a/airbyte-integrations/connectors/source-prestashop/integration_tests/dump.sql.gz b/airbyte-integrations/connectors/source-prestashop/integration_tests/dump.sql.gz index 7347898371ada..5a0fbd79b7a28 100644 Binary files a/airbyte-integrations/connectors/source-prestashop/integration_tests/dump.sql.gz and b/airbyte-integrations/connectors/source-prestashop/integration_tests/dump.sql.gz differ diff --git a/airbyte-integrations/connectors/source-prestashop/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-prestashop/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..bf44ef8554cf0 --- /dev/null +++ b/airbyte-integrations/connectors/source-prestashop/integration_tests/expected_records.txt @@ -0,0 +1,56 @@ +{"stream": "addresses", "data": {"id": 1, "id_customer": "1", "id_manufacturer": "0", "id_supplier": "0", "id_warehouse": "0", "id_country": "21", "id_state": "0", "alias": "Anonymous", "company": "Anonymous", "lastname": "Anonymous", "firstname": "Anonymous", "vat_number": "0000", "address1": "Anonymous", "address2": "", "postcode": "00000", "city": "Anonymous", "other": "", "phone": "0000000000", "phone_mobile": "0000000000", "dni": "0000", "deleted": "0", "date_add": "2021-07-23 23:18:07", "date_upd": "2021-07-23 23:18:07"}, "emitted_at": 1667903102665} +{"stream": "carriers", "data": {"id": 1, "deleted": "0", "is_module": "0", "id_tax_rules_group": "1", "id_reference": "1", "name": "Airbyte", "active": "1", "is_free": "1", "url": "", "shipping_handling": "0", "shipping_external": "0", "range_behavior": "0", "shipping_method": 1, "max_width": "0", "max_height": "0", "max_depth": "0", "max_weight": "0.000000", "grade": "0", "external_module_name": "", "need_range": "0", "position": "0", "delay": "Pick up in-store"}, "emitted_at": 1667903102711} +{"stream": "cart_rules", "data": {"id": 1, "id_customer": "0", "date_from": "2021-07-24 20:00:00", "date_to": "2021-08-24 20:00:00", "description": "Some test descriptions for cart rule", "quantity": "4", "quantity_per_user": "10", "priority": "1", "partial_use": "1", "code": "E4639C5C", "minimum_amount": "100.000000", "minimum_amount_tax": "0", "minimum_amount_currency": "1", "minimum_amount_shipping": "0", "country_restriction": "0", "carrier_restriction": "0", "group_restriction": "0", "cart_rule_restriction": "0", "product_restriction": "1", "shop_restriction": "0", "free_shipping": "1", "reduction_percent": "15.00", "reduction_amount": "0.000000", "reduction_tax": "0", "reduction_currency": "1", "reduction_product": "-1", "reduction_exclude_special": "1", "gift_product": "13", "gift_product_attribute": "0", "highlight": "1", "active": "1", "date_add": "2021-07-24 20:31:17", "date_upd": "2021-07-24 20:31:17", "name": "This is cart rule"}, "emitted_at": 1667903102754} +{"stream": "carts", "data": {"id": 1, "id_address_delivery": "5", "id_address_invoice": "5", "id_currency": "1", "id_customer": "2", "id_guest": "1", "id_lang": "1", "id_shop_group": "1", "id_shop": "1", "id_carrier": "2", "recyclable": "0", "gift": "0", "gift_message": "", "mobile_theme": "0", "delivery_option": "{\"3\":\"2,\"}", "secure_key": "b44a6d9efd7a0076a0fbce6b15eaf3b1", "allow_seperated_package": "0", "date_add": "2021-07-23 
23:18:42", "date_upd": "2021-07-23 23:18:42", "associations": {"cart_rows": [{"id_product": "1", "id_product_attribute": "1", "id_address_delivery": "3", "id_customization": "0", "quantity": "1"}, {"id_product": "2", "id_product_attribute": "9", "id_address_delivery": "3", "id_customization": "0", "quantity": "1"}]}}, "emitted_at": 1667903102798} +{"stream": "categories", "data": {"id": 1, "id_parent": "0", "level_depth": "0", "nb_products_recursive": "19", "active": "1", "id_shop_default": "1", "is_root_category": "0", "position": "0", "date_add": "2021-07-23 23:17:27", "date_upd": "2021-07-23 23:17:27", "name": "Root", "link_rewrite": "root", "description": "", "meta_title": "", "meta_description": "", "meta_keywords": "", "associations": {"categories": [{"id": "2"}]}}, "emitted_at": 1667903102866} +{"stream": "combinations", "data": {"id": 1, "id_product": "1", "location": "", "ean13": "", "isbn": "", "upc": "", "mpn": "", "quantity": "300", "reference": "demo_1", "supplier_reference": "", "wholesale_price": "0.000000", "price": "0.000000", "ecotax": "0.000000", "weight": "0.000000", "unit_price_impact": "0.000000", "minimal_quantity": "1", "low_stock_threshold": null, "low_stock_alert": "0", "default_on": "1", "available_date": "2022-01-01", "associations": {"product_option_values": [{"id": "1"}, {"id": "8"}], "images": [{"id": "2"}]}}, "emitted_at": 1667903102939} +{"stream": "configurations", "data": {"id": 354, "value": "1", "name": "GF_INSTALL_CALC", "id_shop_group": "", "id_shop": "", "date_add": "2021-07-23 23:18:07", "date_upd": "2021-07-23 18:07:10"}, "emitted_at": 1667903102994} +{"stream": "contacts", "data": {"id": 1, "email": "integration-test@airbyte.io", "customer_service": "1", "name": "Webmaster", "description": "If a technical problem occurs on this website"}, "emitted_at": 1667903103626} +{"stream": "content_management_system", "data": {"id": 1, "id_cms_category": "1", "position": "0", "indexation": "0", "active": "1", "meta_description": "Our terms and conditions of delivery", "meta_keywords": "conditions, delivery, delay, shipment, pack", "meta_title": "Delivery", "head_seo_title": "", "link_rewrite": "delivery", "content": "

Shipments and returns Your pack shipment Packages are generally dispatched within 2 days after receipt of payment and are shipped via UPS with tracking and drop-off without signature. If you prefer delivery by UPS Extra with required signature, an additional cost will be applied, so please contact us before choosing this method. Whichever shipment choice you make, we will provide you with a link to track your package online. Shipping fees include handling and packing fees as well as postage costs. Handling fees are fixed, whereas transport fees vary according to total weight of the shipment. We advise you to group your items in one order. We cannot group two distinct orders placed separately, and shipping fees will apply to each of them. Your package will be dispatched at your own risk, but special care is taken to protect fragile objects. Boxes are amply sized and your items are well-protected.
    "}, "emitted_at": 1667903103655} +{"stream": "countries", "data": {"id": 1, "id_zone": "1", "id_currency": "0", "call_prefix": "49", "iso_code": "DE", "active": "0", "contains_states": "0", "need_identification_number": "0", "need_zip_code": "1", "zip_code_format": "NNNNN", "display_tax_label": "1", "name": "Germany"}, "emitted_at": 1667903103703} +{"stream": "currencies", "data": {"id": 1, "names": "US Dollar", "name": "US Dollar", "symbol": "$", "iso_code": "USD", "numeric_iso_code": "840", "precision": "2", "conversion_rate": "1.000000", "deleted": "0", "active": "1", "unofficial": "0", "modified": "0", "pattern": ""}, "emitted_at": 1667903103948} +{"stream": "customer_messages", "data": {"id": 1, "id_employee": "1", "id_customer_thread": "1", "ip_address": "100.1.1.0", "message": "Hello, how are you? It is me Eugene.", "file_name": null, "user_agent": "SOme user Agent2", "private": "0", "date_add": "2020-07-25 02:42:14", "date_upd": "2021-07-02 02:42:22", "read": "1"}, "emitted_at": 1667903103972} +{"stream": "customer_threads", "data": {"id": 1, "id_lang": "1", "id_shop": "1", "id_customer": "1", "id_order": "1", "id_product": "1", "id_contact": "1", "email": "some@mail.com", "token": null, "status": "open", "date_add": "2021-02-25 02:39:00", "date_upd": "2021-06-25 02:39:07", "associations": {"customer_messages": [{"id": "1"}, {"id": "2"}]}}, "emitted_at": 1667903104001} +{"stream": "customers", "data": {"id": 1, "id_default_group": "3", "id_lang": "1", "newsletter_date_add": "0000-00-00 00:00:00", "ip_registration_newsletter": "", "last_passwd_gen": "2021-07-23 17:18:07", "secure_key": "490f551646c7f55281a5be6e9d70f121", "deleted": "0", "passwd": "prestashop", "lastname": "Anonymous", "firstname": "Anonymous", "email": "anonymous@psgdpr.com", "id_gender": "1", "birthday": "0000-00-00", "newsletter": "0", "optin": "1", "website": "", "company": "", "siret": "", "ape": "", "outstanding_allow_amount": "0.000000", "show_public_prices": "0", "id_risk": "0", "max_payment_days": "0", "active": "0", "note": "", "is_guest": "0", "id_shop": "1", "id_shop_group": "1", "date_add": "2021-07-23 23:18:07", "date_upd": "2021-07-23 23:18:07", "reset_password_token": "", "reset_password_validity": "2025-01-01 10:30:00", "associations": {"groups": [{"id": "3"}]}}, "emitted_at": 1667903104030} +{"stream": "deliveries", "data": {"id": 1, "id_carrier": "2", "id_range_price": "0", "id_range_weight": "1", "id_zone": "1", "id_shop": "", "id_shop_group": "", "price": "5.000000"}, "emitted_at": 1667903104066} +{"stream": "employees", "data": {"id": 1, "id_lang": "1", "last_passwd_gen": "2021-07-23 17:17:28", "stats_date_from": "2021-06-23", "stats_date_to": "2021-07-23", "stats_compare_from": "2021-06-23", "stats_compare_to": "2021-07-23", "passwd": "$2y$10$BOkpSr6te9uvu97I05XI3u8hMMowukHd0PWNTR6qjTcA/YOlYc2ja", "lastname": "Kulak", "firstname": "Eugene", "email": "integration-test@airbyte.io", "active": "1", "id_profile": "1", "bo_color": null, "default_tab": "1", "bo_theme": "default", "bo_css": "theme.css", "bo_width": "0", "bo_menu": "1", "stats_compare_option": "1", "preselect_date_range": null, "id_last_order": "0", "id_last_customer_message": "0", "id_last_customer": "0", "reset_password_token": null, "reset_password_validity": "2025-01-01 10:30:00"}, "emitted_at": 1667903104106} +{"stream": "groups", "data": {"id": 1, "reduction": "0.00", "price_display_method": "1", "show_prices": "1", "date_add": "2021-07-23 23:17:27", "date_upd": "2021-07-23 23:17:28", "name": "Visitor"}, "emitted_at": 
1667903104146} +{"stream": "guests", "data": {"id": 2, "id_customer": "0", "id_operating_system": "0", "id_web_browser": "0", "javascript": "0", "screen_resolution_x": "0", "screen_resolution_y": "0", "screen_color": "0", "sun_java": "0", "adobe_flash": "0", "adobe_director": "0", "apple_quicktime": "0", "real_player": "0", "windows_media": "0", "accept_language": "", "mobile_theme": "0"}, "emitted_at": 1667903104191} +{"stream": "image_types", "data": {"id": 1, "name": "cart_default", "width": "125", "height": "125", "categories": "0", "products": "1", "manufacturers": "0", "suppliers": "0", "stores": "0"}, "emitted_at": 1667903104232} +{"stream": "languages", "data": {"id": 1, "name": "English (English)", "iso_code": "en", "locale": "en-US", "language_code": "en-us", "active": "1", "is_rtl": "0", "date_format_lite": "m/d/Y", "date_format_full": "m/d/Y H:i:s"}, "emitted_at": 1667903104272} +{"stream": "manufacturers", "data": {"id": 1, "active": "1", "link_rewrite": "studio-design", "name": "Studio Design", "date_add": "2021-07-23 23:18:41", "date_upd": "2021-07-23 23:18:41", "description": "

Studio Design offers a range of items from ready-to-wear collections to contemporary objects. The brand has been presenting new ideas and trends since its creation in 2012.
    ", "short_description": "", "meta_title": "", "meta_description": "", "meta_keywords": "", "associations": {"addresses": [{"id": "4"}]}}, "emitted_at": 1667903104313} +{"stream": "messages", "data": {"id": 1, "id_cart": "1", "id_order": "1", "id_customer": "2", "id_employee": "1", "message": "This message is about order ", "private": "1", "date_add": "2021-06-25 04:20:13"}, "emitted_at": 1667903104358} +{"stream": "order_carriers", "data": {"id": 1, "id_order": "1", "id_carrier": "2", "id_order_invoice": "0", "weight": "0.000000", "shipping_cost_tax_excl": "2.000000", "shipping_cost_tax_incl": "2.000000", "tracking_number": "", "date_add": "2021-07-23 23:18:42"}, "emitted_at": 1667903104401} +{"stream": "order_details", "data": {"id": 1, "id_order": "1", "product_id": "1", "product_attribute_id": "1", "product_quantity_reinjected": "0", "group_reduction": "0.00", "discount_quantity_applied": "0", "download_hash": "", "download_deadline": "0000-00-00 00:00:00", "id_order_invoice": "0", "id_warehouse": "0", "id_shop": "1", "id_customization": "0", "product_name": "Hummingbird printed t-shirt - Color : White, Size : S", "product_quantity": "1", "product_quantity_in_stock": "1", "product_quantity_return": "0", "product_quantity_refunded": "0", "product_price": "23.900000", "reduction_percent": "0.00", "reduction_amount": "0.000000", "reduction_amount_tax_incl": "0.000000", "reduction_amount_tax_excl": "0.000000", "product_quantity_discount": "0.000000", "product_ean13": "", "product_isbn": "", "product_upc": "", "product_mpn": "", "product_reference": "demo_1", "product_supplier_reference": "", "product_weight": "0.000000", "tax_computation_method": "0", "id_tax_rules_group": "0", "ecotax": "0.000000", "ecotax_tax_rate": "0.000", "download_nb": "0", "unit_price_tax_incl": "23.900000", "unit_price_tax_excl": "23.900000", "total_price_tax_incl": "23.900000", "total_price_tax_excl": "23.900000", "total_shipping_price_tax_excl": "0.000000", "total_shipping_price_tax_incl": "0.000000", "purchase_supplier_price": "0.000000", "original_product_price": "23.900000", "original_wholesale_price": "0.000000", "total_refunded_tax_excl": "0.000000", "total_refunded_tax_incl": "0.000000"}, "emitted_at": 1667903104450} +{"stream": "order_histories", "data": {"id": 1, "id_employee": "0", "id_order_state": "1", "id_order": "1", "date_add": "2021-07-23 23:18:42"}, "emitted_at": 1667903104490} +{"stream": "order_invoices", "data": {"id": 1, "id_order": "1", "number": "1000", "delivery_number": "1", "delivery_date": "2021-08-25 03:08:28", "total_discount_tax_excl": "23.000000", "total_discount_tax_incl": "25.000000", "total_paid_tax_excl": "11.000000", "total_paid_tax_incl": "33.000000", "total_products": "1.000000", "total_products_wt": "0.000000", "total_shipping_tax_excl": "0.000000", "total_shipping_tax_incl": "0.000000", "shipping_tax_computation_method": "1", "total_wrapping_tax_excl": "0.000000", "total_wrapping_tax_incl": "0.000000", "shop_address": "Test address", "note": "some note", "date_add": "2021-05-25 03:09:23"}, "emitted_at": 1667903104525} +{"stream": "order_payments", "data": {"id": 1, "order_reference": "1000", "id_currency": "1", "amount": "30.000000", "payment_method": "1", "conversion_rate": "1.000000", "transaction_id": "1", "card_number": "12343243432434", "card_brand": "Visa", "card_expiration": "10-22", "card_holder": "Customer", "date_add": "2021-07-09 04:22:37"}, "emitted_at": 1667903104553} +{"stream": "order_slip", "data": {"id": 1, "id_customer": "1", "id_order": "1", 
"conversion_rate": "1.000000", "total_products_tax_excl": "30.000000", "total_products_tax_incl": "20.000000", "total_shipping_tax_excl": "11.000000", "total_shipping_tax_incl": "12.000000", "amount": "10.000000", "shipping_cost": "0", "shipping_cost_amount": "15.000000", "partial": "1", "date_add": "2021-05-25 04:25:48", "date_upd": "2021-06-25 04:25:53", "order_slip_type": "0"}, "emitted_at": 1667903104580} +{"stream": "order_states", "data": {"id": 2, "unremovable": "1", "delivery": "0", "hidden": "0", "send_email": "1", "module_name": "", "invoice": "1", "color": "#3498D8", "logable": "1", "shipped": "0", "paid": "1", "pdf_delivery": "0", "pdf_invoice": "1", "deleted": "0", "name": "Payment accepted", "template": "payment"}, "emitted_at": 1667903104608} +{"stream": "orders", "data": {"id": 1, "id_address_delivery": "5", "id_address_invoice": "5", "id_cart": "1", "id_currency": "1", "id_lang": "1", "id_customer": "2", "id_carrier": "2", "current_state": "6", "module": "ps_checkpayment", "invoice_number": "0", "invoice_date": "0000-00-00 00:00:00", "delivery_number": "0", "delivery_date": "0000-00-00 00:00:00", "valid": "0", "date_add": "2021-07-23 23:18:42", "date_upd": "2021-07-23 23:18:42", "shipping_number": "", "id_shop_group": "1", "id_shop": "1", "secure_key": "b44a6d9efd7a0076a0fbce6b15eaf3b1", "payment": "Payment by check", "recyclable": "0", "gift": "0", "gift_message": "", "mobile_theme": "0", "total_discounts": "0.000000", "total_discounts_tax_incl": "0.000000", "total_discounts_tax_excl": "0.000000", "total_paid": "61.800000", "total_paid_tax_incl": "61.800000", "total_paid_tax_excl": "61.800000", "total_paid_real": "0.000000", "total_products": "59.800000", "total_products_wt": "59.800000", "total_shipping": "2.000000", "total_shipping_tax_incl": "2.000000", "total_shipping_tax_excl": "2.000000", "carrier_tax_rate": "0.000", "total_wrapping": "0.000000", "total_wrapping_tax_incl": "0.000000", "total_wrapping_tax_excl": "0.000000", "round_mode": "0", "round_type": "0", "conversion_rate": "1.000000", "reference": "XKBKNABJK", "associations": {"order_rows": [{"id": "1", "product_id": "1", "product_attribute_id": "1", "product_quantity": "1", "product_name": "Hummingbird printed t-shirt - Color : White, Size : S", "product_reference": "demo_1", "product_ean13": "", "product_isbn": "", "product_upc": "", "product_price": "23.900000", "id_customization": "0", "unit_price_tax_incl": "23.900000", "unit_price_tax_excl": "23.900000"}, {"id": "2", "product_id": "2", "product_attribute_id": "9", "product_quantity": "1", "product_name": "Hummingbird printed sweater - Color : White, Size : S", "product_reference": "demo_3", "product_ean13": "", "product_isbn": "", "product_upc": "", "product_price": "35.900000", "id_customization": "0", "unit_price_tax_incl": "35.900000", "unit_price_tax_excl": "35.900000"}]}}, "emitted_at": 1667903104644} +{"stream": "price_ranges", "data": {"id": 1, "id_carrier": "2", "delimiter1": "0.000000", "delimiter2": "10000.000000"}, "emitted_at": 1667903104689} +{"stream": "product_customization_fields", "data": {"id": 1, "id_product": "19", "type": "1", "required": "1", "is_module": "0", "is_deleted": "0", "name": "Type your text here"}, "emitted_at": 1667903104729} +{"stream": "product_feature_values", "data": {"id": 1, "id_feature": "1", "custom": "0", "value": "Polyester"}, "emitted_at": 1667903104770} +{"stream": "product_features", "data": {"id": 1, "position": "0", "name": "Composition"}, "emitted_at": 1667903104811} +{"stream": "product_option_values", 
"data": {"id": 1, "id_attribute_group": "1", "color": "", "position": "0", "name": "S"}, "emitted_at": 1667903104852} +{"stream": "product_options", "data": {"id": 1, "is_color_group": "0", "group_type": "select", "position": "0", "name": "Size", "public_name": "Size", "associations": {"product_option_values": [{"id": "1"}, {"id": "2"}, {"id": "3"}, {"id": "4"}]}}, "emitted_at": 1667903104883} +{"stream": "product_suppliers", "data": {"id": 1, "id_product": "19", "id_product_attribute": "0", "id_supplier": "1", "id_currency": "1", "product_supplier_reference": "", "product_supplier_price_te": "0.000000"}, "emitted_at": 1667903104920} +{"stream": "products", "data": {"id": 1, "id_manufacturer": "1", "id_supplier": "0", "id_category_default": "4", "new": null, "cache_default_attribute": "1", "id_default_image": "1", "id_default_combination": "1", "id_tax_rules_group": "9", "position_in_category": "1", "manufacturer_name": "Studio Design", "quantity": "0", "type": "simple", "id_shop_default": "1", "reference": "demo_1", "supplier_reference": "", "location": "", "width": "0.000000", "height": "0.000000", "depth": "0.000000", "weight": "0.300000", "quantity_discount": "0", "ean13": "", "isbn": "", "upc": "", "mpn": "", "cache_is_pack": "0", "cache_has_attachments": "0", "is_virtual": "0", "state": "1", "additional_delivery_times": "1", "delivery_in_stock": "", "delivery_out_stock": "", "on_sale": "0", "online_only": "0", "ecotax": "0.000000", "minimal_quantity": "1", "low_stock_threshold": null, "low_stock_alert": "0", "price": "23.900000", "wholesale_price": "0.000000", "unity": "", "unit_price_ratio": "0.000000", "additional_shipping_cost": "0.000000", "customizable": "0", "text_fields": "0", "uploadable_files": "0", "active": "1", "redirect_type": "301-category", "id_type_redirected": "0", "available_for_order": "1", "available_date": "0000-00-00", "show_condition": "0", "condition": "new", "show_price": "1", "indexed": "1", "visibility": "both", "advanced_stock_management": "0", "date_add": "2021-07-23 23:18:42", "date_upd": "2021-07-23 23:18:42", "pack_stock_type": "3", "meta_description": "", "meta_keywords": "", "meta_title": "", "link_rewrite": "hummingbird-printed-t-shirt", "name": "Hummingbird printed t-shirt", "description": "

Symbol of lightness and delicacy, the hummingbird evokes curiosity and joy. Studio Design' PolyFaune collection features classic products with colorful patterns, inspired by the traditional japanese origamis. To wear with a chino or jeans. The sublimation textile printing process provides an exceptional color rendering and a color, guaranteed overtime.", "description_short": "Regular fit, round neckline, short sleeves. Made of extra long staple pima cotton. \r\n
    ", "available_now": "", "available_later": "", "associations": {"categories": [{"id": "2"}, {"id": "3"}, {"id": "4"}], "images": [{"id": "1"}, {"id": "2"}], "combinations": [{"id": "1"}, {"id": "2"}, {"id": "3"}, {"id": "4"}, {"id": "5"}, {"id": "6"}, {"id": "7"}, {"id": "8"}], "product_option_values": [{"id": "1"}, {"id": "8"}, {"id": "11"}, {"id": "2"}, {"id": "3"}, {"id": "4"}], "product_features": [{"id": "1", "id_feature_value": "4"}, {"id": "2", "id_feature_value": "8"}], "stock_availables": [{"id": "1", "id_product_attribute": "0"}, {"id": "20", "id_product_attribute": "1"}, {"id": "21", "id_product_attribute": "2"}, {"id": "22", "id_product_attribute": "3"}, {"id": "23", "id_product_attribute": "4"}, {"id": "24", "id_product_attribute": "5"}, {"id": "25", "id_product_attribute": "6"}, {"id": "26", "id_product_attribute": "7"}, {"id": "27", "id_product_attribute": "8"}]}}, "emitted_at": 1667903105015} +{"stream": "shop_groups", "data": {"id": 1, "name": "Default", "share_customer": "0", "share_order": "0", "share_stock": "0", "active": "1", "deleted": "0"}, "emitted_at": 1667903105082} +{"stream": "shop_urls", "data": {"id": 1, "id_shop": "1", "active": "1", "main": "1", "domain": "localhost:8080", "domain_ssl": "localhost:8080", "physical_uri": "/", "virtual_uri": ""}, "emitted_at": 1667903105120} +{"stream": "shops", "data": {"id": 1, "id_shop_group": "1", "id_category": "2", "active": "1", "deleted": "0", "name": "Airbyte", "theme_name": "classic"}, "emitted_at": 1667903105158} +{"stream": "specific_price_rules", "data": {"id": 2, "id_shop": "1", "id_country": "0", "id_currency": "0", "id_group": "0", "name": "Test price rule 2", "from_quantity": "1", "price": "-1.000000", "reduction": "1.000000", "reduction_tax": "0", "reduction_type": "amount", "from": "0000-00-00 00:00:00", "to": "0000-00-00 00:00:00"}, "emitted_at": 1667903105199} +{"stream": "specific_prices", "data": {"id": 1, "id_shop_group": "0", "id_shop": "0", "id_cart": "0", "id_product": "1", "id_product_attribute": "0", "id_currency": "0", "id_country": "0", "id_group": "0", "id_customer": "0", "id_specific_price_rule": "0", "price": "-1.000000", "from_quantity": "1", "reduction": "0.200000", "reduction_tax": "1", "reduction_type": "percentage", "from": "0000-00-00 00:00:00", "to": "0000-00-00 00:00:00"}, "emitted_at": 1667903105241} +{"stream": "states", "data": {"id": 89, "id_zone": "2", "id_country": "4", "iso_code": "ON", "name": "Ontario", "active": "1"}, "emitted_at": 1667903105781} +{"stream": "stock_availables", "data": {"id": 1, "id_product": "1", "id_product_attribute": "0", "id_shop": "1", "id_shop_group": "0", "quantity": "2958", "depends_on_stock": "0", "out_of_stock": "2", "location": ""}, "emitted_at": 1667903106787} +{"stream": "stock_movement_reasons", "data": {"id": 1, "sign": "1", "deleted": "0", "date_add": "2021-07-23 23:17:27", "date_upd": "2021-07-23 23:17:27", "name": "Increase"}, "emitted_at": 1667903106900} +{"stream": "stock_movements", "data": {"id": 1, "id_product": "", "id_product_attribute": "", "id_warehouse": "", "id_currency": "", "management_type": null, "id_employee": "1", "id_stock": "20", "id_stock_mvt_reason": "11", "id_order": "", "id_supply_order": "", "product_name": false, "ean13": null, "upc": null, "reference": null, "mpn": null, "physical_quantity": "500", "sign": "1", "last_wa": "0.000000", "current_wa": "0.000000", "price_te": "0.000000", "date_add": "2021-07-24 20:28:51"}, "emitted_at": 1667903107797} +{"stream": "stores", "data": {"id": 1, "id_country": "21", 
"id_state": "12", "hours": " [[\"09:00AM - 07:00PM\"],[\"09:00AM - 07:00PM\"],[\"09:00AM - 07:00PM\"],[\"09:00AM - 07:00PM\"],[\"09:00AM - 07:00PM\"],[\"10:00AM - 04:00PM\"],[\"10:00AM - 04:00PM\"]]", "postcode": "33135", "city": "Miami", "latitude": "25.76500500", "longitude": "-80.24379700", "phone": "", "fax": "", "email": "", "active": "1", "date_add": "2021-07-23 23:18:43", "date_upd": "2021-07-23 23:18:43", "name": "Dade County", "address1": "3030 SW 8th St Miami", "address2": "", "note": ""}, "emitted_at": 1667903107872} +{"stream": "suppliers", "data": {"id": 1, "link_rewrite": "some-cool-supplier", "name": "Some cool supplier", "active": "1", "date_add": "2021-07-24 20:26:22", "date_upd": "2021-07-24 20:26:22", "description": "

This is a reach text. \n bold colored \n italic \n underscored \n strikeout \n \n test quotes \n another line \n \n HEADER 1 \n header 2
    ", "meta_title": "", "meta_description": "", "meta_keywords": "Tag1, Tag2, Tag3, Tag4, Tag4, Tag5, Tag6, Tag7, Tag8, Tag9, Tag10, Tag11"}, "emitted_at": 1667903107922} +{"stream": "tags", "data": {"id": 1, "id_lang": "1", "name": "Crazy"}, "emitted_at": 1667903107957} +{"stream": "tax_rule_groups", "data": {"id": 1, "name": "US-AL Rate (4%)", "active": "1", "deleted": "0", "date_add": "2021-07-23 23:17:28", "date_upd": "2021-07-23 23:17:28"}, "emitted_at": 1667903108794} +{"stream": "tax_rules", "data": {"id": 1, "id_tax_rules_group": "1", "id_state": "4", "id_country": "21", "zipcode_from": "0", "zipcode_to": "0", "id_tax": "1", "behavior": "1", "description": ""}, "emitted_at": 1667903108948} +{"stream": "taxes", "data": {"id": 1, "rate": "4.000", "active": "1", "deleted": "0", "name": "Sales-taxes US-AL 4%"}, "emitted_at": 1667903109059} +{"stream": "translated_configurations", "data": {"id": 38, "value": "#IN", "date_add": "2021-07-25 20:30:30", "date_upd": null, "name": "PS_INVOICE_PREFIX", "id_shop_group": "", "id_shop": ""}, "emitted_at": 1667903109162} +{"stream": "weight_ranges", "data": {"id": 1, "id_carrier": "2", "delimiter1": "0.000000", "delimiter2": "10000.000000"}, "emitted_at": 1667903109211} +{"stream": "zones", "data": {"id": 1, "name": "Europe", "active": "1"}, "emitted_at": 1667903109268} diff --git a/airbyte-integrations/connectors/source-prestashop/integration_tests/future_state.json b/airbyte-integrations/connectors/source-prestashop/integration_tests/future_state.json deleted file mode 100644 index 436fc6fde3952..0000000000000 --- a/airbyte-integrations/connectors/source-prestashop/integration_tests/future_state.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "addresses": { - "date_upd": "2121-06-16 14:13:26" - }, - "cart_rules": { - "date_upd": "2121-06-16 14:13:26" - }, - "carts": { - "date_upd": "2121-06-16 14:13:26" - }, - "categories": { - "date_upd": "2121-06-16 14:13:26" - }, - "customer_messages": { - "date_upd": "2121-06-16 14:13:26" - }, - "customer_threads": { - "date_upd": "2121-06-16 14:13:26" - }, - "groups": { - "date_upd": "2121-06-16 14:13:26" - }, - "customers": { - "date_upd": "2121-06-16 14:13:26" - }, - "configurations": { - "date_upd": "2121-06-16 14:13:26" - }, - "manufacturers": { - "date_upd": "2121-06-16 14:13:26" - }, - "messages": { - "date_add": "2121-06-16 14:13:26" - }, - "order_carriers": { - "date_add": "2121-06-16 14:13:26" - }, - "order_histories": { - "date_add": "2121-06-16 14:13:26" - }, - "order_invoices": { - "date_add": "2121-06-16 14:13:26" - }, - "order_payments": { - "date_add": "2121-06-16 14:13:26" - }, - "order_slip": { - "date_upd": "2121-06-16 14:13:26" - }, - "orders": { - "date_upd": "2121-06-16 14:13:26" - }, - "products": { - "date_upd": "2121-06-16 14:13:26" - }, - "stock_movement_reasons": { - "date_upd": "2121-06-16 14:13:26" - }, - "stock_movements": { - "date_add": "2121-06-16 14:13:26" - }, - "stores": { - "date_upd": "2121-06-16 14:13:26" - }, - "suppliers": { - "date_upd": "2121-06-16 14:13:26" - }, - "tax_rule_groups": { - "date_upd": "2121-06-16 14:13:26" - } -} diff --git a/airbyte-integrations/connectors/source-prestashop/main.py b/airbyte-integrations/connectors/source-prestashop/main.py index 09ab0e9706e82..ff1dbd5a8190c 100644 --- a/airbyte-integrations/connectors/source-prestashop/main.py +++ b/airbyte-integrations/connectors/source-prestashop/main.py @@ -6,8 +6,8 @@ import sys from airbyte_cdk.entrypoint import launch -from source_prestashop import SourcePrestaShop +from source_prestashop import 
SourcePrestashop if __name__ == "__main__": - source = SourcePrestaShop() + source = SourcePrestashop() launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-prestashop/setup.py b/airbyte-integrations/connectors/source-prestashop/setup.py index 933ed04edaaca..d84be3b868b98 100644 --- a/airbyte-integrations/connectors/source-prestashop/setup.py +++ b/airbyte-integrations/connectors/source-prestashop/setup.py @@ -21,7 +21,7 @@ author_email="contact@airbyte.io", packages=find_packages(), install_requires=MAIN_REQUIREMENTS, - package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, extras_require={ "tests": TEST_REQUIREMENTS, }, diff --git a/airbyte-integrations/connectors/source-prestashop/source_prestashop/__init__.py b/airbyte-integrations/connectors/source-prestashop/source_prestashop/__init__.py index c78c76823e680..4809f5c8e4e67 100644 --- a/airbyte-integrations/connectors/source-prestashop/source_prestashop/__init__.py +++ b/airbyte-integrations/connectors/source-prestashop/source_prestashop/__init__.py @@ -1,28 +1,8 @@ # -# MIT License -# -# Copyright (c) 2020 Airbyte -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
# -from .source import SourcePrestaShop +from .source import SourcePrestashop -__all__ = ["SourcePrestaShop"] +__all__ = ["SourcePrestashop"] diff --git a/airbyte-integrations/connectors/source-prestashop/source_prestashop/prestashop.yaml b/airbyte-integrations/connectors/source-prestashop/source_prestashop/prestashop.yaml new file mode 100644 index 0000000000000..9b743fea3b85c --- /dev/null +++ b/airbyte-integrations/connectors/source-prestashop/source_prestashop/prestashop.yaml @@ -0,0 +1,541 @@ +version: "0.3.0" + +definitions: + selector: + extractor: + field_pointer: ["{{ options['data_key'] }}"] + requester: + url_base: "{{ config['url'] }}/api/" + http_method: "GET" + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['access_key'] }}" + request_options_provider: + request_headers: + Output-Format: JSON + request_parameters: + display: full + limit: "'{{ next_page_token['next_page_token'] or '0' }},50'" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 50 + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + base_incremental_stream: + $ref: "*ref(definitions.base_stream)" + stream_cursor_field: "date_upd" + checkpoint_interval: 500 + retriever: + $ref: "*ref(definitions.retriever)" + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ now_utc() }}" + datetime_format: "%Y-%m-%d %H:%M:%S.%f+00:00" + step: "1y" + datetime_format: "%Y-%m-%d %H:%M:%S" + requester: + $ref: "*ref(definitions.requester)" + request_options_provider: + request_headers: + $ref: "*ref(definitions.requester.request_options_provider.request_headers)" + request_parameters: + $ref: "*ref(definitions.requester.request_options_provider.request_parameters)" + date: "1" + sort: "[{{ options['cursor_field'] }}_ASC,{{ options['primary_key'] }}_ASC]" + "filter[{{ options['cursor_field'] }}]": "[{{ stream_slice['start_time'] }},{{ stream_slice['end_time'] }}]" + addresses_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "addresses" + path: "/addresses" + data_key: "addresses" + primary_key: "id" + cursor_field: "date_upd" + carriers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "carriers" + path: "/carriers" + data_key: "carriers" + primary_key: "id" + cart_rules_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "cart_rules" + path: "/cart_rules" + data_key: "cart_rules" + primary_key: "id" + cursor_field: "date_upd" + carts_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "carts" + path: "/carts" + data_key: "carts" + primary_key: "id" + cursor_field: "date_upd" + categories_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "categories" + path: "/categories" + data_key: "categories" + primary_key: "id" + cursor_field: "date_upd" + combinations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "combinations" + path: "/combinations" + data_key: "combinations" + primary_key: "id" + configurations_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "configurations" + path: "/configurations" + data_key: "configurations" + primary_key: "id" + cursor_field: "date_upd" + 
contacts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "contacts" + path: "/contacts" + data_key: "contacts" + primary_key: "id" + content_management_system_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "content_management_system" + path: "/content_management_system" + data_key: "content_management_system" + primary_key: "id" + countries_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "countries" + path: "/countries" + data_key: "countries" + primary_key: "id" + currencies_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "currencies" + path: "/currencies" + data_key: "currencies" + primary_key: "id" + customer_messages_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "customer_messages" + path: "/customer_messages" + data_key: "customer_messages" + primary_key: "id" + cursor_field: "date_upd" + customers_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "customers" + path: "/customers" + data_key: "customers" + primary_key: "id" + cursor_field: "date_upd" + customer_threads_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "customer_threads" + path: "/customer_threads" + data_key: "customer_threads" + primary_key: "id" + cursor_field: "date_upd" + deliveries_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "deliveries" + path: "/deliveries" + data_key: "deliveries" + primary_key: "id" + employees_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "employees" + path: "/employees" + data_key: "employees" + primary_key: "id" + groups_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "groups" + path: "/groups" + data_key: "groups" + primary_key: "id" + cursor_field: "date_upd" + guests_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "guests" + path: "/guests" + data_key: "guests" + primary_key: "id" + image_types_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "image_types" + path: "/image_types" + data_key: "image_types" + primary_key: "id" + languages_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "languages" + path: "/languages" + data_key: "languages" + primary_key: "id" + manufacturers_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "manufacturers" + path: "/manufacturers" + data_key: "manufacturers" + primary_key: "id" + cursor_field: "date_upd" + messages_stream: + $ref: "*ref(definitions.base_incremental_stream)" + stream_cursor_field: "date_add" + $options: + name: "messages" + path: "/messages" + data_key: "messages" + primary_key: "id" + cursor_field: "date_add" + order_carriers_stream: + $ref: "*ref(definitions.base_incremental_stream)" + stream_cursor_field: "date_add" + $options: + name: "order_carriers" + path: "/order_carriers" + data_key: "order_carriers" + primary_key: "id" + cursor_field: "date_add" + order_details_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "order_details" + path: "/order_details" + data_key: "order_details" + primary_key: "id" + order_histories_stream: + $ref: "*ref(definitions.base_incremental_stream)" + stream_cursor_field: "date_add" + $options: + name: "order_histories" + path: "/order_histories" + data_key: "order_histories" + primary_key: "id" + cursor_field: "date_add" + order_invoices_stream: + $ref: "*ref(definitions.base_incremental_stream)" + stream_cursor_field: "date_add" + $options: + name: 
"order_invoices" + path: "/order_invoices" + data_key: "order_invoices" + primary_key: "id" + cursor_field: "date_add" + order_payments_stream: + $ref: "*ref(definitions.base_incremental_stream)" + stream_cursor_field: "date_add" + $options: + name: "order_payments" + path: "/order_payments" + data_key: "order_payments" + primary_key: "id" + cursor_field: "date_add" + orders_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "orders" + path: "/orders" + data_key: "orders" + primary_key: "id" + cursor_field: "date_upd" + order_slip_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "order_slip" + path: "/order_slip" + data_key: "order_slips" + primary_key: "id" + cursor_field: "date_upd" + order_states_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "order_states" + path: "/order_states" + data_key: "order_states" + primary_key: "id" + price_ranges_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "price_ranges" + path: "/price_ranges" + data_key: "price_ranges" + primary_key: "id" + product_customization_fields_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "product_customization_fields" + path: "/product_customization_fields" + data_key: "customization_fields" + primary_key: "id" + product_features_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "product_features" + path: "/product_features" + data_key: "product_features" + primary_key: "id" + product_feature_values_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "product_feature_values" + path: "/product_feature_values" + data_key: "product_feature_values" + primary_key: "id" + product_options_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "product_options" + path: "/product_options" + data_key: "product_options" + primary_key: "id" + product_option_values_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "product_option_values" + path: "/product_option_values" + data_key: "product_option_values" + primary_key: "id" + products_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "products" + path: "/products" + data_key: "products" + primary_key: "id" + cursor_field: "date_upd" + product_suppliers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "product_suppliers" + path: "/product_suppliers" + data_key: "product_suppliers" + primary_key: "id" + shop_groups_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "shop_groups" + path: "/shop_groups" + data_key: "shop_groups" + primary_key: "id" + shops_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "shops" + path: "/shops" + data_key: "shops" + primary_key: "id" + shop_urls_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "shop_urls" + path: "/shop_urls" + data_key: "shop_urls" + primary_key: "id" + specific_price_rules_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "specific_price_rules" + path: "/specific_price_rules" + data_key: "specific_price_rules" + primary_key: "id" + specific_prices_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "specific_prices" + path: "/specific_prices" + data_key: "specific_prices" + primary_key: "id" + states_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "states" + path: "/states" + data_key: "states" + primary_key: "id" + stock_availables_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: 
"stock_availables" + path: "/stock_availables" + data_key: "stock_availables" + primary_key: "id" + stock_movement_reasons_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "stock_movement_reasons" + path: "/stock_movement_reasons" + data_key: "stock_movement_reasons" + primary_key: "id" + cursor_field: "date_upd" + stock_movements_stream: + $ref: "*ref(definitions.base_incremental_stream)" + stream_cursor_field: "date_add" + $options: + name: "stock_movements" + path: "/stock_movements" + data_key: "stock_mvts" + primary_key: "id" + cursor_field: "date_add" + stores_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "stores" + path: "/stores" + data_key: "stores" + primary_key: "id" + cursor_field: "date_upd" + suppliers_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "suppliers" + path: "/suppliers" + data_key: "suppliers" + primary_key: "id" + cursor_field: "date_upd" + tags_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tags" + path: "/tags" + data_key: "tags" + primary_key: "id" + taxes_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "taxes" + path: "/taxes" + data_key: "taxes" + primary_key: "id" + tax_rule_groups_stream: + $ref: "*ref(definitions.base_incremental_stream)" + $options: + name: "tax_rule_groups" + path: "/tax_rule_groups" + data_key: "tax_rule_groups" + primary_key: "id" + cursor_field: "date_upd" + tax_rules_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tax_rules" + path: "/tax_rules" + data_key: "tax_rules" + primary_key: "id" + translated_configurations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "translated_configurations" + path: "/translated_configurations" + data_key: "translated_configurations" + primary_key: "id" + weight_ranges_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "weight_ranges" + path: "/weight_ranges" + data_key: "weight_ranges" + primary_key: "id" + zones_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "zones" + path: "/zones" + data_key: "zones" + primary_key: "id" + +streams: + - "*ref(definitions.addresses_stream)" + - "*ref(definitions.carriers_stream)" + - "*ref(definitions.cart_rules_stream)" + - "*ref(definitions.carts_stream)" + - "*ref(definitions.categories_stream)" + - "*ref(definitions.combinations_stream)" + - "*ref(definitions.configurations_stream)" + - "*ref(definitions.contacts_stream)" + - "*ref(definitions.content_management_system_stream)" + - "*ref(definitions.countries_stream)" + - "*ref(definitions.currencies_stream)" + - "*ref(definitions.customer_messages_stream)" + - "*ref(definitions.customers_stream)" + - "*ref(definitions.customer_threads_stream)" + - "*ref(definitions.deliveries_stream)" + - "*ref(definitions.employees_stream)" + - "*ref(definitions.groups_stream)" + - "*ref(definitions.guests_stream)" + - "*ref(definitions.image_types_stream)" + - "*ref(definitions.languages_stream)" + - "*ref(definitions.manufacturers_stream)" + - "*ref(definitions.messages_stream)" + - "*ref(definitions.order_carriers_stream)" + - "*ref(definitions.order_details_stream)" + - "*ref(definitions.order_histories_stream)" + - "*ref(definitions.order_invoices_stream)" + - "*ref(definitions.order_payments_stream)" + - "*ref(definitions.orders_stream)" + - "*ref(definitions.order_slip_stream)" + - "*ref(definitions.order_states_stream)" + - "*ref(definitions.price_ranges_stream)" + - 
"*ref(definitions.product_customization_fields_stream)" + - "*ref(definitions.product_features_stream)" + - "*ref(definitions.product_feature_values_stream)" + - "*ref(definitions.product_options_stream)" + - "*ref(definitions.product_option_values_stream)" + - "*ref(definitions.products_stream)" + - "*ref(definitions.product_suppliers_stream)" + - "*ref(definitions.shop_groups_stream)" + - "*ref(definitions.shops_stream)" + - "*ref(definitions.shop_urls_stream)" + - "*ref(definitions.specific_price_rules_stream)" + - "*ref(definitions.specific_prices_stream)" + - "*ref(definitions.states_stream)" + - "*ref(definitions.stock_availables_stream)" + - "*ref(definitions.stock_movement_reasons_stream)" + - "*ref(definitions.stock_movements_stream)" + - "*ref(definitions.stores_stream)" + - "*ref(definitions.suppliers_stream)" + - "*ref(definitions.tags_stream)" + - "*ref(definitions.taxes_stream)" + - "*ref(definitions.tax_rule_groups_stream)" + - "*ref(definitions.tax_rules_stream)" + - "*ref(definitions.translated_configurations_stream)" + - "*ref(definitions.weight_ranges_stream)" + - "*ref(definitions.zones_stream)" + +check: + stream_names: + - "shops" diff --git a/airbyte-integrations/connectors/source-prestashop/source_prestashop/source.py b/airbyte-integrations/connectors/source-prestashop/source_prestashop/source.py index 9be6af3ed7aec..1054b393c01a7 100644 --- a/airbyte-integrations/connectors/source-prestashop/source_prestashop/source.py +++ b/airbyte-integrations/connectors/source-prestashop/source_prestashop/source.py @@ -2,155 +2,60 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # -import re -from base64 import b64encode -from typing import Any, List, Mapping, Tuple -from airbyte_cdk.models import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -from .streams import ( - Addresses, - Carriers, - CartRules, - Carts, - Categories, - Combinations, - Configurations, - Contacts, - ContentManagementSystem, - Countries, - Currencies, - CustomerMessages, - Customers, - CustomerThreads, - Deliveries, - Employees, - Groups, - Guests, - ImageTypes, - Languages, - Manufacturers, - Messages, - OrderCarriers, - OrderDetails, - OrderHistories, - OrderInvoices, - OrderPayments, - Orders, - OrderSlip, - OrderStates, - PriceRanges, - ProductCustomizationFields, - ProductFeatures, - ProductFeatureValues, - ProductOptions, - ProductOptionValues, - Products, - ProductSuppliers, - ShopGroups, - Shops, - ShopUrls, - SpecificPriceRules, - SpecificPrices, - States, - StockAvailables, - StockMovementReasons, - StockMovements, - Stores, - Suppliers, - Tags, - Taxes, - TaxRuleGroups, - TaxRules, - TranslatedConfigurations, - WeightRanges, - Zones, +WARNING: Do not modify this file. 
+""" + +import logging +from typing import Any, Iterator, List, Mapping, MutableMapping, Union + +from airbyte_cdk.models import ( + AirbyteCatalog, + AirbyteConnectionStatus, + AirbyteMessage, + AirbyteStateMessage, + ConfiguredAirbyteCatalog, + Status, ) +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + + +class ConfigException(Exception): + pass -class SourcePrestaShop(AbstractSource): +# Declarative Source +class SourcePrestashop(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "prestashop.yaml"}) + def _validate_and_transform(self, config: Mapping[str, Any]): if not config.get("_allow_http"): - if re.match(r"^http://", config["url"], re.I): - raise Exception(f"Invalid url: {config['url']}, only https scheme is allowed") + if not config["url"].lower().startswith("https://"): + raise ConfigException(f"Invalid url: {config['url']}, only https scheme is allowed") return config - @staticmethod - def get_authenticator(config: Mapping[str, Any]): - token = b64encode(bytes(config["access_key"] + ":", "utf-8")).decode("ascii") - authenticator = TokenAuthenticator(token, auth_method="Basic") - return authenticator - - def check_connection(self, logger, config) -> Tuple[bool, any]: + def discover(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteCatalog: config = self._validate_and_transform(config) - authenticator = self.get_authenticator(config) - shops = Shops(authenticator=authenticator, url=config["url"]).read_records(sync_mode=SyncMode.full_refresh) - next(shops) - return True, None - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: + return super().discover(logger, config) + + def check(self, logger: logging.Logger, config: Mapping[str, Any]) -> AirbyteConnectionStatus: + try: + config = self._validate_and_transform(config) + except ConfigException as e: + return AirbyteConnectionStatus(status=Status.FAILED, message=str(e)) + return super().check(logger, config) + + def read( + self, + logger: logging.Logger, + config: Mapping[str, Any], + catalog: ConfiguredAirbyteCatalog, + state: Union[List[AirbyteStateMessage], MutableMapping[str, Any]] = None, + ) -> Iterator[AirbyteMessage]: config = self._validate_and_transform(config) - authenticator = self.get_authenticator(config) - stream_classes = [ - Addresses, - Carriers, - CartRules, - Carts, - Categories, - Combinations, - Configurations, - Contacts, - ContentManagementSystem, - Countries, - Currencies, - CustomerMessages, - CustomerThreads, - Customers, - Deliveries, - Employees, - Groups, - Guests, - ImageTypes, - Languages, - Manufacturers, - Messages, - OrderCarriers, - OrderDetails, - OrderHistories, - OrderInvoices, - OrderPayments, - OrderSlip, - OrderStates, - Orders, - PriceRanges, - ProductCustomizationFields, - ProductFeatureValues, - ProductFeatures, - ProductOptionValues, - ProductOptions, - ProductSuppliers, - Products, - ShopGroups, - ShopUrls, - Shops, - SpecificPriceRules, - SpecificPrices, - States, - StockAvailables, - StockMovementReasons, - StockMovements, - Stores, - Suppliers, - Tags, - TaxRuleGroups, - TaxRules, - Taxes, - TranslatedConfigurations, - WeightRanges, - Zones, - ] - - return [cls(authenticator=authenticator, url=config["url"]) for cls in stream_classes] + return super().read(logger, config, catalog, state) diff --git a/airbyte-integrations/connectors/source-prestashop/source_prestashop/spec.json b/airbyte-integrations/connectors/source-prestashop/source_prestashop/spec.json deleted file mode 
100644 index 96fd0815e416a..0000000000000 --- a/airbyte-integrations/connectors/source-prestashop/source_prestashop/spec.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "documentationUrl": "https://docsurl.com", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "PrestaShop Spec", - "type": "object", - "required": ["url", "access_key"], - "properties": { - "url": { - "type": "string", - "description": "Shop URL without trailing slash (domain name or IP address)" - }, - "access_key": { - "type": "string", - "description": "Your PrestaShop access key. See the docs for info on how to obtain this.", - "airbyte_secret": true - } - } - } -} diff --git a/airbyte-integrations/connectors/source-prestashop/source_prestashop/spec.yaml b/airbyte-integrations/connectors/source-prestashop/source_prestashop/spec.yaml new file mode 100644 index 0000000000000..018494defce9c --- /dev/null +++ b/airbyte-integrations/connectors/source-prestashop/source_prestashop/spec.yaml @@ -0,0 +1,29 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/presta-shop +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: PrestaShop Spec + type: object + required: + - access_key + - url + - start_date + properties: + access_key: + type: string + title: Access Key + description: Your PrestaShop access key. See the docs for info on how to obtain this. + order: 0 + airbyte_secret: true + url: + type: string + title: Shop URL + description: Shop URL without trailing slash. + order: 1 + start_date: + type: string + title: Start date + description: The Start date in the format YYYY-MM-DD. + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + order: 2 diff --git a/airbyte-integrations/connectors/source-punk-api/.dockerignore b/airbyte-integrations/connectors/source-punk-api/.dockerignore new file mode 100644 index 0000000000000..6463725c508f6 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_punk_api +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-punk-api/Dockerfile b/airbyte-integrations/connectors/source-punk-api/Dockerfile new file mode 100644 index 0000000000000..675ca5d0790a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_punk_api ./source_punk_api + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-punk-api diff --git a/airbyte-integrations/connectors/source-punk-api/README.md b/airbyte-integrations/connectors/source-punk-api/README.md new file mode 100644 index 0000000000000..2d2f0f3139974 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/README.md @@ -0,0 +1,102 @@ +# Punk Api Source + +This is the repository for the Punk Api configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/punk-api). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-punk-api:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/punk-api) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_punk_api/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source punk-api test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-punk-api:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-punk-api:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-punk-api:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-punk-api:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-punk-api:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-punk-api:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-punk-api:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-punk-api:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-punk-api/__init__.py b/airbyte-integrations/connectors/source-punk-api/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-punk-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-punk-api/acceptance-test-config.yml new file mode 100644 index 0000000000000..63b3771f1518e --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/acceptance-test-config.yml @@ -0,0 +1,24 @@ + +connector_image: airbyte/source-punk-api:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_punk_api/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-punk-api/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-punk-api/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-punk-api/bootstrap.md b/airbyte-integrations/connectors/source-punk-api/bootstrap.md new file mode 100644 index 0000000000000..505cba3d16dbb --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/bootstrap.md @@ -0,0 +1,34 @@ +# Punk-API + +The connector uses the v2 API documented here: https://punkapi.com/documentation/v2. It is a +straightforward HTTP REST API. + +## API key + +An API key is not required for this connector to work, but a dummy key (example: `123`) needs to be passed so it can be used in future versions. +Just pass the dummy API key and the optional parameters to establish the connection. + +## Implementation details + +## Setup guide + +### Step 1: Set up the Punk-API connection + +- Pass a dummy API key (example: 12345) +- Params (optional ID) + +## Step 2: Generate the schema for the endpoint + +A custom schema is generated and tested with different IDs. + +## Step 3: Configure the spec, secrets, and connector YAML files with reference to the Airbyte documentation + +## In a nutshell: + +1. Navigate to the Airbyte Open Source dashboard. +2. Set the name for your source. +3. Enter your dummy `api_key`. +4. Enter the params configuration if needed: ID (optional); a minimal example config is sketched below. +5. Click **Set up source**.
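For reference, a config satisfying `source_punk_api/spec.yaml` can be assembled from the values used in `integration_tests/sample_config.json`. The sketch below is illustrative only (the values are dummies); it simply writes such a config to `secrets/config.json` for local runs:

```python
# Hedged sketch: build a dummy config using the fields from
# integration_tests/sample_config.json, verify it against the MM-YYYY
# pattern declared in spec.yaml, and save it where the local
# `check`/`read` commands expect it.
import json
import re

config = {"id": "1", "brewed_before": "10-2020", "brewed_after": "10-2010"}
assert all(re.fullmatch(r"[0-9]{2}-[0-9]{4}", config[k]) for k in ("brewed_before", "brewed_after"))

with open("secrets/config.json", "w") as f:
    json.dump(config, f, indent=2)
```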
+ + * We use only GET methods, towards the beers endpoints which is straightforward \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-punk-api/build.gradle b/airbyte-integrations/connectors/source-punk-api/build.gradle new file mode 100644 index 0000000000000..5027bccce1171 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_punk_api' +} diff --git a/airbyte-integrations/connectors/source-punk-api/integration_tests/__init__.py b/airbyte-integrations/connectors/source-punk-api/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-punk-api/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-punk-api/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-punk-api/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-punk-api/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..10f81a9aa2d4c --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/integration_tests/configured_catalog.json @@ -0,0 +1,22 @@ +{ + "streams": [ + { + "stream": { + "name": "beers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "beers_with_id", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-punk-api/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-punk-api/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..f3732995784f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "todo-wrong-field": "this should be an incomplete config file, used in standard tests" +} diff --git a/airbyte-integrations/connectors/source-punk-api/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-punk-api/integration_tests/sample_config.json new file mode 100644 index 0000000000000..a548a84770124 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "id": "1", + "brewed_before": "10-2020", + "brewed_after": "10-2010" +} diff --git a/airbyte-integrations/connectors/source-punk-api/main.py b/airbyte-integrations/connectors/source-punk-api/main.py new file 
mode 100644 index 0000000000000..7a7d32b27a7e4 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_punk_api import SourcePunkApi + +if __name__ == "__main__": + source = SourcePunkApi() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-punk-api/requirements.txt b/airbyte-integrations/connectors/source-punk-api/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-punk-api/setup.py b/airbyte-integrations/connectors/source-punk-api/setup.py new file mode 100644 index 0000000000000..bb920efc7d2a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_punk_api", + description="Source implementation for Punk Api.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-punk-api/source_punk_api/__init__.py b/airbyte-integrations/connectors/source-punk-api/source_punk_api/__init__.py new file mode 100644 index 0000000000000..5ae4a11cd2bdd --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/source_punk_api/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourcePunkApi + +__all__ = ["SourcePunkApi"] diff --git a/airbyte-integrations/connectors/source-punk-api/source_punk_api/punk_api.yaml b/airbyte-integrations/connectors/source-punk-api/source_punk_api/punk_api.yaml new file mode 100644 index 0000000000000..fa6ea890b71a2 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/source_punk_api/punk_api.yaml @@ -0,0 +1,95 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.punkapi.com/v2/" + http_method: "GET" + request_options_provider: + request_parameters: + id: | + {{ + config['id'] + if options['name'] == 'beers_with_id' + else '' + }} + + stream_slicer: + type: DatetimeStreamSlicer + start_datetime: + datetime: "{{ config['brewed_after'] }}" + format: "mm-YYYY" + end_datetime: + datetime: "{{ config['brewed_before'] }}" + format: "mm-YYYY" + # end_datetime: + # datetime: "{{ now_local() }}" + # format: "%Y-%m-%d %H:%M:%S.%f" + step: "1m" + datetime_format: "mm-YYYY" + cursor_field: airdate + # start_time_option: + # field_name: "date" + # inject_into: "request_parameter" + + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + # stream_slicer: + # $ref: "*ref(definitions.stream_slicer)" + + base_stream: + schema_loader: + type: JsonSchema + file_path: "./source_punk_api/schemas/{{ options['name'] }}.json" + retriever: + $ref: "*ref(definitions.retriever)" + + page_stream: + schema_loader: + type: JsonSchema + file_path: "./source_punk_api/schemas/{{ options['name'] }}.json" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "PageIncrement" + page_size: 25 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "request_parameter" + field_name: "per_page" + requester: + $ref: "*ref(definitions.requester)" + + beers_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "beers" + path: "/beers" + + beers_with_id_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "beers_with_id" + path: "/beers?{{ config['brewed_after'] }}" + +streams: + - "*ref(definitions.beers_stream)" + - "*ref(definitions.beers_with_id_stream)" + +check: + stream_names: + - "beers" + - "beers_with_id" diff --git a/airbyte-integrations/connectors/source-punk-api/source_punk_api/schemas/beers.json b/airbyte-integrations/connectors/source-punk-api/source_punk_api/schemas/beers.json new file mode 100644 index 0000000000000..0a4d5c831b898 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/source_punk_api/schemas/beers.json @@ -0,0 +1,358 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667227839.json", + "title": "Root", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "id": { + "$id": "#root/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/items/name", + "title": "Name", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "tagline": { + "$id": "#root/items/tagline", + "title": "Tagline", + "type": ["null", "array", "object", "string"], + "default": "", + 
"pattern": "^.*$" + }, + "first_brewed": { + "$id": "#root/items/first_brewed", + "title": "First_brewed", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "description": { + "$id": "#root/items/description", + "title": "Description", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "image_url": { + "$id": "#root/items/image_url", + "title": "Image_url", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "abv": { + "$id": "#root/items/abv", + "title": "Abv", + "type": "number", + "default": 0.0 + }, + "ibu": { + "$id": "#root/items/ibu", + "title": "Ibu", + "type": ["null", "number", "integer"], + "default": 0 + }, + "target_fg": { + "$id": "#root/items/target_fg", + "title": "Target_fg", + "type": ["null", "integer", "number"], + "default": 0 + }, + "target_og": { + "$id": "#root/items/target_og", + "title": "Target_og", + "type": ["null", "number", "integer"], + "default": 0 + }, + "ebc": { + "$id": "#root/items/ebc", + "title": "Ebc", + "type": ["null", "number", "integer", "string"], + "default": 0 + }, + "srm": { + "$id": "#root/items/srm", + "title": "Srm", + "type": ["null", "number", "integer", "string"], + "default": 0 + }, + "ph": { + "$id": "#root/items/ph", + "title": "Ph", + "type": ["null", "number", "integer", "string"], + "default": 0.0 + }, + "attenuation_level": { + "$id": "#root/items/attenuation_level", + "title": "Attenuation_level", + "type": ["null", "number", "integer", "string"], + "default": 0 + }, + "volume": { + "$id": "#root/items/volume", + "title": "Volume", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/volume/value", + "title": "Value", + "type": ["null", "number", "integer"], + "default": 0 + }, + "unit": { + "$id": "#root/items/volume/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "boil_volume": { + "$id": "#root/items/boil_volume", + "title": "Boil_volume", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/boil_volume/value", + "title": "Value", + "type": ["null", "number", "integer"], + "default": 0 + }, + "unit": { + "$id": "#root/items/boil_volume/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "method": { + "$id": "#root/items/method", + "title": "Method", + "type": ["null", "object"], + "properties": { + "mash_temp": { + "$id": "#root/items/method/mash_temp", + "title": "Mash_temp", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/method/mash_temp/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "temp": { + "$id": "#root/items/method/mash_temp/items/temp", + "title": "Temp", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/method/mash_temp/items/temp/value", + "title": "Value", + "type": ["null", "integer", "number"], + "default": 0 + }, + "unit": { + "$id": "#root/items/method/mash_temp/items/temp/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "duration": { + "$id": "#root/items/method/mash_temp/items/duration", + "title": "Duration", + "type": ["null", "number", "integer"], + "default": 0 + } + } + } + }, + "fermentation": { + "$id": "#root/items/method/fermentation", + "title": "Fermentation", + "type": ["null", 
"object"], + "properties": { + "temp": { + "$id": "#root/items/method/fermentation/temp", + "title": "Temp", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/method/fermentation/temp/value", + "title": "Value", + "type": ["null", "integer", "number"], + "default": 0 + }, + "unit": { + "$id": "#root/items/method/fermentation/temp/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + } + }, + "twist": { + "$id": "#root/items/method/twist", + "title": "Twist", + "type": ["null", "number", "integer", "string"], + "default": null + } + } + }, + "ingredients": { + "$id": "#root/items/ingredients", + "title": "Ingredients", + "type": ["null", "object"], + "properties": { + "malt": { + "$id": "#root/items/ingredients/malt", + "title": "Malt", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/ingredients/malt/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "name": { + "$id": "#root/items/ingredients/malt/items/name", + "title": "Name", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "amount": { + "$id": "#root/items/ingredients/malt/items/amount", + "title": "Amount", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/ingredients/malt/items/amount/value", + "title": "Value", + "type": "number", + "default": 0.0 + }, + "unit": { + "$id": "#root/items/ingredients/malt/items/amount/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "hops": { + "$id": "#root/items/ingredients/hops", + "title": "Hops", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/ingredients/hops/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "name": { + "$id": "#root/items/ingredients/hops/items/name", + "title": "Name", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "amount": { + "$id": "#root/items/ingredients/hops/items/amount", + "title": "Amount", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/ingredients/hops/items/amount/value", + "title": "Value", + "type": ["null", "number", "integer"], + "default": 0 + }, + "unit": { + "$id": "#root/items/ingredients/hops/items/amount/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "add": { + "$id": "#root/items/ingredients/hops/items/add", + "title": "Add", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "attribute": { + "$id": "#root/items/ingredients/hops/items/attribute", + "title": "Attribute", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + }, + "yeast": { + "$id": "#root/items/ingredients/yeast", + "title": "Yeast", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "food_pairing": { + "$id": "#root/items/food_pairing", + "title": "Food_pairing", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/food_pairing/items", + "title": "Items", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + }, + "brewers_tips": { + "$id": "#root/items/brewers_tips", + "title": "Brewers_tips", + "type": ["null", 
"array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "contributed_by": { + "$id": "#root/items/contributed_by", + "title": "Contributed_by", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } +} diff --git a/airbyte-integrations/connectors/source-punk-api/source_punk_api/schemas/beers_with_id.json b/airbyte-integrations/connectors/source-punk-api/source_punk_api/schemas/beers_with_id.json new file mode 100644 index 0000000000000..f000365e26aa4 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/source_punk_api/schemas/beers_with_id.json @@ -0,0 +1,358 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667227839.json", + "title": "Root", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "id": { + "$id": "#root/items/id", + "title": "Id", + "type": ["null", "integer", "number"], + "default": 0 + }, + "name": { + "$id": "#root/items/name", + "title": "Name", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "tagline": { + "$id": "#root/items/tagline", + "title": "Tagline", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "first_brewed": { + "$id": "#root/items/first_brewed", + "title": "First_brewed", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "description": { + "$id": "#root/items/description", + "title": "Description", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "image_url": { + "$id": "#root/items/image_url", + "title": "Image_url", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "abv": { + "$id": "#root/items/abv", + "title": "Abv", + "type": "number", + "default": 0.0 + }, + "ibu": { + "$id": "#root/items/ibu", + "title": "Ibu", + "type": ["null", "number", "integer"], + "default": 0 + }, + "target_fg": { + "$id": "#root/items/target_fg", + "title": "Target_fg", + "type": ["null", "integer", "number"], + "default": 0 + }, + "target_og": { + "$id": "#root/items/target_og", + "title": "Target_og", + "type": ["null", "number", "integer"], + "default": 0 + }, + "ebc": { + "$id": "#root/items/ebc", + "title": "Ebc", + "type": ["null", "number", "integer", "string"], + "default": 0 + }, + "srm": { + "$id": "#root/items/srm", + "title": "Srm", + "type": ["null", "number", "integer", "string"], + "default": 0 + }, + "ph": { + "$id": "#root/items/ph", + "title": "Ph", + "type": ["null", "number", "integer", "string"], + "default": 0.0 + }, + "attenuation_level": { + "$id": "#root/items/attenuation_level", + "title": "Attenuation_level", + "type": ["null", "number", "integer", "string"], + "default": 0 + }, + "volume": { + "$id": "#root/items/volume", + "title": "Volume", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/volume/value", + "title": "Value", + "type": ["null", "number", "integer"], + "default": 0 + }, + "unit": { + "$id": "#root/items/volume/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "boil_volume": { + "$id": "#root/items/boil_volume", + "title": "Boil_volume", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/boil_volume/value", + "title": "Value", 
+ "type": ["null", "number", "integer"], + "default": 0 + }, + "unit": { + "$id": "#root/items/boil_volume/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "method": { + "$id": "#root/items/method", + "title": "Method", + "type": ["null", "object"], + "properties": { + "mash_temp": { + "$id": "#root/items/method/mash_temp", + "title": "Mash_temp", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/method/mash_temp/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "temp": { + "$id": "#root/items/method/mash_temp/items/temp", + "title": "Temp", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/method/mash_temp/items/temp/value", + "title": "Value", + "type": ["null", "integer", "number"], + "default": 0 + }, + "unit": { + "$id": "#root/items/method/mash_temp/items/temp/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "duration": { + "$id": "#root/items/method/mash_temp/items/duration", + "title": "Duration", + "type": ["null", "number", "integer"], + "default": 0 + } + } + } + }, + "fermentation": { + "$id": "#root/items/method/fermentation", + "title": "Fermentation", + "type": ["null", "object"], + "properties": { + "temp": { + "$id": "#root/items/method/fermentation/temp", + "title": "Temp", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/method/fermentation/temp/value", + "title": "Value", + "type": ["null", "integer", "number"], + "default": 0 + }, + "unit": { + "$id": "#root/items/method/fermentation/temp/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + } + }, + "twist": { + "$id": "#root/items/method/twist", + "title": "Twist", + "type": ["null", "number", "integer", "string"], + "default": null + } + } + }, + "ingredients": { + "$id": "#root/items/ingredients", + "title": "Ingredients", + "type": ["null", "object"], + "properties": { + "malt": { + "$id": "#root/items/ingredients/malt", + "title": "Malt", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/ingredients/malt/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "name": { + "$id": "#root/items/ingredients/malt/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "amount": { + "$id": "#root/items/ingredients/malt/items/amount", + "title": "Amount", + "type": ["null", "object"], + "properties": { + "value": { + "$id": "#root/items/ingredients/malt/items/amount/value", + "title": "Value", + "type": "number", + "default": 0.0 + }, + "unit": { + "$id": "#root/items/ingredients/malt/items/amount/unit", + "title": "Unit", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "hops": { + "$id": "#root/items/ingredients/hops", + "title": "Hops", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/ingredients/hops/items", + "title": "Items", + "type": ["null", "object"], + "properties": { + "name": { + "$id": "#root/items/ingredients/hops/items/name", + "title": "Name", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "amount": { + "$id": "#root/items/ingredients/hops/items/amount", + "title": "Amount", + "type": ["null", 
"object"], + "properties": { + "value": { + "$id": "#root/items/ingredients/hops/items/amount/value", + "title": "Value", + "type": ["null", "number", "integer"], + "default": 0 + }, + "unit": { + "$id": "#root/items/ingredients/hops/items/amount/unit", + "title": "Unit", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + }, + "add": { + "$id": "#root/items/ingredients/hops/items/add", + "title": "Add", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "attribute": { + "$id": "#root/items/ingredients/hops/items/attribute", + "title": "Attribute", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + }, + "yeast": { + "$id": "#root/items/ingredients/yeast", + "title": "Yeast", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + }, + "food_pairing": { + "$id": "#root/items/food_pairing", + "title": "Food_pairing", + "type": ["null", "array", "object"], + "default": [], + "items": { + "$id": "#root/items/food_pairing/items", + "title": "Items", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + }, + "brewers_tips": { + "$id": "#root/items/brewers_tips", + "title": "Brewers_tips", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + }, + "contributed_by": { + "$id": "#root/items/contributed_by", + "title": "Contributed_by", + "type": ["null", "array", "object", "string"], + "default": "", + "pattern": "^.*$" + } + } + } +} diff --git a/airbyte-integrations/connectors/source-punk-api/source_punk_api/source.py b/airbyte-integrations/connectors/source-punk-api/source_punk_api/source.py new file mode 100644 index 0000000000000..fc86b16fae5d2 --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/source_punk_api/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourcePunkApi(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "punk_api.yaml"}) diff --git a/airbyte-integrations/connectors/source-punk-api/source_punk_api/spec.yaml b/airbyte-integrations/connectors/source-punk-api/source_punk_api/spec.yaml new file mode 100644 index 0000000000000..1d7da8b0a768d --- /dev/null +++ b/airbyte-integrations/connectors/source-punk-api/source_punk_api/spec.yaml @@ -0,0 +1,31 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/punk-api +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Punk Api Spec + type: object + required: + - brewed_before + - brewed_after + additionalProperties: true + properties: + id: + title: Beers with specific ID + type: string + description: To extract specific data with Unique ID + examples: + - 1 + - 22 + brewed_before: + title: Brewed before data to get incremental reads + type: string + description: To extract specific data with Unique ID + pattern: ^[0-9]{2}-[0-9]{4}$ + examples: + - MM-YYYY + brewed_after: + title: Brewed after data to get incremental reads + type: string + description: To extract specific data with Unique ID + pattern: ^[0-9]{2}-[0-9]{4}$ + examples: + - MM-YYYY diff --git a/airbyte-integrations/connectors/source-pypi/.dockerignore b/airbyte-integrations/connectors/source-pypi/.dockerignore new file mode 100644 index 0000000000000..c0cf1a30c41d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_pypi +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-pypi/Dockerfile b/airbyte-integrations/connectors/source-pypi/Dockerfile new file mode 100644 index 0000000000000..a66fa8d7efdbd --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_pypi ./source_pypi + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-pypi diff --git a/airbyte-integrations/connectors/source-pypi/README.md b/airbyte-integrations/connectors/source-pypi/README.md new file mode 100644 index 0000000000000..f3411d8bae3a9 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/README.md @@ -0,0 +1,79 @@ +# Pypi Source + +This is the repository for the Pypi configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/pypi). 
+ +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-pypi:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/pypi) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_pypi/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source pypi test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-pypi:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-pypi:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-pypi:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-pypi:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-pypi:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-pypi:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-pypi:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-pypi:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. 
Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-pypi/__init__.py b/airbyte-integrations/connectors/source-pypi/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-pypi/acceptance-test-config.yml b/airbyte-integrations/connectors/source-pypi/acceptance-test-config.yml new file mode 100644 index 0000000000000..5aa563a20c368 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/acceptance-test-config.yml @@ -0,0 +1,27 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-pypi:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_pypi/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-pypi/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-pypi/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-pypi/build.gradle b/airbyte-integrations/connectors/source-pypi/build.gradle new file mode 100644 index 0000000000000..08ad52762c4a9 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_pypi' +} diff --git a/airbyte-integrations/connectors/source-pypi/integration_tests/__init__.py b/airbyte-integrations/connectors/source-pypi/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-pypi/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-pypi/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-pypi/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-pypi/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..86da23dc9ac86 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "project", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "release", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "stats", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-pypi/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-pypi/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..f78166d1ef17f --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "project_name": 22, + "version": false +} diff --git a/airbyte-integrations/connectors/source-pypi/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-pypi/integration_tests/sample_config.json new file mode 100644 index 0000000000000..546221778209d --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "project_name": "sampleproject", + "version": "1.2.0" +} diff --git a/airbyte-integrations/connectors/source-pypi/main.py b/airbyte-integrations/connectors/source-pypi/main.py new file mode 100644 index 0000000000000..17000165c3bf9 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_pypi import SourcePypi + +if __name__ == "__main__": + source = SourcePypi() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-pypi/requirements.txt b/airbyte-integrations/connectors/source-pypi/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
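The acceptance `connection` test cases feed `check` a valid config (`secrets/config.json`, for which the `sample_config.json` above is a template) and `integration_tests/invalid_config.json`, expecting success and failure respectively. A hedged sketch of the equivalent call from Python (the outcomes are expectations, not guarantees):

```python
# Hedged sketch: run the connection check against both fixture configs.
import logging

from source_pypi import SourcePypi

logger = logging.getLogger("airbyte")
source = SourcePypi()

good = {"project_name": "sampleproject", "version": "1.2.0"}  # sample_config.json
bad = {"project_name": 22, "version": False}                  # invalid_config.json

print(source.check(logger, good).status)  # expected: Status.SUCCEEDED
print(source.check(logger, bad).status)   # expected: Status.FAILED
```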
diff --git a/airbyte-integrations/connectors/source-pypi/setup.py b/airbyte-integrations/connectors/source-pypi/setup.py new file mode 100644 index 0000000000000..ec0aa103d81fe --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_pypi", + description="Source implementation for Pypi.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/__init__.py b/airbyte-integrations/connectors/source-pypi/source_pypi/__init__.py new file mode 100644 index 0000000000000..0e2a2c8c59967 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourcePypi + +__all__ = ["SourcePypi"] diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/pypi.yaml b/airbyte-integrations/connectors/source-pypi/source_pypi/pypi.yaml new file mode 100644 index 0000000000000..b6f64ce312a08 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/pypi.yaml @@ -0,0 +1,57 @@ +version: "0.1.0" + +definitions: + schema_loader: + type: JsonSchema + file_path: "./source_pypi/schemas/{{ options['name'] }}.json" + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://pypi.org" + http_method: "GET" + request_options_provider: + request_headers: + User-Agent: "Airbyte" + Accept: "application/json" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + schema_loader: + $ref: "*ref(definitions.schema_loader)" + retriever: + $ref: "*ref(definitions.retriever)" + + project_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "project" + path: "/pypi/{{ config['project_name'] }}/json" + + release_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "release" + path: "/pypi/{{ config['project_name'] }}/{{ config['version'] }}/json" + + stats_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "stats" + path: "/stats" + +streams: + - "*ref(definitions.project_stream)" + - "*ref(definitions.release_stream)" + - "*ref(definitions.stats_stream)" + +check: + stream_names: + - "project" + - "release" + - "stats" diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/project.json b/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/project.json new file mode 100644 index 0000000000000..e2952840224b8 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/project.json @@ -0,0 +1,369 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Warehouse JSON API version 1.0", + "type": "object", + "required": ["info", "last_serial", "releases", "urls"], + "properties": { + "info": { + "type": "object", + "description": "Generic information about a specific version of a project", + "required": [ + "author", + "author_email", + "license", + 
"name", + "project_url", + "version", + "yanked", + "yanked_reason" + ], + "properties": { + "author": { + "description": "The name of the company or individual who created the project", + "type": "string" + }, + "author_email": { + "description": "The author's email address", + "type": "string" + }, + "bugtrack_url": { + "description": "URL to find issues and bugs for the project", + "type": ["string", "null"] + }, + "classifiers": { + "description": "Trove Classifier. Corresponds to https://packaging.python.org/specifications/core-metadata/#classifier-multiple-use", + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "description": "Corresponds to https://packaging.python.org/specifications/core-metadata/#description", + "type": "string" + }, + "description_content_type": { + "description": "Corresponds to https://packaging.python.org/specifications/core-metadata/#description-content-type", + "type": ["string", "null"] + }, + "docs_url": { + "description": "URL to the project's documentation", + "type": ["string", "null"] + }, + "download_url": { + "description": "[DEPRECATED]", + "type": ["string", "null"] + }, + "downloads": { + "description": "[DEPRECATED]", + "type": "object" + }, + "home_page": { + "description": "URL to project home page", + "type": "string" + }, + "keywords": { + "description": "Keywords to use for project searching", + "type": "string" + }, + "license": { + "description": "Project's open source license", + "type": ["string", "null"] + }, + "maintainer": { + "description": "Project maintainer name", + "type": ["string", "null"] + }, + "maintainer_email": { + "description": "Project maintainer email address", + "type": ["string", "null"] + }, + "name": { + "description": "Project's raw (non-normailzed name)", + "type": "string" + }, + "package_url": { + "description": "URL to the project page", + "type": "string" + }, + "platform": { + "description": "[DEPRECATED]", + "type": ["null", "string"] + }, + "project_url": { + "description": "URL to the project page", + "type": "string" + }, + "project_urls": { + "description": "Additional URLs that are relevant to your project. Corresponds to https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use", + "patternProperties": { + ".*": { + "type": ["null", "string"] + } + } + }, + "release_url": { + "description": "URL of the release page of the version of the project", + "type": "string" + }, + "requires_dist": { + "description": "Calculated project dependencies. Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-dist-multiple-use", + "type": ["array", "null"], + "items": { + "type": "string" + } + }, + "requires_python": { + "description": "Python runtime version required for project. Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-python", + "type": ["string", "null"] + }, + "summary": { + "description": "A one-line summary of what the distribution does.. Corresponds to https://packaging.python.org/specifications/core-metadata/#summary", + "type": ["string", "null"] + }, + "version": { + "description": "A string containing the distributions version number in the format specified in PEP 440. Corresponds to https://packaging.python.org/specifications/core-metadata/#version", + "type": "string" + }, + "yanked": { + "description": "If the version has been yanked. 
As defined in PEP 592", + "type": "boolean" + }, + "yanked_reason": { + "description": "Reason for applying PEP 592 version yank", + "type": ["string", "null"] + } + } + }, + "last_serial": { + "type": "integer", + "description": "Monotonically increasing integer sequence that changes every time the project is updated" + }, + "releases": { + "type": "object", + "patternProperties": { + ".*": { + "version_urls": { + "type": "array", + "description": "A list of release artifacts associated with a version", + "items": { + "release_file": { + "description": "A single downloadable and installable artifact", + "type": "object", + "required": [ + "digests", + "filename", + "packagetype", + "size", + "upload_time_iso_8601", + "url", + "yanked", + "yanked_reason" + ], + "properties": { + "comment_text": { + "description": "[DEPRECATED]", + "type": "string" + }, + "digests": { + "description": "The file checksums", + "type": "object", + "properties": { + "md5": { + "description": "The MD5 checksum of the release file", + "type": "string" + }, + "sha256": { + "description": "The SHA256 checksum of the release file", + "type": "string" + } + } + }, + "downloads": { + "description": "[DEPRECATED]", + "type": "integer" + }, + "filename": { + "description": "Full filename (including extension)", + "type": "string" + }, + "has_sig": { + "description": "Indicates whether a $(filename).asc GPG signature file was provided", + "type": "boolean" + }, + "md5_digest": { + "description": "[DEPRECATED]", + "type": "string" + }, + "packagetype": { + "description": "Release file type: 'sdist', 'bdist_wheel', etc", + "type": "string" + }, + "python_version": { + "description": "Can be 'source' or Python Tag as defined in https://www.python.org/dev/peps/pep-0425/#python-tag", + "type": "string" + }, + "requires_python": { + "description": "Python runtime version required for project. 
Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-python", + "type": ["string", "null"] + }, + "size": { + "description": "File size in bytes", + "type": "integer" + }, + "upload_time": { + "description": "strftime('%Y-%m-%dT%H:%M:%S') of when the release file was uploaded", + "type": "string" + }, + "upload_time_iso_8601": { + "description": "ISO 8601 timestamp of when the release file was uploaded", + "type": "string" + }, + "url": { + "description": "Downloadable URL of the release file", + "type": "string" + }, + "yanked": { + "description": "Is release file PEP 592 yanked", + "type": "boolean" + }, + "yanked_reason": { + "description": "PEP 592 reason for yanking release file", + "type": ["string", "null"] + } + } + } + } + } + } + } + }, + "urls": { + "type": "array", + "description": "A list of release artifacts associated with a version", + "items": { + "release_file": { + "description": "A single downloadable and installable artifact", + "type": "object", + "required": [ + "digests", + "filename", + "packagetype", + "size", + "upload_time_iso_8601", + "url", + "yanked", + "yanked_reason" + ], + "properties": { + "comment_text": { + "description": "[DEPRECATED]", + "type": "string" + }, + "digests": { + "description": "The file checksums", + "type": "object", + "properties": { + "md5": { + "description": "The MD5 checksum of the release file", + "type": "string" + }, + "sha256": { + "description": "The SHA256 checksum of the release file", + "type": "string" + } + } + }, + "downloads": { + "description": "[DEPRECATED]", + "type": "integer" + }, + "filename": { + "description": "Full filename (including extension)", + "type": "string" + }, + "has_sig": { + "description": "Indicates whether a $(filename).asc GPG signature file was provided", + "type": "boolean" + }, + "md5_digest": { + "description": "[DEPRECATED]", + "type": "string" + }, + "packagetype": { + "description": "Release file type: 'sdist', 'bdist_wheel', etc", + "type": "string" + }, + "python_version": { + "description": "Can be 'source' or Python Tag as defined in https://www.python.org/dev/peps/pep-0425/#python-tag", + "type": "string" + }, + "requires_python": { + "description": "Python runtime version required for project. 
Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-python", + "type": ["string", "null"] + }, + "size": { + "description": "File size in bytes", + "type": "integer" + }, + "upload_time": { + "description": "strftime('%Y-%m-%dT%H:%M:%S') of when the release file was uploaded", + "type": "string" + }, + "upload_time_iso_8601": { + "description": "ISO 8601 timestamp of when the release file was uploaded", + "type": "string" + }, + "url": { + "description": "Downloadable URL of the release file", + "type": "string" + }, + "yanked": { + "description": "Is release file PEP 592 yanked", + "type": "boolean" + }, + "yanked_reason": { + "description": "PEP 592 reason for yanking release file", + "type": ["string", "null"] + } + } + } + } + }, + "vulnerabilities": { + "type": "array", + "description": "A mapping of version identifiers to vulnerability information", + "items": { + "type": "object", + "properties": { + "aliases": { + "type": ["array", "null"], + "description": "A list of CVE vulns", + "items": { + "type": "string" + } + }, + "details": { + "type": ["string", "null"], + "description": "A description of the vulnerability" + }, + "fixed_in": { + "type": ["array", "null"], + "description": "A list of versions that are fixed", + "items": { + "type": "string" + } + }, + "id": { + "type": ["string", "null"] + }, + "link": { + "type": ["string", "null"] + }, + "source": { + "type": ["string", "null"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/release.json b/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/release.json new file mode 100644 index 0000000000000..697f7c96cd577 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/release.json @@ -0,0 +1,270 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Warehouse JSON API version 1.0", + "type": "object", + "required": ["info", "last_serial", "urls"], + "properties": { + "info": { + "type": "object", + "description": "Generic information about a specific version of a project", + "required": [ + "author", + "author_email", + "license", + "name", + "project_url", + "version", + "yanked", + "yanked_reason" + ], + "properties": { + "author": { + "description": "The name of the company or individual who created the project", + "type": "string" + }, + "author_email": { + "description": "The author's email address", + "type": "string" + }, + "bugtrack_url": { + "description": "URL to find issues and bugs for the project", + "type": ["string", "null"] + }, + "classifiers": { + "description": "Trove Classifier. 
Corresponds to https://packaging.python.org/specifications/core-metadata/#classifier-multiple-use", + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "description": "Corresponds to https://packaging.python.org/specifications/core-metadata/#description", + "type": "string" + }, + "description_content_type": { + "description": "Corresponds to https://packaging.python.org/specifications/core-metadata/#description-content-type", + "type": ["string", "null"] + }, + "docs_url": { + "description": "URL to the project's documentation", + "type": ["string", "null"] + }, + "download_url": { + "description": "[DEPRECATED]", + "type": ["string", "null"] + }, + "downloads": { + "description": "[DEPRECATED]", + "type": "object" + }, + "home_page": { + "description": "URL to project home page", + "type": "string" + }, + "keywords": { + "description": "Keywords to use for project searching", + "type": "string" + }, + "license": { + "description": "Project's open source license", + "type": ["string", "null"] + }, + "maintainer": { + "description": "Project maintainer name", + "type": ["string", "null"] + }, + "maintainer_email": { + "description": "Project maintainer email address", + "type": ["string", "null"] + }, + "name": { + "description": "Project's raw (non-normailzed name)", + "type": "string" + }, + "package_url": { + "description": "URL to the project page", + "type": "string" + }, + "platform": { + "description": "[DEPRECATED]", + "type": ["null", "string"] + }, + "project_url": { + "description": "URL to the project page", + "type": "string" + }, + "project_urls": { + "description": "Additional URLs that are relevant to your project. Corresponds to https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use", + "patternProperties": { + ".*": { + "type": ["null", "string"] + } + } + }, + "release_url": { + "description": "URL of the release page of the version of the project", + "type": "string" + }, + "requires_dist": { + "description": "Calculated project dependencies. Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-dist-multiple-use", + "type": ["array", "null"], + "items": { + "type": "string" + } + }, + "requires_python": { + "description": "Python runtime version required for project. Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-python", + "type": ["string", "null"] + }, + "summary": { + "description": "A one-line summary of what the distribution does.. Corresponds to https://packaging.python.org/specifications/core-metadata/#summary", + "type": ["string", "null"] + }, + "version": { + "description": "A string containing the distributions version number in the format specified in PEP 440. Corresponds to https://packaging.python.org/specifications/core-metadata/#version", + "type": "string" + }, + "yanked": { + "description": "If the version has been yanked. 
As defined in PEP 592", + "type": "boolean" + }, + "yanked_reason": { + "description": "Reason for applying PEP 592 version yank", + "type": ["string", "null"] + } + } + }, + "last_serial": { + "type": "integer", + "description": "Monotonically increasing integer sequence that changes every time the project is updated" + }, + "urls": { + "type": "array", + "description": "A list of release artifacts associated with a version", + "items": { + "release_file": { + "description": "A single downloadable and installable artifact", + "type": "object", + "required": [ + "digests", + "filename", + "packagetype", + "size", + "upload_time_iso_8601", + "url", + "yanked", + "yanked_reason" + ], + "properties": { + "comment_text": { + "description": "[DEPRECATED]", + "type": "string" + }, + "digests": { + "description": "The file checksums", + "type": "object", + "properties": { + "md5": { + "description": "The MD5 checksum of the release file", + "type": "string" + }, + "sha256": { + "description": "The SHA256 checksum of the release file", + "type": "string" + } + } + }, + "downloads": { + "description": "[DEPRECATED]", + "type": "integer" + }, + "filename": { + "description": "Full filename (including extension)", + "type": "string" + }, + "has_sig": { + "description": "Indicates whether a $(filename).asc GPG signature file was provided", + "type": "boolean" + }, + "md5_digest": { + "description": "[DEPRECATED]", + "type": "string" + }, + "packagetype": { + "description": "Release file type: 'sdist', 'bdist_wheel', etc", + "type": "string" + }, + "python_version": { + "description": "Can be 'source' or Python Tag as defined in https://www.python.org/dev/peps/pep-0425/#python-tag", + "type": "string" + }, + "requires_python": { + "description": "Python runtime version required for project. 
Corresponds to https://packaging.python.org/specifications/core-metadata/#requires-python", + "type": ["string", "null"] + }, + "size": { + "description": "File size in bytes", + "type": "integer" + }, + "upload_time": { + "description": "strftime('%Y-%m-%dT%H:%M:%S') of when the release file was uploaded", + "type": "string" + }, + "upload_time_iso_8601": { + "description": "ISO 8601 timestamp of when the release file was uploaded", + "type": "string" + }, + "url": { + "description": "Downloadable URL of the release file", + "type": "string" + }, + "yanked": { + "description": "Is release file PEP 592 yanked", + "type": "boolean" + }, + "yanked_reason": { + "description": "PEP 592 reason for yanking release file", + "type": ["string", "null"] + } + } + } + } + }, + "vulnerabilities": { + "type": "array", + "description": "A mapping of version identifiers to vulnerability information", + "items": { + "type": "object", + "properties": { + "aliases": { + "type": ["array", "null"], + "description": "A list of CVE vulns", + "items": { + "type": "string" + } + }, + "details": { + "type": ["string", "null"], + "description": "A description of the vulnerability" + }, + "fixed_in": { + "type": ["array", "null"], + "description": "A list of versions that are fixed", + "items": { + "type": "string" + } + }, + "id": { + "type": ["string", "null"] + }, + "link": { + "type": ["string", "null"] + }, + "source": { + "type": ["string", "null"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/stats.json b/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/stats.json new file mode 100644 index 0000000000000..8ce33f667530f --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/schemas/stats.json @@ -0,0 +1,25 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "PyPI status", + "type": "object", + "required": ["top_packages", "total_packages_size"], + "properties": { + "top_packages": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "required": ["size"], + "properties": { + "size": { + "type": ["null", "integer"] + } + } + } + } + }, + "total_packages_size": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/source.py b/airbyte-integrations/connectors/source-pypi/source_pypi/source.py new file mode 100644 index 0000000000000..754a3600be541 --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourcePypi(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "pypi.yaml"}) diff --git a/airbyte-integrations/connectors/source-pypi/source_pypi/spec.yaml b/airbyte-integrations/connectors/source-pypi/source_pypi/spec.yaml new file mode 100644 index 0000000000000..134fb0e24a81d --- /dev/null +++ b/airbyte-integrations/connectors/source-pypi/source_pypi/spec.yaml @@ -0,0 +1,25 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/pypi +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Pypi Spec + type: object + required: + - project_name + additionalProperties: true + properties: + project_name: + type: string + title: PyPI Package + description: >- + Name of the project/package. Can only be in lowercase with hyphen. + This is the name used using pip command for installing the package. + examples: + - sampleproject + version: + title: Package Version + type: string + description: >- + Version of the project/package. + Use it to find a particular release instead of all releases. + examples: + - 1.2.0 diff --git a/airbyte-integrations/connectors/source-qonto/.dockerignore b/airbyte-integrations/connectors/source-qonto/.dockerignore new file mode 100644 index 0000000000000..028a3333acb20 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_qonto +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-qonto/Dockerfile b/airbyte-integrations/connectors/source-qonto/Dockerfile new file mode 100644 index 0000000000000..b753a3e701cb4 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_qonto ./source_qonto + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-qonto diff --git a/airbyte-integrations/connectors/source-qonto/README.md b/airbyte-integrations/connectors/source-qonto/README.md new file mode 100644 index 0000000000000..7118ea0ff6a0a --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/README.md @@ -0,0 +1,132 @@ +# Qonto Source + +This is the repository for the Qonto source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/qonto). 
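
For orientation before diving into setup: this connector authenticates by sending a plain `organization_slug:secret_key` value in the `Authorization` header and talks to the Qonto v2 REST API (see `source_qonto/auth.py` and `source_qonto/endpoint.py` later in this PR). A hedged sketch of the equivalent raw request, with placeholder credentials and a hypothetical IBAN:

```python
import requests

# Placeholders only -- none of these values are real credentials.
ORGANIZATION_SLUG = "my-organization"
SECRET_KEY = "3564f"
IBAN = "FR7612345678901234567890123"  # hypothetical IBAN

# "Production" entry of QONTO_ENDPOINT_MAP (source_qonto/endpoint.py in this PR).
BASE_URL = "https://thirdparty.qonto.com/v2/"

# The connector builds the same slug:key Authorization header and iban query param.
response = requests.get(
    BASE_URL + "transactions",
    headers={"Authorization": f"{ORGANIZATION_SLUG}:{SECRET_KEY}"},
    params={"iban": IBAN},
)
print(response.status_code)
```
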
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-qonto:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/qonto) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_qonto/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source qonto test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-qonto:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-qonto:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
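
The `docker run` commands in the next section mount a local `secrets/` directory. As a convenience, here is a minimal, illustrative sketch for generating a placeholder `secrets/config.json`; the field names mirror `integration_tests/sample_config.json` added in this PR, and every value is a placeholder you must replace:

```python
import json
from pathlib import Path

# Field names taken from integration_tests/sample_config.json and source_qonto/spec.yaml
# in this PR; the values below are placeholders.
config = {
    "endpoint": "Test Mocked API Server",  # or "Production"
    "iban": "REPLACEME",
    "organization_slug": "REPLACEME",
    "secret_key": "REPLACEME",
    "start_date": "2022-11-04",
}

Path("secrets").mkdir(exist_ok=True)
Path("secrets/config.json").write_text(json.dumps(config, indent=2))
```
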
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-qonto:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-qonto:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-qonto:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-qonto:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
+First install test dependencies into your virtual environment:
+```
+pip install .[tests]
+```
+### Unit Tests
+To run unit tests locally, from the connector directory run:
+```
+python -m pytest unit_tests
+```
+
+### Integration Tests
+There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
+#### Custom Integration tests
+Place custom tests inside the `integration_tests/` folder, then, from the connector root, run
+```
+python -m pytest integration_tests
+```
+#### Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure the tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+To run your integration tests with acceptance tests, from the connector root, run
+```
+python -m pytest integration_tests -p integration_tests.acceptance
+```
+To run your integration tests with Docker, use the `acceptance-test-docker.sh` script in this connector directory.
+
+### Using gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-qonto:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-qonto:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies into two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
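
Beyond the CLI and Gradle commands above, a quick programmatic smoke test can be handy while iterating. A minimal sketch, assuming a valid `secrets/config.json` and an activated virtualenv with this package installed; the signatures used here match `source_qonto/source.py` in this PR:

```python
import json
import logging

from source_qonto import SourceQonto

logger = logging.getLogger("airbyte")

with open("secrets/config.json") as f:
    config = json.load(f)

source = SourceQonto()
ok, error = source.check_connection(logger, config)
print(f"check_connection: ok={ok}, error={error}")

# The connector currently exposes three full-refresh streams:
# memberships, labels and transactions.
for stream in source.streams(config):
    print(stream.name)
```
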
diff --git a/airbyte-integrations/connectors/source-qonto/acceptance-test-config.yml b/airbyte-integrations/connectors/source-qonto/acceptance-test-config.yml new file mode 100644 index 0000000000000..0508da7f7e665 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-qonto:dev +tests: + spec: + - spec_path: "source_qonto/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-qonto/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-qonto/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-qonto/build.gradle b/airbyte-integrations/connectors/source-qonto/build.gradle new file mode 100644 index 0000000000000..5e53cd9432189 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_qonto' +} diff --git a/airbyte-integrations/connectors/source-qonto/integration_tests/__init__.py b/airbyte-integrations/connectors/source-qonto/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-qonto/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-qonto/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-qonto/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-qonto/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..43704ee30da75 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "memberships", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "labels", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "transactions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-qonto/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-qonto/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..a22da8cc220f7 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/integration_tests/invalid_config.json @@ -0,0 +1,7 @@ +{ + "endpoint": "fake-endpoint", + "iban": "fake_iban", + "organization_slug": "fake_organization", + "secret_key": "fake_secret", + "start_date": "9999-99-99" +} diff --git a/airbyte-integrations/connectors/source-qonto/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-qonto/integration_tests/sample_config.json new file mode 100644 index 0000000000000..52b6ca534b1a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/integration_tests/sample_config.json @@ -0,0 +1,7 @@ +{ + "endpoint": "Test Mocked API Server", + "iban": "REPLACEME", + "organization_slug": "REPLACEME", + "secret_key": "REPLACEME", + "start_date": "2022-11-04" +} diff --git a/airbyte-integrations/connectors/source-qonto/main.py b/airbyte-integrations/connectors/source-qonto/main.py new file mode 100644 index 0000000000000..6430830e4bb28 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_qonto import SourceQonto + +if __name__ == "__main__": + source = SourceQonto() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-qonto/requirements.txt b/airbyte-integrations/connectors/source-qonto/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
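
`main.py` above simply hands the CLI arguments to the CDK's `launch` entrypoint. For illustration, the same `read` invocation the README describes can be driven from Python rather than the shell; a hedged sketch, using the config and catalog paths added in this PR:

```python
from airbyte_cdk.entrypoint import launch

from source_qonto import SourceQonto

# Same arguments the README passes to `python main.py read ...`.
args = [
    "read",
    "--config", "secrets/config.json",
    "--catalog", "integration_tests/configured_catalog.json",
]
launch(SourceQonto(), args)
```
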
diff --git a/airbyte-integrations/connectors/source-qonto/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-qonto/sample_files/configured_catalog.json new file mode 100644 index 0000000000000..a4e6293f32126 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/sample_files/configured_catalog.json @@ -0,0 +1,157 @@ +{ + "streams": [ + { + "stream": { + "name": "memberships", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "first_name": { + "type": "string" + }, + "last_name": { + "type": "string" + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "labels", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "parent_id": { + "type": "string" + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "transactions", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "transaction_id": { + "type": "string" + }, + "amount": { + "type": "number" + }, + "amount_cents": { + "type": "integer" + }, + "settled_balance": { + "type": "number" + }, + "settled_balance_cents": { + "type": "integer" + }, + "attachment_ids": { + "type": ["array"], + "items": { + "type": ["string"] + } + }, + "local_amount": { + "type": "number" + }, + "local_amount_cents": { + "type": "integer" + }, + "side": { + "type": "string" + }, + "operation_type": { + "type": "string" + }, + "currency": { + "type": "string" + }, + "local_currency": { + "type": "string" + }, + "label": { + "type": "string" + }, + "settled_at": { + "type": "string" + }, + "emitted_at": { + "type": "string" + }, + "updated_at": { + "type": "string" + }, + "status": { + "type": "string" + }, + "note": { + "type": "string" + }, + "reference": { + "type": "string" + }, + "vat_amount": { + "type": "number" + }, + "vat_amount_cents": { + "type": "number" + }, + "vat_rate": { + "type": "number" + }, + "initiator_id": { + "type": "string" + }, + "label_ids": { + "type": ["array"], + "items": { + "type": ["string"] + } + }, + "attachment_lost": { + "type": "boolean" + }, + "attachment_required": { + "type": "boolean" + }, + "card_last_digits": { + "type": "string" + }, + "category": { + "type": "string" + }, + "id": { + "type": "string" + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-qonto/setup.py b/airbyte-integrations/connectors/source-qonto/setup.py new file mode 100644 index 0000000000000..dc7b2baee5c5f --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+#
+
+
+from setuptools import find_packages, setup
+
+MAIN_REQUIREMENTS = [
+    "airbyte-cdk~=0.2",
+]
+
+TEST_REQUIREMENTS = [
+    "pytest~=6.1",
+    "pytest-mock~=3.6.1",
+    "source-acceptance-test",
+]
+
+setup(
+    name="source_qonto",
+    description="Source implementation for Qonto API.",
+    author="Leïla Ballouard",
+    author_email="leila.ballouard@backmarket.com",
+    packages=find_packages(),
+    install_requires=MAIN_REQUIREMENTS,
+    package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
+    extras_require={
+        "tests": TEST_REQUIREMENTS,
+    },
+)
diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/__init__.py b/airbyte-integrations/connectors/source-qonto/source_qonto/__init__.py
new file mode 100644
index 0000000000000..0dbd24ec2933b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/source_qonto/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+from .source import SourceQonto
+
+__all__ = ["SourceQonto"]
diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/auth.py b/airbyte-integrations/connectors/source-qonto/source_qonto/auth.py
new file mode 100644
index 0000000000000..5765ba63e5adc
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/source_qonto/auth.py
@@ -0,0 +1,37 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+from airbyte_cdk.sources.streams.http.requests_native_auth.abstract_token import AbstractHeaderAuthenticator
+
+
+class QontoApiKeyAuthenticator(AbstractHeaderAuthenticator):
+    """
+    QontoApiKeyAuthenticator sets a request header on the HTTP requests sent.
+
+    The header is of the form:
+    `"Authorization": "<organization_slug>:<secret_key>"`
+
+    For example,
+    `QontoApiKeyAuthenticator("my-organization", "3564f")`
+    will result in the following header set on the HTTP request
+    `"Authorization": "my-organization:3564f"`
+
+    Attributes:
+        organization_slug (str): Organization slug to use in the header
+        secret_key (str): Secret key to use in the header
+    """
+
+    def __init__(self, organization_slug: str, secret_key: str):
+        super().__init__()
+        self.organization_slug = organization_slug
+        self.secret_key = secret_key
+
+    @property
+    def auth_header(self) -> str:
+        return "Authorization"
+
+    @property
+    def token(self) -> str:
+        return f"{self.organization_slug}:{self.secret_key}"
diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/endpoint.py b/airbyte-integrations/connectors/source-qonto/source_qonto/endpoint.py
new file mode 100644
index 0000000000000..bfc1fe9f6626d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/source_qonto/endpoint.py
@@ -0,0 +1,17 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# + + +from typing import Dict + +QONTO_ENDPOINT_MAP: Dict = { + "Sandbox": "https://thirdparty-sandbox.staging.qonto.co/v2/", + "Production": "https://thirdparty.qonto.com/v2/", + "Test Mocked API Server": "https://stoplight.io/mocks/qonto-next/business-api/8419419/v2/", +} + + +def get_url_base(endpoint: str) -> str: + """Define the URL Base from user's input with respect to the QONTO_ENDPOINT_MAP""" + return QONTO_ENDPOINT_MAP.get(endpoint) diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/labels.json b/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/labels.json new file mode 100644 index 0000000000000..7aeb5973b6fe7 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/labels.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": ["null", "string"] + }, + "parent_id": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/memberships.json b/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/memberships.json new file mode 100644 index 0000000000000..0be6884ee3dce --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/memberships.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/transactions.json b/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/transactions.json new file mode 100644 index 0000000000000..60a3a42328fa4 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/source_qonto/schemas/transactions.json @@ -0,0 +1,99 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "transaction_id": { + "type": ["null", "string"] + }, + "amount": { + "type": ["null", "number"] + }, + "amount_cents": { + "type": ["null", "integer"] + }, + "settled_balance": { + "type": ["null", "number"] + }, + "settled_balance_cents": { + "type": ["null", "integer"] + }, + "attachment_ids": { + "type": ["array"], + "items": { + "type": ["string"] + } + }, + "local_amount": { + "type": ["null", "number"] + }, + "local_amount_cents": { + "type": ["null", "integer"] + }, + "side": { + "type": ["null", "string"] + }, + "operation_type": { + "type": ["null", "string"] + }, + "currency": { + "type": ["null", "string"] + }, + "local_currency": { + "type": ["null", "string"] + }, + "label": { + "type": ["null", "string"] + }, + "settled_at": { + "type": ["null", "string"] + }, + "emitted_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "note": { + "type": ["null", "string"] + }, + "reference": { + "type": ["null", "string"] + }, + "vat_amount": { + "type": ["null", "number"] + }, + "vat_amount_cents": { + "type": ["null", "number"] + }, + "vat_rate": { + "type": ["null", "number"] + }, + "initiator_id": { + "type": ["null", "string"] + }, + "label_ids": { + "type": ["array"], + "items": { + "type": ["string"] + } + }, + "attachment_lost": { + "type": ["null", "boolean"] + }, + "attachment_required": { + "type": ["null", "boolean"] + }, + "card_last_digits": { 
+ "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "id": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/source.py b/airbyte-integrations/connectors/source-qonto/source_qonto/source.py new file mode 100644 index 0000000000000..ac50d941bc7f1 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/source_qonto/source.py @@ -0,0 +1,152 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from abc import ABC +from datetime import datetime +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from source_qonto.auth import QontoApiKeyAuthenticator +from source_qonto.endpoint import get_url_base + + +# Basic full refresh stream +class QontoStream(HttpStream, ABC): + """ + This class represents a stream output by the connector. + This is an abstract base class meant to contain all the common functionality at the API level. + + Each stream should extend this class (or another abstract subclass of it) to specify behavior unique to that stream. + """ + + next_page_token_field = "current_page" + primary_key = "id" + + def __init__(self, config: dict, stream_name: str, **kwargs): + auth = QontoApiKeyAuthenticator(organization_slug=config["organization_slug"], secret_key=config["secret_key"]) + super().__init__(authenticator=auth, **kwargs) + self.stream_name = stream_name + self.config = config + + @property + def url_base(self) -> str: + return get_url_base(self.config["endpoint"]) + + def path( + self, + stream_state: Mapping[str, Any] = None, + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> str: + return self.stream_name + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + """ + Define how a response is parsed. + :return an iterable containing each record in the response + """ + response_json = response.json() + yield from response_json[self.stream_name] + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + """ + Define a pagination strategy. + + :param response: the most recent response from the API + :return If there is another page in the result, a mapping (e.g: dict) containing information needed to query the next page in the response. + If there are no more pages in the result, return None. 
+        """
+        decoded_response = response.json()
+        api_metadata = decoded_response.get("meta", None)
+        if api_metadata is None:
+            return None
+        else:
+            next_page = api_metadata.get("next_page", None)
+            if next_page is None:
+                return None
+            else:
+                return {"current_page": next_page}
+
+
+class Memberships(QontoStream):
+    name = "memberships"
+
+    def __init__(self, config, **kwargs):
+        super().__init__(config, self.name)
+
+
+class Labels(QontoStream):
+    name = "labels"
+
+    def __init__(self, config, **kwargs):
+        super().__init__(config, self.name)
+
+
+class Transactions(QontoStream):
+    name = "transactions"
+    cursor_date_format = "%Y-%m-%d"
+
+    def __init__(self, config, **kwargs):
+        super().__init__(config, self.name)
+        self.start_date = config["start_date"]
+        self.iban = config["iban"]
+
+    def request_params(
+        self,
+        stream_state: Mapping[str, Any],
+        stream_slice: Mapping[str, any] = None,
+        next_page_token: Mapping[str, Any] = None,
+    ) -> MutableMapping[str, Any]:
+        """
+        Define any query parameters to be set.
+        """
+        start_date = datetime.strptime(stream_state.get(self.cursor_field) if stream_state else self.start_date, self.cursor_date_format)
+        params = {"iban": self.iban, "settled_at_from": start_date.strftime(self.cursor_date_format)}
+        if next_page_token:
+            params.update(next_page_token)
+        return params
+
+
+# Source
+class SourceQonto(AbstractSource):
+    def check_connection(self, logger, config) -> Tuple[bool, any]:
+        """
+        Validate that the user-provided config can be used to connect to the underlying API
+
+        :param config: the user-input config object conforming to the connector's spec.yaml
+        :param logger: logger object
+        :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise.
+        """
+        try:
+            headers = {"Authorization": f'{config["organization_slug"]}:{config["secret_key"]}'}
+            params = {"iban": config["iban"]}
+            resp = requests.request("GET", url=f"{get_url_base(config['endpoint'])}/transactions", params=params, headers=headers)
+            status = resp.status_code
+            logger.info(f"Ping response code: {status}")
+            if status == 200:
+                return True, None
+            if status == 404:
+                if resp.text == " ":  # When the IBAN is wrong, the request returns only " " as content
+                    message = "Not Found, the specified IBAN might be wrong"
+                else:
+                    message = resp.json().get("errors")[0].get("detail")
+                return False, message
+            if status == 401:
+                message = "Invalid credentials, the organization slug or secret key might be wrong"
+                return False, message
+            return False, f"Unexpected HTTP status code: {status}"
+        except Exception as e:
+            return False, e
+
+    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+        """
+        Return the list of streams that will be enabled in the connector
+
+        :param config: A Mapping of the user input configuration as defined in the connector spec.
+        """
+        return [Memberships(config), Transactions(config), Labels(config)]
diff --git a/airbyte-integrations/connectors/source-qonto/source_qonto/spec.yaml b/airbyte-integrations/connectors/source-qonto/source_qonto/spec.yaml
new file mode 100644
index 0000000000000..f6d6e654335e2
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/source_qonto/spec.yaml
@@ -0,0 +1,40 @@
+documentationUrl: https://docs.airbyte.io/integrations/sources/qonto
+connectionSpecification:
+  $schema: http://json-schema.org/draft-07/schema#
+  title: Qonto Spec
+  type: object
+  required:
+    - endpoint
+    - organization_slug
+    - secret_key
+    - iban
+  properties:
+    endpoint:
+      title: Endpoint
+      type: string
+      description: Please choose the right endpoint to use in this connection
+      enum:
+        #- Sandbox # not yet supported
+        - Production
+        - Test Mocked API Server
+    organization_slug:
+      title: Organization slug
+      type: string
+      description: Organization slug used in Qonto
+    secret_key:
+      title: Secret Key
+      type: string
+      description: Secret key of the Qonto account
+      airbyte_secret: true
+    iban:
+      title: IBAN
+      type: string
+      description: International Bank Account Number linked to your Qonto account
+      pattern: ^[A-Z0-9]*$
+    start_date:
+      title: Start date
+      type: string
+      description: Start getting data from that date.
+      pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$
+      examples:
+        - YYYY-MM-DD
diff --git a/airbyte-integrations/connectors/source-qonto/unit_tests/__init__.py b/airbyte-integrations/connectors/source-qonto/unit_tests/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/unit_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-qonto/unit_tests/test_auth.py b/airbyte-integrations/connectors/source-qonto/unit_tests/test_auth.py
new file mode 100644
index 0000000000000..9af8deff3a758
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/unit_tests/test_auth.py
@@ -0,0 +1,13 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+from source_qonto.auth import QontoApiKeyAuthenticator
+
+
+def test_authenticator():
+    mocked_config = {"organization_slug": "test_slug", "secret_key": "test_key"}
+    authenticator = QontoApiKeyAuthenticator(**mocked_config)
+    expected_authenticator = {"Authorization": "test_slug:test_key"}
+    assert authenticator.get_auth_header() == expected_authenticator
diff --git a/airbyte-integrations/connectors/source-qonto/unit_tests/test_source.py b/airbyte-integrations/connectors/source-qonto/unit_tests/test_source.py
new file mode 100644
index 0000000000000..127962b51164f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-qonto/unit_tests/test_source.py
@@ -0,0 +1,39 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# + +from http import HTTPStatus +from unittest.mock import MagicMock, patch + +import pytest +from source_qonto.source import SourceQonto + + +@pytest.mark.parametrize( + ("http_status", "response_text", "expected_result"), + [ + (HTTPStatus.OK, "", (True, None)), + (HTTPStatus.NOT_FOUND, " ", (False, "Not Found, the specified IBAN might be wrong")), + ( + HTTPStatus.UNAUTHORIZED, + "Invalid credentials", + (False, "Invalid credentials, the organization slug or secret key might be wrong"), + ), + ], +) +def test_check_connection(mocker, http_status, response_text, expected_result): + with patch("requests.request") as mock_request: + mock_request.return_value.status_code = http_status + mock_request.return_value.text = response_text + source = SourceQonto() + logger_mock, config_mock = MagicMock(), MagicMock() + print(http_status) + assert source.check_connection(logger_mock, config_mock) == expected_result + + +def test_streams(mocker): + source = SourceQonto() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 3 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-qonto/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-qonto/unit_tests/test_streams.py new file mode 100644 index 0000000000000..e0869b3ff9908 --- /dev/null +++ b/airbyte-integrations/connectors/source-qonto/unit_tests/test_streams.py @@ -0,0 +1,127 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock, patch + +import pytest +import requests +from source_qonto.source import QontoStream, Transactions + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(QontoStream, "path", "v0/example_endpoint") + mocker.patch.object(QontoStream, "primary_key", "test_primary_key") + + def __mocked_init__(self): + self.stream_name = "test_stream_name" + pass + + mocker.patch.object(QontoStream, "__init__", __mocked_init__) + + +# Base Class +def test_request_params(patch_base_class): + + stream = QontoStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = QontoStream() + simple_page_response_json = { + "transactions": [], + "meta": {"current_page": 1, "next_page": None, "prev_page": None, "total_pages": 3, "total_count": 210, "per_page": 100}, + } + multiple_page_response_json = { + "transactions": [], + "meta": {"current_page": 5, "next_page": 6, "prev_page": 4, "total_pages": 7, "total_count": 210, "per_page": 100}, + } + with patch.object(requests.Response, "json", return_value=simple_page_response_json): + inputs = {"response": requests.Response()} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + with patch.object(requests.Response, "json", return_value=multiple_page_response_json): + inputs = {"response": requests.Response()} + expected_token = {"current_page": 6} + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = QontoStream() + mock_response_json = { + "test_stream_name": [ + {"id": "171dba70-c75f-4337-b419-377a59bc9cf3", "name": "Fantastic Marble Wallet", "parent_id": None}, + { + "id": "2487a014-618f-40e3-8a1f-eb76e883efc5", + "name": "Fantastic Bronze Computer", + "parent_id": 
"171dba70-c75f-4337-b419-377a59bc9cf3", + }, + ], + "meta": {"current_page": 1, "next_page": None, "prev_page": None, "total_pages": 1, "total_count": 2, "per_page": 100}, + } + with patch.object(requests.Response, "json", return_value=mock_response_json): + inputs = {"response": requests.Response()} + expected_parsed_object = {"id": "171dba70-c75f-4337-b419-377a59bc9cf3", "name": "Fantastic Marble Wallet", "parent_id": None} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = QontoStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = QontoStream() + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = QontoStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = QontoStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time + + +# Transactions Class +def test_transactions_request_params(): + mocked_config = { + "organization_slug": "test_slug", + "secret_key": "test_key", + "iban": "FRXXXXXXXXXXXXXXXXXXXXXXXXX", + "start_date": "2022-06-01", + } + stream = Transactions(mocked_config) + + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"iban": stream.iban, "settled_at_from": stream.start_date} + assert stream.request_params(**inputs) == expected_params + + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"current_page": 6}} + expected_params = {"iban": stream.iban, "settled_at_from": stream.start_date, "current_page": 6} + assert stream.request_params(**inputs) == expected_params diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json index 0f178109f5112..90e382ab1b424 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/abnormal_state.json @@ -11,4 +11,4 @@ "analytics_workflow_emails_statistics": { "asset_updated_at": "2217-06-26 21:20:07" } -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json index c76a23d355d26..12f8bb6b0ea18 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/configured_catalog.json @@ -4,13 +4,9 @@ "stream": { "name": "emails", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh" - ], + "supported_sync_modes": ["full_refresh"], "source_defined_cursor": true, - 
"default_cursor_field": [ - "update_time" - ] + "default_cursor_field": ["update_time"] }, "sync_mode": "full_refresh", "destination_sync_mode": "append" @@ -19,13 +15,9 @@ "stream": { "name": "landing_pages", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh" - ], + "supported_sync_modes": ["full_refresh"], "source_defined_cursor": true, - "default_cursor_field": [ - "update_time" - ] + "default_cursor_field": ["update_time"] }, "sync_mode": "full_refresh", "destination_sync_mode": "append" @@ -34,16 +26,12 @@ "stream": { "name": "segmentations", "json_schema": {}, - "supported_sync_modes": [ - "full_refresh" - ], + "supported_sync_modes": ["full_refresh"], "source_defined_cursor": true, - "default_cursor_field": [ - "update_time" - ] + "default_cursor_field": ["update_time"] }, "sync_mode": "full_refresh", "destination_sync_mode": "append" } ] -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json index 178618cff3b38..22f15f31a32d7 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/invalid_config.json @@ -1,6 +1,5 @@ -{ - "authorization": - { +{ + "authorization": { "auth_type": "Client", "client_id": "fake-client-id", "client_secret": "fake-client-secret", diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json index 5149ad122f2a6..0236b6b71cfcd 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_config.json @@ -1,6 +1,5 @@ -{ - "authorization": - { +{ + "authorization": { "auth_type": "Client", "client_id": "", "client_secret": "", diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json index e72298059ea41..3d239f0c5b46a 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/integration_tests/sample_state.json @@ -11,4 +11,4 @@ "analytics_workflow_emails_statistics": { "updated_at": "2022-06-26 21:20:07" } -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json index b285d069ca0d6..7eed24df25220 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_conversions.json @@ -2,29 +2,29 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "asset_id": { - "type": ["null", "integer"] - }, - "asset_identifier": { - "type": ["null", "string"] - }, - "asset_created_at": { - "type": ["null", "string"] - }, - "asset_updated_at": { - 
"type": ["null", "string"] - }, - "asset_type": { - "type": ["null", "string"] - }, - "conversion_count": { - "type": ["null", "integer"] - }, - "visits_count": { - "type": ["null", "integer"] - }, - "conversion_rate": { - "type": ["null", "number"] - } + "asset_id": { + "type": ["null", "integer"] + }, + "asset_identifier": { + "type": ["null", "string"] + }, + "asset_created_at": { + "type": ["null", "string"] + }, + "asset_updated_at": { + "type": ["null", "string"] + }, + "asset_type": { + "type": ["null", "string"] + }, + "conversion_count": { + "type": ["null", "integer"] + }, + "visits_count": { + "type": ["null", "integer"] + }, + "conversion_rate": { + "type": ["null", "number"] + } } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json index 6b74fb2db79bc..ce87ee1dab156 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_emails.json @@ -2,50 +2,50 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "send_at": { - "type": ["null", "string"] - }, - "campaign_id": { - "type": ["null", "integer"] - }, - "campaign_name": { - "type": ["null", "string"] - }, - "email_dropped_count": { - "type": ["null", "integer"] - }, - "email_delivered_count": { - "type": ["null", "integer"] - }, - "email_bounced_count": { - "type": ["null", "integer"] - }, - "email_opened_count": { - "type": ["null", "integer"] - }, - "email_clicked_count": { - "type": ["null", "integer"] - }, - "email_unsubscribed_count": { - "type": ["null", "integer"] - }, - "email_spam_reported_count": { - "type": ["null", "integer"] - }, - "email_delivered_rate": { - "type": ["null", "number"] - }, - "email_opened_rate": { - "type": ["null", "number"] - }, - "email_clicked_rate": { - "type": ["null", "number"] - }, - "email_spam_reported_rate": { - "type": ["null", "number"] - }, - "contacts_count": { - "type": ["null", "integer"] - } + "send_at": { + "type": ["null", "string"] + }, + "campaign_id": { + "type": ["null", "integer"] + }, + "campaign_name": { + "type": ["null", "string"] + }, + "email_dropped_count": { + "type": ["null", "integer"] + }, + "email_delivered_count": { + "type": ["null", "integer"] + }, + "email_bounced_count": { + "type": ["null", "integer"] + }, + "email_opened_count": { + "type": ["null", "integer"] + }, + "email_clicked_count": { + "type": ["null", "integer"] + }, + "email_unsubscribed_count": { + "type": ["null", "integer"] + }, + "email_spam_reported_count": { + "type": ["null", "integer"] + }, + "email_delivered_rate": { + "type": ["null", "number"] + }, + "email_opened_rate": { + "type": ["null", "number"] + }, + "email_clicked_rate": { + "type": ["null", "number"] + }, + "email_spam_reported_rate": { + "type": ["null", "number"] + }, + "contacts_count": { + "type": ["null", "integer"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json index 8f144ccfa7de9..4beaf9b96bb16 100644 --- 
a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_funnel.json @@ -2,23 +2,23 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "reference_day": { - "type": ["null", "string"] - }, - "contacts_count": { - "type": ["null", "integer"] - }, - "qualified_contacts_count": { - "type": ["null", "integer"] - }, - "opportunities_count": { - "type": ["null", "integer"] - }, - "sales_count": { - "type": ["null", "integer"] - }, - "visitors_count": { - "type": ["null", "integer"] - } + "reference_day": { + "type": ["null", "string"] + }, + "contacts_count": { + "type": ["null", "integer"] + }, + "qualified_contacts_count": { + "type": ["null", "integer"] + }, + "opportunities_count": { + "type": ["null", "integer"] + }, + "sales_count": { + "type": ["null", "integer"] + }, + "visitors_count": { + "type": ["null", "integer"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json index ffe7866f912d2..d039cf8298742 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/analytics_workflow_emails_statistics.json @@ -2,62 +2,62 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "workflow_name": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - }, - "email_name": { - "type": ["null", "string"] - }, - "workflow_action_id": { - "type": ["null", "string"] - }, - "workflow_id": { - "type": ["null", "string"] - }, - "contacts_count": { - "type": ["null", "integer"] - }, - "count_processed": { - "type": ["null", "integer"] - }, - "email_delivered_count": { - "type": ["null", "integer"] - }, - "email_opened_unique_count": { - "type": ["null", "integer"] - }, - "email_clicked_unique_count": { - "type": ["null", "integer"] - }, - "email_dropped_count": { - "type": ["null", "integer"] - }, - "email_unsubscribed_count": { - "type": ["null", "integer"] - }, - "email_spam_reported_count": { - "type": ["null", "integer"] - }, - "email_delivered_rate": { - "type": ["null", "number"] - }, - "email_opened_rate": { - "type": ["null", "number"] - }, - "email_clicked_rate": { - "type": ["null", "number"] - }, - "email_spam_reported_rate": { - "type": ["null", "number"] - }, - "email_bounced_unique_count": { - "type": ["null", "integer"] - } + "workflow_name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "email_name": { + "type": ["null", "string"] + }, + "workflow_action_id": { + "type": ["null", "string"] + }, + "workflow_id": { + "type": ["null", "string"] + }, + "contacts_count": { + "type": ["null", "integer"] + }, + "count_processed": { + "type": ["null", "integer"] + }, + "email_delivered_count": { + "type": ["null", "integer"] + }, + "email_opened_unique_count": { + "type": ["null", "integer"] + }, + "email_clicked_unique_count": { + "type": 
["null", "integer"] + }, + "email_dropped_count": { + "type": ["null", "integer"] + }, + "email_unsubscribed_count": { + "type": ["null", "integer"] + }, + "email_spam_reported_count": { + "type": ["null", "integer"] + }, + "email_delivered_rate": { + "type": ["null", "number"] + }, + "email_opened_rate": { + "type": ["null", "number"] + }, + "email_clicked_rate": { + "type": ["null", "number"] + }, + "email_spam_reported_rate": { + "type": ["null", "number"] + }, + "email_bounced_unique_count": { + "type": ["null", "integer"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json index d69b22ee0d195..95d5d4d075710 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/emails.json @@ -2,55 +2,55 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "campaign_id": { - "type": ["null", "integer"] - }, - "behavior_score_info": { - "type": ["null", "object"], - "properties": { - "engaged": { - "type": ["null", "boolean"] - }, - "disengaged": { - "type": ["null", "boolean"] - }, - "indeterminate": { - "type": ["null", "boolean"] - } - } - }, - "send_at": { - "type": ["null", "string"] - }, - "status": { - "type": ["null", "string"] - }, - "sending_is_imminent": { + "campaign_id": { + "type": ["null", "integer"] + }, + "behavior_score_info": { + "type": ["null", "object"], + "properties": { + "engaged": { "type": ["null", "boolean"] - }, - "is_predictive_sending": { + }, + "disengaged": { "type": ["null", "boolean"] - }, - "id": { - "type": ["null", "integer"] - }, - "name": { - "type": ["null", "string"] - }, - "component_template_id": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - }, - "type": { - "type": ["null", "string"] - }, - "leads_count": { - "type": ["null", "integer"] + }, + "indeterminate": { + "type": ["null", "boolean"] + } } + }, + "send_at": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "sending_is_imminent": { + "type": ["null", "boolean"] + }, + "is_predictive_sending": { + "type": ["null", "boolean"] + }, + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "component_template_id": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "leads_count": { + "type": ["null", "integer"] + } } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json index 052bb931c79cd..b7b3457baae6a 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/embeddables.json @@ -2,23 +2,23 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "id": { - "type": ["null", "integer"] - }, - "title": { - "type": ["null", "string"] - }, - 
"created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - }, - "conversion_identifier": { - "type": ["null", "string"] - }, - "status": { - "type": ["null", "string"] - } + "id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "conversion_identifier": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json index d740af4b0d0c2..761d1225ed76d 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/fields.json @@ -2,63 +2,63 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "uuid": { + "uuid": { + "type": ["null", "string"] + }, + "label": { + "type": ["null", "object"], + "properties": { + "en-UD": { "type": ["null", "string"] - }, - "label": { - "type": ["null", "object"], - "properties": { - "en-UD": { - "type": ["null", "string"] - }, - "en-US": { - "type": ["null", "string"] - }, - "es-ES": { - "type": ["null", "string"] - }, - "pt-BR": { - "type": ["null", "string"] - }, - "default": { - "type": ["null", "string"] - } - } - }, - "name": { - "type": ["null", "object"], - "properties": { - "en-UD": { - "type": ["null", "string"] - }, - "en-US": { - "type": ["null", "string"] - }, - "es-ES": { - "type": ["null", "string"] - }, - "pt-BR": { - "type": ["null", "string"] - }, - "default": { - "type": ["null", "string"] - } - } - }, - "api_identifier": { + }, + "en-US": { "type": ["null", "string"] - }, - "custom_field": { - "type": ["null", "boolean"] - }, - "validation_rules": { - "type": ["null", "object"] - }, - "presentation_type": { + }, + "es-ES": { "type": ["null", "string"] - }, - "data_type": { + }, + "pt-BR": { "type": ["null", "string"] + }, + "default": { + "type": ["null", "string"] + } + } + }, + "name": { + "type": ["null", "object"], + "properties": { + "en-UD": { + "type": ["null", "string"] + }, + "en-US": { + "type": ["null", "string"] + }, + "es-ES": { + "type": ["null", "string"] + }, + "pt-BR": { + "type": ["null", "string"] + }, + "default": { + "type": ["null", "string"] + } } + }, + "api_identifier": { + "type": ["null", "string"] + }, + "custom_field": { + "type": ["null", "boolean"] + }, + "validation_rules": { + "type": ["null", "object"] + }, + "presentation_type": { + "type": ["null", "string"] + }, + "data_type": { + "type": ["null", "string"] + } } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json index dc8e55175ab1b..6cc94df49c3b2 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/landing_pages.json @@ -2,29 +2,29 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "id": { - "type": ["null", "integer"] - }, - 
"title": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - }, - "conversion_identifier": { - "type": ["null", "string"] - }, - "status": { - "type": ["null", "string"] - }, - "has_active_experiment": { - "type": ["null", "boolean"] - }, - "had_experiment": { - "type": ["null", "boolean"] - } + "id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "conversion_identifier": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "has_active_experiment": { + "type": ["null", "boolean"] + }, + "had_experiment": { + "type": ["null", "boolean"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json index 250310cc7748a..404003277f642 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/popups.json @@ -2,26 +2,26 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "id": { - "type": ["null", "integer"] - }, - "title": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - }, - "conversion_identifier": { - "type": ["null", "string"] - }, - "status": { - "type": ["null", "string"] - }, - "trigger": { - "type": ["null", "string"] - } + "id": { + "type": ["null", "integer"] + }, + "title": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] + }, + "conversion_identifier": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "trigger": { + "type": ["null", "string"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json index 04a2d97658fbc..ea322dc4c3fc2 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/segmentations.json @@ -2,43 +2,43 @@ "$schema": "https://json-schema.org/draft-07/schema#", "type": "object", "properties": { - "id": { - "type": ["null", "integer"] - }, - "name": { - "type": ["null", "string"] - }, - "standard": { - "type": ["null", "boolean"] - }, - "created_at": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - }, - "process_status": { - "type": ["null", "string"] - }, - "links": { - "type": ["null", "array"], - "items": { - "type": ["null", "object"], - "properties": { - "rel": { - "type": ["null", "string"] - }, - "href": { - "type": ["null", "string"] - }, - "media": { - "type": ["null", "string"] - }, - "type": { - "type": ["null", "string"] - } - } + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "standard": { + "type": ["null", "boolean"] + }, + "created_at": { + "type": ["null", "string"] + }, 
+ "updated_at": { + "type": ["null", "string"] + }, + "process_status": { + "type": ["null", "string"] + }, + "links": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "rel": { + "type": ["null", "string"] + }, + "href": { + "type": ["null", "string"] + }, + "media": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] } + } } + } } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json index c9bd95a326714..f4293acb2a05d 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/schemas/workflows.json @@ -2,23 +2,23 @@ "$schema": "https://json-schema.org/draft-07/schema", "type": "object", "properties": { - "id": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "user_email_created": { - "type": ["null", "string"] - }, - "created_at": { - "type": ["null", "string"] - }, - "user_email_updated": { - "type": ["null", "string"] - }, - "updated_at": { - "type": ["null", "string"] - } + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "user_email_created": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "user_email_updated": { + "type": ["null", "string"] + }, + "updated_at": { + "type": ["null", "string"] } + } } diff --git a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json index 72eeecef21684..998021c78a305 100644 --- a/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json +++ b/airbyte-integrations/connectors/source-rd-station-marketing/source_rd_station_marketing/spec.json @@ -4,9 +4,7 @@ "$schema": "http://json-schema.org/draft-07/schema#", "title": "RD Station Marketing Spec", "type": "object", - "required": [ - "start_date" - ], + "required": ["start_date"], "additionalProperties": true, "properties": { "authorization": { @@ -17,9 +15,7 @@ { "title": "Sign in via RD Station (OAuth)", "type": "object", - "required": [ - "auth_type" - ], + "required": ["auth_type"], "properties": { "auth_type": { "type": "string", @@ -52,9 +48,7 @@ "title": "Start Date", "description": "UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
When specified and not None, then stream will behave as incremental", "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$", - "examples": [ - "2017-01-25T00:00:00Z" - ], + "examples": ["2017-01-25T00:00:00Z"], "type": "string" } } @@ -63,23 +57,9 @@ "authSpecification": { "auth_type": "oauth2.0", "oauth2Specification": { - "rootObject": [ - "authorization", - 0 - ], - "oauthFlowInitParameters": [ - [ - "client_id" - ], - [ - "client_secret" - ] - ], - "oauthFlowOutputParameters": [ - [ - "refresh_token" - ] - ] + "rootObject": ["authorization", 0], + "oauthFlowInitParameters": [["client_id"], ["client_secret"]], + "oauthFlowOutputParameters": [["refresh_token"]] } } -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-recreation/.dockerignore b/airbyte-integrations/connectors/source-recreation/.dockerignore new file mode 100644 index 0000000000000..79d9811d5c040 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_recreation +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-recreation/BOOTSTRAP.md b/airbyte-integrations/connectors/source-recreation/BOOTSTRAP.md new file mode 100644 index 0000000000000..c3c9069c541a5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/BOOTSTRAP.md @@ -0,0 +1,11 @@ +# Recreation.gov +The Recreation Information Database (RIDB) provides data resources to citizens, +offering a single point of access to information about recreational opportunities nationwide. +The RIDB represents an authoritative source of information and services for millions of visitors to federal lands, +historic sites, museums, and other attractions/resources. +This initiative integrates multiple Federal channels and +sources about recreation opportunities into a one-stop, +searchable database of recreational areas nationwide [[ridb.recreation.gov](https://ridb.recreation.gov/docs)]. + +With this Airbyte connector, you can retrieve data from the [Recreation API](https://ridb.recreation.gov/landing) and +sync it to your data warehouse. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-recreation/Dockerfile b/airbyte-integrations/connectors/source-recreation/Dockerfile new file mode 100644 index 0000000000000..2a89ad5f39955 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_recreation ./source_recreation + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-recreation diff --git a/airbyte-integrations/connectors/source-recreation/README.md b/airbyte-integrations/connectors/source-recreation/README.md new file mode 100644 index 0000000000000..f735d599c2b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/README.md @@ -0,0 +1,80 @@ +# Recreation Source + +This is the repository for the Recreation configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/recreation). + +The Recreation Information Database (RIDB) provides data resources to citizens, offering a single point of access to information about recreational opportunities nationwide. The RIDB represents an authoritative source of information and services for millions of visitors to federal lands, historic sites, museums, and other attractions/resources. This initiative integrates multiple Federal channels and sources about recreation opportunities into a one-stop, searchable database of recreational areas nationwide. +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-recreation:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/recreation) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_recreation/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source recreation test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-recreation:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-recreation:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-recreation:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recreation:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recreation:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-recreation:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
+
+To run your integration tests with Docker, run `./acceptance-test-docker.sh` from the connector directory.
+
+### Using gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-recreation:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-recreation:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-recreation/__init__.py b/airbyte-integrations/connectors/source-recreation/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml
new file mode 100644
index 0000000000000..4a67bef532feb
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/acceptance-test-config.yml
@@ -0,0 +1,31 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-recreation:dev
+acceptance_tests:
+  spec:
+    tests:
+      - spec_path: "source_recreation/spec.yaml"
+  connection:
+    tests:
+      - config_path: "secrets/config.json"
+        status: "succeed"
+      - config_path: "integration_tests/invalid_config.json"
+        status: "failed"
+  discovery:
+    tests:
+      - config_path: "secrets/config.json"
+  basic_read:
+    tests:
+      - config_path: "secrets/config.json"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
+        empty_streams: []
+# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
+#        expect_records:
+#          path: "integration_tests/expected_records.txt"
+#          extra_fields: no
+#          exact_order: no
+#          extra_records: yes
+  full_refresh:
+    tests:
+      - config_path: "secrets/config.json"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-recreation/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-recreation/acceptance-test-docker.sh
new file mode 100755
index 0000000000000..c51577d10690c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/acceptance-test-docker.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-)
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+    -v /var/run/docker.sock:/var/run/docker.sock \
+    -v /tmp:/tmp \
+    -v $(pwd):/test_input \
+    airbyte/source-acceptance-test \
+    --acceptance-test-config /test_input
+
diff --git a/airbyte-integrations/connectors/source-recreation/build.gradle b/airbyte-integrations/connectors/source-recreation/build.gradle
new file mode 100644
index 0000000000000..229eef1be41a2
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+    id 'airbyte-python'
+    id 'airbyte-docker'
+    id 'airbyte-source-acceptance-test'
+}
+
+airbytePython {
+    moduleDirectory 'source_recreation'
+}
diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/__init__.py b/airbyte-integrations/connectors/source-recreation/integration_tests/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/integration_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-recreation/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..956e14f18f6d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "activities": { + "ActivityID": "thisiswrong" + } +} diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-recreation/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-recreation/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..0b208e6fc0e84 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/integration_tests/configured_catalog.json @@ -0,0 +1,103 @@ +{ + "streams": [ + { + "stream": { + "name": "tours", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "recreationareaaddresses", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "recreationareas", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "organizations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "media", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "links", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "facilityaddresses", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "facilities", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "events", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "activities", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "campsites", + "json_schema": 
{}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-recreation/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..f04d17cdc1058 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "apikey": "erroooooorr" +} diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-recreation/integration_tests/sample_config.json new file mode 100644 index 0000000000000..c107f99515add --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "apikey": "69test1e-d615-21b6-137h-1337cool7vf" +} diff --git a/airbyte-integrations/connectors/source-recreation/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-recreation/integration_tests/sample_state.json new file mode 100644 index 0000000000000..4582bdd4083c5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "activities": { + "ActivityID": 1 + } +} diff --git a/airbyte-integrations/connectors/source-recreation/main.py b/airbyte-integrations/connectors/source-recreation/main.py new file mode 100644 index 0000000000000..71e0abf339426 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_recreation import SourceRecreation + +if __name__ == "__main__": + source = SourceRecreation() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-recreation/requirements.txt b/airbyte-integrations/connectors/source-recreation/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-recreation/setup.py b/airbyte-integrations/connectors/source-recreation/setup.py new file mode 100644 index 0000000000000..2cae757b8a654 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+#
+
+
+from setuptools import find_packages, setup
+
+MAIN_REQUIREMENTS = [
+    "airbyte-cdk~=0.1",
+]
+
+TEST_REQUIREMENTS = [
+    "pytest~=6.1",
+    "pytest-mock~=3.6.1",
+    "source-acceptance-test",
+]
+
+setup(
+    name="source_recreation",
+    description="Source implementation for Recreation.",
+    author="Airbyte",
+    author_email="contact@airbyte.io",
+    packages=find_packages(),
+    install_requires=MAIN_REQUIREMENTS,
+    package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
+    extras_require={
+        "tests": TEST_REQUIREMENTS,
+    },
+)
diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/__init__.py b/airbyte-integrations/connectors/source-recreation/source_recreation/__init__.py
new file mode 100644
index 0000000000000..e95ddb3405385
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/source_recreation/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+from .source import SourceRecreation
+
+__all__ = ["SourceRecreation"]
diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/recreation.yaml b/airbyte-integrations/connectors/source-recreation/source_recreation/recreation.yaml
new file mode 100644
index 0000000000000..9ca028fd6037a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-recreation/source_recreation/recreation.yaml
@@ -0,0 +1,145 @@
+version: "0.1.0"
+
+definitions:
+  selector:
+    extractor:
+      field_pointer: ["RECDATA"]
+  requester:
+    url_base: "https://ridb.recreation.gov/api/v1/"
+    http_method: "GET"
+    authenticator:
+      type: ApiKeyAuthenticator
+      header: "apikey"
+      api_token: "{{ config['apikey'] }}"
+    request_options_provider:
+      request_parameters:
+        # Added a hidden parameter in config to be able to run tests.
+        # Maybe this can be added to the config in the future so people
+        # can query more granular data.
+ # Example is `query_campsites: "BIKING"` + query: "{{ config['query_' + options.name] }}" + + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: "DefaultPaginator" + $options: + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: "OffsetIncrement" + page_size: 50 + page_token_option: + field_name: "offset" + inject_into: "request_parameter" + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + activity_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "activities" + primary_key: "ActivityID" + path: "/activities" + + campsites_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "campsites" + primary_key: "CampsiteID" + path: "/campsites" + + events_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "events" + primary_key: "EventID" + path: "/events" + + facilities_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "facilities" + primary_key: "FacilityID" + path: "/facilities" + + facilityaddresses_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "facilityaddresses" + primary_key: "FacilityAddressID" + path: "/facilityaddresses" + + links_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "links" + primary_key: "EntityLinkID" + path: "/links" + + media_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "media" + primary_key: "EntityMediaID" + path: "/media" + + organizations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organizations" + primary_key: "OrgID" + path: "/organizations" + + permits_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "permits" + primary_key: "PermitEntranceID" + path: "/permits" + + recreationareas_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "recreationareas" + primary_key: "RecAreaID" + path: "/recareas" + + recreationareaaddresses_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "recreationareaaddresses" + primary_key: "RecAreaAddressID" + path: "/recareaaddresses" + + tours_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tours" + primary_key: "TourID" + path: "/tours" + +streams: + - "*ref(definitions.organizations_stream)" + - "*ref(definitions.media_stream)" + - "*ref(definitions.links_stream)" + - "*ref(definitions.facilityaddresses_stream)" + - "*ref(definitions.facilities_stream)" + - "*ref(definitions.events_stream)" + - "*ref(definitions.activity_stream)" + - "*ref(definitions.campsites_stream)" + - "*ref(definitions.permits_stream)" + - "*ref(definitions.recreationareaaddresses_stream)" + - "*ref(definitions.recreationareas_stream)" + - "*ref(definitions.tours_stream)" + +check: + stream_names: + - "activities" diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/activities.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/activities.json new file mode 100644 index 0000000000000..065c69310e20f --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/activities.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "ActivityID": { + "type": "integer" + }, + "ActivityLevel": { + "type": "integer" + }, + "ActivityName": { + "type": "string" + }, + 
"ActivityParentID": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/campsites.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/campsites.json new file mode 100644 index 0000000000000..4096f5f147aee --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/campsites.json @@ -0,0 +1,123 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "CampsiteID": { + "type": "string" + }, + "FacilityID": { + "type": "string" + }, + "CampsiteName": { + "type": "string" + }, + "CampsiteType": { + "type": "string" + }, + "TypeOfUse": { + "type": "string" + }, + "Loop": { + "type": "string" + }, + "CampsiteAccessible": { + "type": "boolean" + }, + "CampsiteLongitude": { + "type": "number" + }, + "CampsiteLatitude": { + "type": "number" + }, + "CreatedDate": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + }, + "ATTRIBUTES": { + "type": "array", + "items": { + "type": "object", + "properties": { + "AttributeID": { + "type": "integer" + }, + "AttributeName": { + "type": "string" + }, + "AttributeValue": { + "type": "string" + } + } + } + }, + "PERMITTEDEQUIPMENT": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EquipmentName": { + "type": "string" + }, + "MaxLength": { + "type": "integer" + } + } + } + }, + "ENTITYMEDIA": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityMediaID": { + "type": "string" + }, + "MediaType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Subtitle": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "EmbedCode": { + "type": "string" + }, + "Height": { + "type": "integer" + }, + "Width": { + "type": "integer" + }, + "IsPrimary": { + "type": "boolean" + }, + "IsPreview": { + "type": "boolean" + }, + "IsGallery": { + "type": "boolean" + }, + "URL": { + "type": "string" + }, + "Credits": { + "type": "string" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/events.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/events.json new file mode 100644 index 0000000000000..6c9f30186fad3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/events.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "EventID": { + "type": "string" + }, + "EventName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/facilities.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/facilities.json new file mode 100644 index 0000000000000..cdc3a53c9cf61 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/facilities.json @@ -0,0 +1,355 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "FacilityID": { + "type": "string" + }, + "LegacyFacilityID": { + "type": "string" + }, + "OrgFacilityID": { + "type": "string" + }, + "ParentOrgID": { + "type": "string" + }, + "ParentRecAreaID": { + "type": "string" + }, + "FacilityName": { + "type": "string" + }, + "FacilityDescription": { + "type": 
"string" + }, + "FacilityTypeDescription": { + "type": "string" + }, + "FacilityUseFeeDescription": { + "type": "string" + }, + "FacilityDirections": { + "type": "string" + }, + "FacilityPhone": { + "type": "string" + }, + "FacilityEmail": { + "type": "string" + }, + "FacilityReservationURL": { + "type": "string" + }, + "FacilityMapURL": { + "type": "string" + }, + "FacilityAdaAccess": { + "type": "string" + }, + "GEOJSON": { + "type": "object", + "properties": { + "TYPE": { + "type": "string" + }, + "COORDINATES": { + "type": ["array", "null"], + "items": { + "type": ["string", "number", "null"] + } + } + } + }, + "FacilityLongitude": { + "type": "number" + }, + "FacilityLatitude": { + "type": "number" + }, + "StayLimit": { + "type": "string" + }, + "Keywords": { + "type": "string" + }, + "Reservable": { + "type": "boolean" + }, + "Enabled": { + "type": "boolean" + }, + "LastUpdatedDate": { + "type": "string" + }, + "CAMPSITE": { + "type": "array", + "items": { + "type": "object", + "properties": { + "CampsiteID": { + "type": "string" + }, + "CampsiteName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } + } + }, + "PERMITENTRANCE": { + "type": "array", + "items": { + "type": "object", + "properties": { + "PermitEntranceID": { + "type": "string" + }, + "PermitEntranceName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } + } + }, + "TOUR": { + "type": "array", + "items": { + "type": "object", + "properties": { + "TourID": { + "type": "string" + }, + "TourName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } + } + }, + "ORGANIZATION": { + "type": "array", + "items": { + "type": "object", + "properties": { + "OrgID": { + "type": "string" + }, + "OrgName": { + "type": "string" + }, + "OrgImageURL": { + "type": "string" + }, + "OrgURLText": { + "type": "string" + }, + "OrgURLAddress": { + "type": "string" + }, + "OrgType": { + "type": "string" + }, + "OrgAbbrevName": { + "type": "string" + }, + "OrgJurisdictionType": { + "type": "string" + }, + "OrgParentID": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } + } + }, + "RECAREA": { + "type": "array", + "items": { + "type": "object", + "properties": { + "RecAreaID": { + "type": "string" + }, + "RecAreaName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } + } + }, + "FACILITYADDRESS": { + "type": "array", + "items": { + "type": "object", + "properties": { + "FacilityAddressID": { + "type": "string" + }, + "FacilityID": { + "type": "string" + }, + "FacilityAddressType": { + "type": "string" + }, + "FacilityStreetAddress1": { + "type": "string" + }, + "FacilityStreetAddress2": { + "type": "string" + }, + "FacilityStreetAddress3": { + "type": "string" + }, + "City": { + "type": "string" + }, + "PostalCode": { + "type": "string" + }, + "AddressStateCode": { + "type": "string" + }, + "AddressCountryCode": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } + } + }, + "ACTIVITY": { + "type": "array", + "items": { + "type": "object", + "properties": { + "ActivityID": { + "type": "string" + }, + "FacilityID": { + "type": "string" + }, + "ActivityName": { + "type": "string" + }, + "FacilityActivityDescription": { + "type": "string" + }, + "FacilityActivityFeeDescription": { + "type": "string" + } + } + } + }, + "EVENT": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EventID": { + "type": "string" + }, + "EventName": { + "type": "string" + }, + "ResourceLink": { + "type": 
"string" + } + } + } + }, + "LINK": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityLinkID": { + "type": "string" + }, + "LinkType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "URL": { + "type": "string" + } + } + } + }, + "MEDIA": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityMediaID": { + "type": "string" + }, + "MediaType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Subtitle": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "EmbedCode": { + "type": "string" + }, + "Height": { + "type": "integer" + }, + "Width": { + "type": "integer" + }, + "IsPrimary": { + "type": "boolean" + }, + "IsPreview": { + "type": "boolean" + }, + "IsGallery": { + "type": "boolean" + }, + "URL": { + "type": "string" + }, + "Credits": { + "type": "string" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/facilityaddresses.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/facilityaddresses.json new file mode 100644 index 0000000000000..63e261c42534f --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/facilityaddresses.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "FacilityAddressID": { + "type": "string" + }, + "FacilityID": { + "type": "string" + }, + "FacilityAddressType": { + "type": "string" + }, + "FacilityStreetAddress1": { + "type": "string" + }, + "FacilityStreetAddress2": { + "type": "string" + }, + "FacilityStreetAddress3": { + "type": "string" + }, + "City": { + "type": "string" + }, + "PostalCode": { + "type": "string" + }, + "AddressStateCode": { + "type": "string" + }, + "AddressCountryCode": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/links.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/links.json new file mode 100644 index 0000000000000..89a6fb305417b --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/links.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "EntityLinkID": { + "type": "string" + }, + "LinkType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "URL": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/media.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/media.json new file mode 100644 index 0000000000000..b6d62c0972717 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/media.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "EntityMediaID": { + "type": "string" + }, + "MediaType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": 
"string" + }, + "Subtitle": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "EmbedCode": { + "type": "string" + }, + "Height": { + "type": "integer" + }, + "Width": { + "type": "integer" + }, + "IsPrimary": { + "type": "boolean" + }, + "IsPreview": { + "type": "boolean" + }, + "IsGallery": { + "type": "boolean" + }, + "URL": { + "type": "string" + }, + "Credits": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/organizations.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/organizations.json new file mode 100644 index 0000000000000..b5e732305d9ff --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/organizations.json @@ -0,0 +1,36 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "OrgID": { + "type": "string" + }, + "OrgName": { + "type": "string" + }, + "OrgImageURL": { + "type": "string" + }, + "OrgURLText": { + "type": "string" + }, + "OrgURLAddress": { + "type": "string" + }, + "OrgType": { + "type": "string" + }, + "OrgAbbrevName": { + "type": "string" + }, + "OrgJurisdictionType": { + "type": "string" + }, + "OrgParentID": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/permits.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/permits.json new file mode 100644 index 0000000000000..747d6a40a9674 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/permits.json @@ -0,0 +1,137 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "PermitEntranceID": { + "type": "string" + }, + "FacilityID": { + "type": "string" + }, + "PermitEntranceName": { + "type": "string" + }, + "PermitEntranceDescription": { + "type": "string" + }, + "District": { + "type": "string" + }, + "Town": { + "type": "string" + }, + "PermitEntranceAccessible": { + "type": "boolean" + }, + "Longitude": { + "type": "integer" + }, + "Latitude": { + "type": "integer" + }, + "GEOSJON": { + "type": "object", + "properties": { + "TYPE": { + "type": "string" + }, + "COORDINATES": { + "type": "array", + "items": { + "type": "number" + } + } + } + }, + "CreatedDate": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + }, + "ATTRIBUTES": { + "type": "array", + "items": { + "type": "object", + "properties": { + "AttributeID": { + "type": "integer" + }, + "AttributeName": { + "type": "string" + }, + "AttributeValue": { + "type": "string" + } + } + } + }, + "ENTITYMEDIA": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityMediaID": { + "type": "string" + }, + "MediaType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Subtitle": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "EmbedCode": { + "type": "string" + }, + "Height": { + "type": "integer" + }, + "Width": { + "type": "integer" + }, + "IsPrimary": { + "type": "boolean" + }, + "IsPreview": { + "type": "boolean" + }, + "IsGallery": { + "type": "boolean" + }, + "URL": { + "type": "string" + }, + "Credits": { + "type": "string" + } + } + } + }, + "ZONES": { + "type": "array", + "items": { + "type": "object", + "properties": { + "PermitEntranceZoneID": { + 
"type": "string" + }, + "Zone": { + "type": "string" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/recreationareaaddresses.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/recreationareaaddresses.json new file mode 100644 index 0000000000000..0f0df1016ce6c --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/recreationareaaddresses.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "RecAreaAddressID": { + "type": "string" + }, + "RecAreaID": { + "type": "string" + }, + "RecAreaAddressType": { + "type": "string" + }, + "RecAreaStreetAddress1": { + "type": "string" + }, + "RecAreaStreetAddress2": { + "type": "string" + }, + "RecAreaStreetAddress3": { + "type": "string" + }, + "City": { + "type": "string" + }, + "PostalCode": { + "type": "string" + }, + "AddressStateCode": { + "type": "string" + }, + "AddressCountryCode": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/recreationareas.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/recreationareas.json new file mode 100644 index 0000000000000..c93c16be4a7ea --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/recreationareas.json @@ -0,0 +1,295 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "RecAreaID": { + "type": "string" + }, + "OrgRecAreaID": { + "type": "string" + }, + "ParentOrgID": { + "type": "string" + }, + "RecAreaName": { + "type": "string" + }, + "RecAreaDescription": { + "type": "string" + }, + "RecAreaFeeDescription": { + "type": "string" + }, + "RecAreaDirections": { + "type": "string" + }, + "RecAreaPhone": { + "type": "string" + }, + "RecAreaEmail": { + "type": "string" + }, + "RecAreaReservationURL": { + "type": "string" + }, + "RecAreaMapURL": { + "type": "string" + }, + "GEOJSON": { + "type": "object", + "properties": { + "TYPE": { + "type": "string" + }, + "COORDINATES": { + "type": ["array", "null"], + "items": { + "type": ["string", "number", "null"] + } + } + } + }, + "RecAreaLongitude": { + "type": "number" + }, + "RecAreaLatitude": { + "type": "number" + }, + "StayLimit": { + "type": "string" + }, + "Keywords": { + "type": "string" + }, + "Reservable": { + "type": "boolean" + }, + "Enabled": { + "type": "boolean" + }, + "LastUpdatedDate": { + "type": "string" + }, + "ORGANIZATION": { + "type": "array", + "items": { + "type": "object", + "properties": { + "OrgID": { + "type": "string" + }, + "OrgName": { + "type": "string" + }, + "OrgImageURL": { + "type": "string" + }, + "OrgURLText": { + "type": "string" + }, + "OrgURLAddress": { + "type": "string" + }, + "OrgType": { + "type": "string" + }, + "OrgAbbrevName": { + "type": "string" + }, + "OrgJurisdictionType": { + "type": "string" + }, + "OrgParentID": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } + } + }, + "FACILITY": { + "type": "array", + "items": { + "type": "object", + "properties": { + "FacilityID": { + "type": "string" + }, + "FacilityName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } + } + }, + "RECAREAADDRESS": { + "type": "array", + "items": { + "type": "object", + "properties": { + "RecAreaAddressID": { + "type": "string" + }, + 
"RecAreaID": { + "type": "string" + }, + "RecAreaAddressType": { + "type": "string" + }, + "RecAreaStreetAddress1": { + "type": "string" + }, + "RecAreaStreetAddress2": { + "type": "string" + }, + "RecAreaStreetAddress3": { + "type": "string" + }, + "City": { + "type": "string" + }, + "PostalCode": { + "type": "string" + }, + "AddressStateCode": { + "type": "string" + }, + "AddressCountryCode": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + } + } + } + }, + "ACTIVITY": { + "type": "array", + "items": { + "type": "object", + "properties": { + "ActivityID": { + "type": "string" + }, + "ActivityParentID": { + "type": "string" + }, + "RecAreaID": { + "type": "string" + }, + "ActivityName": { + "type": "string" + }, + "RecAreaActivityDescription": { + "type": "string" + }, + "RecAreaActivityFeeDescription": { + "type": "string" + } + } + } + }, + "EVENT": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EventID": { + "type": "string" + }, + "EventName": { + "type": "string" + }, + "ResourceLink": { + "type": "string" + } + } + } + }, + "MEDIA": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityMediaID": { + "type": "string" + }, + "MediaType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Subtitle": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "EmbedCode": { + "type": "string" + }, + "Height": { + "type": "integer" + }, + "Width": { + "type": "integer" + }, + "IsPrimary": { + "type": "boolean" + }, + "IsPreview": { + "type": "boolean" + }, + "IsGallery": { + "type": "boolean" + }, + "URL": { + "type": "string" + }, + "Credits": { + "type": "string" + } + } + } + }, + "LINK": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityLinkID": { + "type": "string" + }, + "LinkType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "URL": { + "type": "string" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/tours.json b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/tours.json new file mode 100644 index 0000000000000..dc40ea75dd7e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/schemas/tours.json @@ -0,0 +1,114 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "TourID": { + "type": "string" + }, + "FacilityID": { + "type": "string" + }, + "TourName": { + "type": "string" + }, + "TourType": { + "type": "string" + }, + "TourDescription": { + "type": "string" + }, + "TourDuration": { + "type": "integer" + }, + "TourAccessible": { + "type": "boolean" + }, + "CreatedDate": { + "type": "string" + }, + "LastUpdatedDate": { + "type": "string" + }, + "ATTRIBUTES": { + "type": "array", + "items": { + "type": "object", + "properties": { + "AttributeID": { + "type": "integer" + }, + "AttributeName": { + "type": "string" + }, + "AttributeValue": { + "type": "string" + } + } + } + }, + "ENTITYMEDIA": { + "type": "array", + "items": { + "type": "object", + "properties": { + "EntityMediaID": { + "type": "string" + }, + "MediaType": { + "type": "string" + }, + "EntityID": { + "type": "string" + }, + "EntityType": { + "type": "string" + }, + "Title": { + "type": 
"string" + }, + "Subtitle": { + "type": "string" + }, + "Description": { + "type": "string" + }, + "EmbedCode": { + "type": "string" + }, + "Height": { + "type": "integer" + }, + "Width": { + "type": "integer" + }, + "IsPrimary": { + "type": "boolean" + }, + "IsPreview": { + "type": "boolean" + }, + "IsGallery": { + "type": "boolean" + }, + "URL": { + "type": "string" + }, + "Credits": { + "type": "string" + } + } + } + }, + "MEMBERTOURS": { + "type": ["null", "array"], + "items": { + "type": "object", + "properties": { + "MemberTourID": { + "type": ["string", "integer", "null"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/source.py b/airbyte-integrations/connectors/source-recreation/source_recreation/source.py new file mode 100644 index 0000000000000..2b4ac4c5331ba --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceRecreation(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "recreation.yaml"}) diff --git a/airbyte-integrations/connectors/source-recreation/source_recreation/spec.yaml b/airbyte-integrations/connectors/source-recreation/source_recreation/spec.yaml new file mode 100644 index 0000000000000..dcef1ce1b2ec8 --- /dev/null +++ b/airbyte-integrations/connectors/source-recreation/source_recreation/spec.yaml @@ -0,0 +1,17 @@ +documentationUrl: "https://docs.airbyte.com/integrations/sources/recreation" +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Recreation Spec + type: object + required: + - apikey + additionalProperties: true + properties: + apikey: + title: API Key + type: string + description: API Key + airbyte_secret: true + query_campsites: + title: Query Campsite + type: string diff --git a/airbyte-integrations/connectors/source-recruitee/.dockerignore b/airbyte-integrations/connectors/source-recruitee/.dockerignore new file mode 100644 index 0000000000000..0d96d4b0be98c --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_recruitee +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-recruitee/Dockerfile b/airbyte-integrations/connectors/source-recruitee/Dockerfile new file mode 100644 index 0000000000000..212677e681b01 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_recruitee ./source_recruitee + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-recruitee diff --git a/airbyte-integrations/connectors/source-recruitee/README.md b/airbyte-integrations/connectors/source-recruitee/README.md new file mode 100644 index 0000000000000..d5528ee0dd1b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/README.md @@ -0,0 +1,79 @@ +# Recruitee Source + +This is the repository for the Recruitee configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/recruitee). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-recruitee:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/recruitee) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_recruitee/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source recruitee test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-recruitee:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-recruitee:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-recruitee:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recruitee:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-recruitee:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-recruitee:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. 
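Before running the full acceptance suite, it can help to smoke-test the connector straight from Python. The sketch below is illustrative only: it assumes the connector package and the Airbyte CDK are installed locally (for example via `pip install -e .` from the connector root) and that `secrets/config.json` exists as described above; it is not part of the connector's code.

```
import json
import logging

from source_recruitee import SourceRecruitee

logger = logging.getLogger("airbyte")

# Load the same config the docker commands above point at.
with open("secrets/config.json") as f:
    config = json.load(f)

source = SourceRecruitee()

# Roughly equivalent to `docker run ... check --config /secrets/config.json`.
print(source.check(logger, config))

# Roughly equivalent to `... discover --config ...`: list the declared streams.
catalog = source.discover(logger, config)
print([stream.name for stream in catalog.streams])
```

If `check` succeeds and `discover` lists `candidates`, `offers`, and `departments`, the acceptance tests configured in `acceptance-test-config.yml` should have what they need.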
+If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-recruitee:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-recruitee:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-recruitee/__init__.py b/airbyte-integrations/connectors/source-recruitee/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml b/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml new file mode 100644 index 0000000000000..8a709b59b3ff3 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-recruitee:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_recruitee/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-recruitee/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-recruitee/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-recruitee/build.gradle b/airbyte-integrations/connectors/source-recruitee/build.gradle new file mode 100644 index 0000000000000..3912692e17238 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_recruitee' +} diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/__init__.py b/airbyte-integrations/connectors/source-recruitee/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-recruitee/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..9fa2adfa4be61 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "candidates": { + "id": "invalid-integer-id" + } +} diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-recruitee/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-recruitee/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..a6354d9400b60 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "candidates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "offers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "departments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-recruitee/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..ec875b1d5e66a --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "invalid key" +} diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-recruitee/integration_tests/sample_config.json new file mode 100644 index 0000000000000..2e9b9bf580b6e --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "", + "company_id": 123 +} diff --git a/airbyte-integrations/connectors/source-recruitee/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-recruitee/integration_tests/sample_state.json new file mode 100644 index 0000000000000..31ba869222634 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/integration_tests/sample_state.json @@ -0,0 +1,13 @@ +{ + "departments": { + "grouped_translations": { + "en": { + "name": "Sales" + } + }, + "id": 123, + "name": "Sales", + "offers_count": 0, + "status": null + } +} diff --git a/airbyte-integrations/connectors/source-recruitee/main.py b/airbyte-integrations/connectors/source-recruitee/main.py new file mode 100644 index 
0000000000000..4d8c9929f74e1 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_recruitee import SourceRecruitee + +if __name__ == "__main__": + source = SourceRecruitee() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-recruitee/requirements.txt b/airbyte-integrations/connectors/source-recruitee/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-recruitee/setup.py b/airbyte-integrations/connectors/source-recruitee/setup.py new file mode 100644 index 0000000000000..0ee9b4beb0032 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_recruitee", + description="Source implementation for Recruitee.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/__init__.py b/airbyte-integrations/connectors/source-recruitee/source_recruitee/__init__.py new file mode 100644 index 0000000000000..d35ace2fc2003 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceRecruitee + +__all__ = ["SourceRecruitee"] diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/recruitee.yaml b/airbyte-integrations/connectors/source-recruitee/source_recruitee/recruitee.yaml new file mode 100644 index 0000000000000..4e6f02e79abd5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/recruitee.yaml @@ -0,0 +1,56 @@ +version: "0.1.0" + +definitions: + schema_loader: + type: JsonSchema + file_path: "./source_sentry/schemas/{{ options.name }}.json" + selector: + extractor: + field_pointer: + - "{{ options['name'] }}" + requester: + url_base: "https://api.recruitee.com/c/{{ config.company_id }}" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + primary_key: "id" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + candidates_stream: + # Docs: https://docs.recruitee.com/reference/candidates-get + $ref: "*ref(definitions.base_stream)" + $options: + name: "candidates" + path: "/candidates" + offers_stream: + # Docs: https://docs.recruitee.com/reference/offers-get + $ref: "*ref(definitions.base_stream)" + $options: + name: "offers" + path: "/offers" + departments_stream: + # Docs: https://docs.recruitee.com/reference/departments-get + $ref: "*ref(definitions.base_stream)" + $options: + name: "departments" + path: "/departments" + +streams: + - "*ref(definitions.candidates_stream)" + - "*ref(definitions.offers_stream)" + - "*ref(definitions.departments_stream)" + +check: + stream_names: + - "candidates" + - "offers" + - "departments" diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/candidates.json b/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/candidates.json new file mode 100644 index 0000000000000..c1a6c3aab864a --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/candidates.json @@ -0,0 +1,154 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "rating": { + "type": "integer" + }, + "created_at": { + "type": "string" + }, + "phones": { + "type": "array", + "items": { + "type": "string" + } + }, + "placements": { + "type": "array", + "items": { + "type": "object", + "properties": { + "stage_id": { + "type": ["null", "integer"] + }, + "job_starts_at": { + "type": ["null", "string"] + }, + "created_at": { + "type": "string" + }, + "language": { + "type": ["null", "string"] + }, + "hired_at": { + "type": ["null", "string"] + }, + "offer_id": { + "type": "integer" + }, + "candidate_id": { + "type": ["null", "integer"] + }, + "overdue_diff": { + "type": ["null", "string"] + }, + "updated_at": { + "type": "string" + }, + "ratings": { + "type": "object" + }, + "overdue_at": { + "type": ["null", "string"] + }, + "positive_ratings": { + "type": ["null", "integer"] + }, + "id": { + "type": "integer" + }, + "position": { + "type": ["null", "integer"] + } + } + } + }, + "source": { + "type": ["null", "string"] + }, + "adminapp_url": { + "type": ["null", "string"] + }, + "example": { + "type": "boolean" + }, + "emails": { + "type": "array", + "items": { + "type": "string" + } + }, + "pending_result_request": { + "type": "boolean" + }, + "has_avatar": { + "type": "boolean" + }, + "updated_at": { + "type": "string" + }, + 
"ratings": { + "type": "object", + "properties": { + "yes": { + "type": "integer" + } + } + }, + "viewed": { + "type": "boolean" + }, + "positive_ratings": { + "type": ["null", "integer"] + }, + "id": { + "type": "integer" + }, + "ratings_count": { + "type": "integer" + }, + "tasks_count": { + "type": "integer" + }, + "upcoming_event": { + "type": "boolean" + }, + "unread_notifications": { + "type": "boolean" + }, + "photo_thumb_url": { + "type": ["null", "string"] + }, + "initials": { + "type": ["null", "string"] + }, + "followed": { + "type": "boolean" + }, + "my_upcoming_event": { + "type": "boolean" + }, + "referrer": { + "type": ["null", "string"] + }, + "my_pending_result_request": { + "type": "boolean" + }, + "admin_id": { + "type": ["null", "integer"] + }, + "my_last_rating": { + "type": ["null", "string"] + }, + "notes_count": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "last_message_at": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/departments.json b/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/departments.json new file mode 100644 index 0000000000000..5dad6879caed2 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/departments.json @@ -0,0 +1,31 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "offers_count": { + "type": "integer" + }, + "grouped_translations": { + "type": "object", + "properties": { + "en": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + } + } + }, + "id": { + "type": "integer" + }, + "status": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/offers.json b/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/offers.json new file mode 100644 index 0000000000000..b7400d1b9a5bd --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/schemas/offers.json @@ -0,0 +1,149 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "has_active_campaign": { + "type": "boolean" + }, + "employment_type": { + "type": ["null", "string"] + }, + "eeo_settings": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "created_at": { + "type": "string" + }, + "title": { + "type": "string" + }, + "adminapp_url": { + "type": ["null", "string"] + }, + "example": { + "type": "boolean" + }, + "mailbox_email": { + "type": ["null", "string"] + }, + "pipeline_template": { + "type": ["null", "object"] + }, + "careers_url": { + "type": "string" + }, + "disqualified_candidates_count": { + "type": "integer" + }, + "updated_at": { + "type": "string" + }, + "state_name": { + "type": ["null", "string"] + }, + "street": { + "type": ["null", "string"] + }, + "id": { + "type": "integer" + }, + "hiring_manager_id": { + "type": ["null", "integer"] + }, + "department": { + "type": ["null", "string"] + }, + "published_at": { + "type": ["null", "string"] + }, + "slug": { + "type": "string" + }, + "closed_at": { + "type": ["null", "string"] + }, + "department_id": { + "type": "integer" + }, + "qualified_candidates_count": { + "type": "integer" + }, + "kind": { + "type": "string" + }, + "recruiter_id": { + "type": ["null", "integer"] + }, + "enabled_languages": { + "type": "array", + "items": { + "type": 
"object", + "properties": { + "code": { + "type": "string" + }, + "name": { + "type": "string" + }, + "native_name": { + "type": "string" + } + } + } + }, + "followed": { + "type": "boolean" + }, + "url": { + "type": "string" + }, + "pipeline": { + "type": "boolean" + }, + "country_code": { + "type": ["null", "string"] + }, + "followers": { + "type": "array" + }, + "number_of_openings": { + "type": ["null", "integer"] + }, + "candidates_count": { + "type": "integer" + }, + "hired_candidates_count": { + "type": "integer" + }, + "job_scheduler": { + "type": ["null", "string"] + }, + "guid": { + "type": "string" + }, + "enabled_for_referrals": { + "type": "boolean" + }, + "location": { + "type": "string" + }, + "position": { + "type": ["null", "integer"] + }, + "postal_code": { + "type": ["null", "string"] + }, + "state_code": { + "type": ["null", "string"] + }, + "offer_tags": { + "type": "array" + }, + "status": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/source.py b/airbyte-integrations/connectors/source-recruitee/source_recruitee/source.py new file mode 100644 index 0000000000000..b5bde6d0ffe55 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceRecruitee(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "recruitee.yaml"}) diff --git a/airbyte-integrations/connectors/source-recruitee/source_recruitee/spec.yaml b/airbyte-integrations/connectors/source-recruitee/source_recruitee/spec.yaml new file mode 100644 index 0000000000000..ad328d15781d5 --- /dev/null +++ b/airbyte-integrations/connectors/source-recruitee/source_recruitee/spec.yaml @@ -0,0 +1,19 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/recruitee +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Recruitee Spec + type: object + required: + - api_key + - company_id + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: Recruitee API Key. See here. + airbyte_secret: true + company_id: + title: Company ID + type: integer + description: Recruitee Company ID. You can also find this ID on the Recruitee API tokens page. 
diff --git a/airbyte-integrations/connectors/source-redshift/acceptance-test-config.yml b/airbyte-integrations/connectors/source-redshift/acceptance-test-config.yml new file mode 100644 index 0000000000000..269041c35e8fc --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-redshift:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-redshift/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-redshift/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-redshift/build.gradle b/airbyte-integrations/connectors/source-redshift/build.gradle index 5e8a4af941334..02ebd41a9977f 100644 --- a/airbyte-integrations/connectors/source-redshift/build.gradle +++ b/airbyte-integrations/connectors/source-redshift/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-redshift/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-redshift/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-redshift/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-redshift/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..fcf524915eccb --- /dev/null +++ b/airbyte-integrations/connectors/source-redshift/src/test-integration/resources/expected_spec.json @@ -0,0 +1,68 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/destinations/redshift", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Redshift Source Spec", + "type": "object", + "required": ["host", "port", "database", "username", "password"], + "properties": { + "host": { + "title": "Host", + "description": "Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com).", + "type": "string", + "order": 1 + }, + "port": { + "title": "Port", + "description": "Port of the database.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 5439, + "examples": ["5439"], + "order": 2 + }, + "database": { + "title": "Database", + "description": "Name of the database.", + "type": "string", + "examples": ["master"], + "order": 3 + }, + "schemas": { + "title": "Schemas", + "description": "The list of schemas to sync from. Specify one or more explicitly or keep empty to process all schemas. Schema names are case sensitive.", + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0, + "uniqueItems": true, + "examples": ["public"], + "order": 4 + }, + "username": { + "title": "Username", + "description": "Username to use to access the database.", + "type": "string", + "order": 5 + }, + "password": { + "title": "Password", + "description": "Password associated with the username.", + "type": "string", + "airbyte_secret": true, + "order": 6 + }, + "jdbc_url_params": { + "title": "JDBC URL Params", + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "type": "string", + "order": 7 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java index 6fb801b4d8b9d..2f415395bcb33 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/AbstractDbSource.java @@ -24,7 +24,6 @@ import io.airbyte.db.IncrementalUtils; import io.airbyte.db.jdbc.JdbcDatabase; import io.airbyte.integrations.BaseConnector; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.base.AirbyteTraceMessageUtility; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.relationaldb.InvalidCursorInfoUtil.InvalidCursorInfo; @@ -40,6 +39,7 @@ import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.CommonField; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; @@ -131,21 +131,20 @@ public AirbyteCatalog discover(final JsonNode config) throws Exception { } /** - * Creates a list of AirbyteMessageIterators with all the streams selected in a configured - * catalog + * Creates a list of AirbyteMessageIterators with all the streams selected in a configured catalog * - * @param config - integration-specific configuration object as json. e.g. { "username": - * "airbyte", "password": "super secure" } + * @param config - integration-specific configuration object as json. e.g. { "username": "airbyte", + * "password": "super secure" } * @param catalog - schema of the incoming messages. - * @param state - state of the incoming messages. + * @param state - state of the incoming messages. 
* @return AirbyteMessageIterator with all the streams that are to be synced * @throws Exception */ @Override public AutoCloseableIterator read(final JsonNode config, - final ConfiguredAirbyteCatalog catalog, - final JsonNode state) - throws Exception { + final ConfiguredAirbyteCatalog catalog, + final JsonNode state) + throws Exception { final StateManager stateManager = StateManagerFactory.createStateManager(getSupportedStateType(config), deserializeInitialState(state, config), catalog); @@ -182,8 +181,8 @@ public AutoCloseableIterator read(final JsonNode config, } private void validateCursorFieldForIncrementalTables( - final Map>> tableNameToTable, - final ConfiguredAirbyteCatalog catalog) { + final Map>> tableNameToTable, + final ConfiguredAirbyteCatalog catalog) { final List tablesWithInvalidCursor = new ArrayList<>(); for (final ConfiguredAirbyteStream airbyteStream : catalog.getStreams()) { final AirbyteStream stream = airbyteStream.getStream(); @@ -218,12 +217,13 @@ private void validateCursorFieldForIncrementalTables( if (!tablesWithInvalidCursor.isEmpty()) { throw new ConfigErrorException( - InvalidCursorInfoUtil.getInvalidCursorConfigMessage(tablesWithInvalidCursor)) ; + InvalidCursorInfoUtil.getInvalidCursorConfigMessage(tablesWithInvalidCursor)); } } private List>> discoverWithoutSystemTables( - final Database database) throws Exception { + final Database database) + throws Exception { final Set systemNameSpaces = getExcludedInternalNameSpaces(); final List>> discoveredTables = discoverInternal(database); return (systemNameSpaces == null || systemNameSpaces.isEmpty() ? discoveredTables @@ -233,11 +233,11 @@ private List>> discoverWithoutSystemTables( } private List> getFullRefreshIterators( - final Database database, - final ConfiguredAirbyteCatalog catalog, - final Map>> tableNameToTable, - final StateManager stateManager, - final Instant emittedAt) { + final Database database, + final ConfiguredAirbyteCatalog catalog, + final Map>> tableNameToTable, + final StateManager stateManager, + final Instant emittedAt) { return getSelectedIterators( database, catalog, @@ -248,11 +248,11 @@ private List> getFullRefreshIterators( } protected List> getIncrementalIterators( - final Database database, - final ConfiguredAirbyteCatalog catalog, - final Map>> tableNameToTable, - final StateManager stateManager, - final Instant emittedAt) { + final Database database, + final ConfiguredAirbyteCatalog catalog, + final Map>> tableNameToTable, + final StateManager stateManager, + final Instant emittedAt) { return getSelectedIterators( database, catalog, @@ -265,23 +265,21 @@ protected List> getIncrementalIterators( /** * Creates a list of read iterators for each stream within an ConfiguredAirbyteCatalog * - * @param database Source Database - * @param catalog List of streams (e.g. database tables or API endpoints) with settings - * on sync mode + * @param database Source Database + * @param catalog List of streams (e.g. 
database tables or API endpoints) with settings on sync mode * @param tableNameToTable Mapping of table name to table - * @param stateManager Manager used to track the state of data synced by the connector - * @param emittedAt Time when data was emitted from the Source database - * @param selector essentially a boolean that verifies if configuredStream has selected a - * sync mode + * @param stateManager Manager used to track the state of data synced by the connector + * @param emittedAt Time when data was emitted from the Source database + * @param selector essentially a boolean that verifies if configuredStream has selected a sync mode * @return List of AirbyteMessageIterators containing all iterators for a catalog */ private List> getSelectedIterators( - final Database database, - final ConfiguredAirbyteCatalog catalog, - final Map>> tableNameToTable, - final StateManager stateManager, - final Instant emittedAt, - final Predicate selector) { + final Database database, + final ConfiguredAirbyteCatalog catalog, + final Map>> tableNameToTable, + final StateManager stateManager, + final Instant emittedAt, + final Predicate selector) { final List> iteratorList = new ArrayList<>(); for (final ConfiguredAirbyteStream airbyteStream : catalog.getStreams()) { if (selector.test(airbyteStream)) { @@ -312,18 +310,18 @@ private List> getSelectedIterators( /** * ReadIterator is used to retrieve records from a source connector * - * @param database Source Database + * @param database Source Database * @param airbyteStream represents an ingestion source (e.g. API endpoint or database table) - * @param table information in tabular format - * @param stateManager Manager used to track the state of data synced by the connector - * @param emittedAt Time when data was emitted from the Source database + * @param table information in tabular format + * @param stateManager Manager used to track the state of data synced by the connector + * @param emittedAt Time when data was emitted from the Source database * @return */ private AutoCloseableIterator createReadIterator(final Database database, - final ConfiguredAirbyteStream airbyteStream, - final TableInfo> table, - final StateManager stateManager, - final Instant emittedAt) { + final ConfiguredAirbyteStream airbyteStream, + final TableInfo> table, + final StateManager stateManager, + final Instant emittedAt) { final String streamName = airbyteStream.getStream().getName(); final String namespace = airbyteStream.getStream().getNamespace(); final AirbyteStreamNameNamespacePair pair = new AirbyteStreamNameNamespacePair(streamName, @@ -393,21 +391,20 @@ private AutoCloseableIterator createReadIterator(final Database } /** - * @param database Source Database - * @param airbyteStream represents an ingestion source (e.g. API endpoint or database - * table) + * @param database Source Database + * @param airbyteStream represents an ingestion source (e.g. 
API endpoint or database table) * @param selectedDatabaseFields subset of database fields selected for replication - * @param table information in tabular format - * @param cursorInfo state of where to start the sync from - * @param emittedAt Time when data was emitted from the Source database + * @param table information in tabular format + * @param cursorInfo state of where to start the sync from + * @param emittedAt Time when data was emitted from the Source database * @return AirbyteMessage Iterator that */ private AutoCloseableIterator getIncrementalStream(final Database database, - final ConfiguredAirbyteStream airbyteStream, - final List selectedDatabaseFields, - final TableInfo> table, - final CursorInfo cursorInfo, - final Instant emittedAt) { + final ConfiguredAirbyteStream airbyteStream, + final List selectedDatabaseFields, + final TableInfo> table, + final CursorInfo cursorInfo, + final Instant emittedAt) { final String streamName = airbyteStream.getStream().getName(); final String namespace = airbyteStream.getStream().getNamespace(); final String cursorField = IncrementalUtils.getCursorField(airbyteStream); @@ -435,21 +432,21 @@ private AutoCloseableIterator getIncrementalStream(final Databas /** * Creates a AirbyteMessageIterator that contains all records for a database source connection * - * @param database Source Database - * @param streamName name of an individual stream in which a stream represents a - * source (e.g. API endpoint or database table) - * @param namespace Namespace of the database (e.g. public) + * @param database Source Database + * @param streamName name of an individual stream in which a stream represents a source (e.g. API + * endpoint or database table) + * @param namespace Namespace of the database (e.g. public) * @param selectedDatabaseFields List of all interested database column names - * @param table information in tabular format - * @param emittedAt Time when data was emitted from the Source database + * @param table information in tabular format + * @param emittedAt Time when data was emitted from the Source database * @return AirbyteMessageIterator with all records for a database source */ private AutoCloseableIterator getFullRefreshStream(final Database database, - final String streamName, - final String namespace, - final List selectedDatabaseFields, - final TableInfo> table, - final Instant emittedAt) { + final String streamName, + final String namespace, + final List selectedDatabaseFields, + final TableInfo> table, + final Instant emittedAt) { final AutoCloseableIterator queryStream = queryTableFullRefresh(database, selectedDatabaseFields, table.getNameSpace(), table.getName()); @@ -461,10 +458,10 @@ private String getFullyQualifiedTableName(final String nameSpace, final String t } private AutoCloseableIterator getMessageIterator( - final AutoCloseableIterator recordIterator, - final String streamName, - final String namespace, - final long emittedAt) { + final AutoCloseableIterator recordIterator, + final String streamName, + final String namespace, + final long emittedAt) { return AutoCloseableIterators.transform(recordIterator, r -> new AirbyteMessage() .withType(Type.RECORD) .withRecord(new AirbyteRecordMessage() @@ -520,8 +517,9 @@ private Field toField(final CommonField field) { } } - private void assertColumnsWithSameNameAreSame(final String nameSpace, final String tableName, - final List> columns) { + private void assertColumnsWithSameNameAreSame(final String nameSpace, + final String tableName, + final List> columns) { 
columns.stream() .collect(Collectors.groupingBy(CommonField::getName)) .values() @@ -540,19 +538,21 @@ private void assertColumnsWithSameNameAreSame(final String nameSpace, final Stri /** * @param database - The database where from privileges for tables will be consumed - * @param schema - The schema where from privileges for tables will be consumed - * @return Set with privileges for tables for current DB-session user The method is responsible - * for SELECT-ing the table with privileges. In some cases such SELECT doesn't require (e.g. in - * Oracle DB - the schema is the user, you cannot REVOKE a privilege on a table from its owner). + * @param schema - The schema where from privileges for tables will be consumed + * @return Set with privileges for tables for current DB-session user The method is responsible for + * SELECT-ing the table with privileges. In some cases such SELECT doesn't require (e.g. in + * Oracle DB - the schema is the user, you cannot REVOKE a privilege on a table from its + * owner). */ protected Set getPrivilegesTableForCurrentUser(final JdbcDatabase database, - final String schema) throws SQLException { + final String schema) + throws SQLException { return Collections.emptySet(); } /** - * Map a database implementation-specific configuration to json object that adheres to the - * database config spec. See resources/spec.json. + * Map a database implementation-specific configuration to json object that adheres to the database + * config spec. See resources/spec.json. * * @param config database implementation-specific configuration. * @return database spec config @@ -599,31 +599,32 @@ protected abstract List> getCheckOperations * @throws Exception access to the database might lead to an exceptions. */ protected abstract List>> discoverInternal( - final Database database) + final Database database) throws Exception; /** * Discovers all available tables within a schema in the source database. * * @param database - source database - * @param schema - source schema + * @param schema - source schema * @return list of source tables * @throws Exception - access to the database might lead to exceptions. */ protected abstract List>> discoverInternal( - final Database database, String schema) + final Database database, + String schema) throws Exception; /** * Discover Primary keys for each table and @return a map of namespace.table name to their * associated list of primary key fields. * - * @param database source database + * @param database source database * @param tableInfos list of tables * @return map of namespace.table and primary key fields. */ protected abstract Map> discoverPrimaryKeys(Database database, - List>> tableInfos); + List>> tableInfos); /** * Returns quote symbol of the database @@ -635,16 +636,16 @@ protected abstract Map> discoverPrimaryKeys(Database databa /** * Read all data from a table. * - * @param database source database + * @param database source database * @param columnNames interested column names - * @param schemaName table namespace - * @param tableName target table + * @param schemaName table namespace + * @param tableName target table * @return iterator with read data */ protected abstract AutoCloseableIterator queryTableFullRefresh(final Database database, - final List columnNames, - final String schemaName, - final String tableName); + final List columnNames, + final String schemaName, + final String tableName); /** * Read incremental data from a table. 
Incremental read should return only records where cursor @@ -655,16 +656,16 @@ protected abstract AutoCloseableIterator queryTableFullRefresh(final D * @return iterator with read data */ protected abstract AutoCloseableIterator queryTableIncremental(Database database, - List columnNames, - String schemaName, - String tableName, - CursorInfo cursorInfo, - DataType cursorFieldType); + List columnNames, + String schemaName, + String tableName, + CursorInfo cursorInfo, + DataType cursorFieldType); /** * When larger than 0, the incremental iterator will emit intermediate state for every N records. - * Please note that if intermediate state emission is enabled, the incremental query must be - * ordered by the cursor field. + * Please note that if intermediate state emission is enabled, the incremental query must be ordered + * by the cursor field. */ protected int getStateEmissionFrequency() { return 0; @@ -686,11 +687,11 @@ private Database createDatabaseInternal(final JsonNode sourceConfig) throws Exce * Deserializes the state represented as JSON into an object representation. * * @param initialStateJson The state as JSON. - * @param config The connector configuration. + * @param config The connector configuration. * @return The deserialized object representation of the state. */ protected List deserializeInitialState(final JsonNode initialStateJson, - final JsonNode config) { + final JsonNode config) { final Optional typedState = StateMessageHelper.getTypedState(initialStateJson, featureFlags.useStreamCapableState()); return typedState.map((state) -> { diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java index 1e898a7061043..cca8d5da5c8c4 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/CdcStateManager.java @@ -5,8 +5,8 @@ package io.airbyte.integrations.source.relationaldb; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.models.CdcState; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.Collections; import java.util.Set; import org.slf4j.Logger; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/RelationalDbQueryUtils.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/RelationalDbQueryUtils.java index 975068ce37904..0222796860900 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/RelationalDbQueryUtils.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/RelationalDbQueryUtils.java @@ -1,3 +1,7 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + package io.airbyte.integrations.source.relationaldb; import com.fasterxml.jackson.databind.JsonNode; @@ -9,7 +13,7 @@ import java.util.stream.Stream; /** - * Utility class for methods to query a relational db. + * Utility class for methods to query a relational db. 
*/ public class RelationalDbQueryUtils { diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java index 738f7eba31930..75675bfc792b3 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIterator.java @@ -6,11 +6,11 @@ import com.google.common.collect.AbstractIterator; import io.airbyte.db.IncrementalUtils; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.JsonSchemaPrimitive; import java.util.Iterator; import java.util.Objects; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java index df8d1200b5b5e..a71ab96c341d4 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/AbstractStateManager.java @@ -4,9 +4,9 @@ package io.airbyte.integrations.source.relationaldb.state; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CursorInfo; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.Collection; import java.util.List; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java index ca5c0504d9e9b..3539fc9219b42 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/CursorManager.java @@ -5,8 +5,8 @@ package io.airbyte.integrations.source.relationaldb.state; import com.google.common.annotations.VisibleForTesting; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CursorInfo; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.util.Collection; @@ -93,7 +93,7 @@ protected Map createCursorInfoMap( final Set allStreamNames = catalog.getStreams() .stream() 
.map(ConfiguredAirbyteStream::getStream) - .map(AirbyteStreamNameNamespacePair::fromAirbyteSteam) + .map(AirbyteStreamNameNamespacePair::fromAirbyteStream) .collect(Collectors.toSet()); allStreamNames.addAll(streamSupplier.get().stream().map(namespacePairFunction).filter(Objects::nonNull).collect(Collectors.toSet())); diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java index ee170e5d518c8..548c171797517 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManager.java @@ -10,7 +10,6 @@ import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CdcStateManager; import io.airbyte.integrations.source.relationaldb.models.CdcState; import io.airbyte.integrations.source.relationaldb.models.DbState; @@ -18,6 +17,7 @@ import io.airbyte.protocol.models.AirbyteGlobalState; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.StreamDescriptor; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java index a1e147e76d055..669a07fdb72df 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/LegacyStateManager.java @@ -5,12 +5,12 @@ package io.airbyte.integrations.source.relationaldb.state; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CdcStateManager; import io.airbyte.integrations.source.relationaldb.models.DbState; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.List; import java.util.Objects; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java index 130a520e98b23..493eafbffccf5 100644 --- 
a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateGeneratorUtils.java @@ -6,13 +6,13 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CursorInfo; import io.airbyte.integrations.source.relationaldb.models.DbState; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.protocol.models.AirbyteGlobalState; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.StreamDescriptor; import java.util.Collections; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java index 3039758f97465..07593cfe923d5 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StateManager.java @@ -5,10 +5,10 @@ package io.airbyte.integrations.source.relationaldb.state; import com.google.common.base.Preconditions; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CdcStateManager; import io.airbyte.integrations.source.relationaldb.CursorInfo; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.Map; import java.util.Optional; import org.slf4j.Logger; diff --git a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java index 2d1cd66673d18..f4169006b62ab 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java +++ b/airbyte-integrations/connectors/source-relational-db/src/main/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManager.java @@ -10,11 +10,11 @@ import static io.airbyte.integrations.source.relationaldb.state.StateGeneratorUtils.NAME_NAMESPACE_PAIR_FUNCTION; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CdcStateManager; import io.airbyte.integrations.source.relationaldb.CursorInfo; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import java.util.List; diff --git 
a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java index 088f46fdfd5be..26873ac039c87 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/StateDecoratingIteratorTest.java @@ -13,12 +13,12 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.util.MoreIterators; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.state.StateManager; import io.airbyte.protocol.models.AirbyteMessage; import io.airbyte.protocol.models.AirbyteMessage.Type; import io.airbyte.protocol.models.AirbyteRecordMessage; import io.airbyte.protocol.models.AirbyteStateMessage; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.JsonSchemaPrimitive; import java.sql.SQLException; import java.util.Collections; diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java index 5f85d99be4d8e..f3e7f62df0f5f 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/CursorManagerTest.java @@ -15,9 +15,9 @@ import static io.airbyte.integrations.source.relationaldb.state.StateTestConstants.getStream; import static org.junit.jupiter.api.Assertions.assertEquals; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.CursorInfo; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import java.util.Collections; import java.util.Optional; import java.util.function.Function; diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java index d342347fbc0ef..5124df49ab91c 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/GlobalStateManagerTest.java @@ -17,7 +17,6 @@ import static org.mockito.Mockito.mock; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.models.CdcState; import io.airbyte.integrations.source.relationaldb.models.DbState; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; @@ -25,6 +24,7 @@ import 
io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java index 1e6ac72d25b3f..50d2247592314 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StateTestConstants.java @@ -4,9 +4,9 @@ package io.airbyte.integrations.source.relationaldb.state; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.util.Collections; diff --git a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java index e2733bfbbb924..1afd609c77fbd 100644 --- a/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java +++ b/airbyte-integrations/connectors/source-relational-db/src/test/java/io/airbyte/integrations/source/relationaldb/state/StreamStateManagerTest.java @@ -20,12 +20,12 @@ import com.google.common.collect.Lists; import io.airbyte.commons.json.Jsons; -import io.airbyte.integrations.base.AirbyteStreamNameNamespacePair; import io.airbyte.integrations.source.relationaldb.models.DbState; import io.airbyte.integrations.source.relationaldb.models.DbStreamState; import io.airbyte.protocol.models.AirbyteStateMessage; import io.airbyte.protocol.models.AirbyteStateMessage.AirbyteStateType; import io.airbyte.protocol.models.AirbyteStream; +import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; import io.airbyte.protocol.models.ConfiguredAirbyteStream; diff --git a/airbyte-integrations/connectors/source-reply-io/.dockerignore b/airbyte-integrations/connectors/source-reply-io/.dockerignore new file mode 100644 index 0000000000000..93a48058214fd --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_reply_io +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-reply-io/Dockerfile b/airbyte-integrations/connectors/source-reply-io/Dockerfile new file mode 100644 index 0000000000000..8bc80fe136e8e --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/Dockerfile @@ -0,0 +1,38 @@ +FROM 
python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_reply_io ./source_reply_io + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-reply-io diff --git a/airbyte-integrations/connectors/source-reply-io/README.md b/airbyte-integrations/connectors/source-reply-io/README.md new file mode 100644 index 0000000000000..de3045a09dbdf --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/README.md @@ -0,0 +1,79 @@ +# Reply Io Source + +This is the repository for the Reply Io configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/reply-io). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-reply-io:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/reply-io) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_reply_io/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source reply-io test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-reply-io:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-reply-io:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
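+ +As a quick sanity check (a minimal sketch, assuming the `dev` tag produced by the build commands above), you can confirm the image that Gradle or Docker produced: +``` +docker images airbyte/source-reply-io +```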
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-reply-io:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-reply-io:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-reply-io:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-reply-io:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, run `./acceptance-test-docker.sh` from the connector root. + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-reply-io:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-reply-io:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-reply-io/__init__.py b/airbyte-integrations/connectors/source-reply-io/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-reply-io/acceptance-test-config.yml b/airbyte-integrations/connectors/source-reply-io/acceptance-test-config.yml new file mode 100644 index 0000000000000..f84c25d17ac47 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/acceptance-test-config.yml @@ -0,0 +1,25 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-reply-io:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_reply_io/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-reply-io/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-reply-io/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-reply-io/build.gradle b/airbyte-integrations/connectors/source-reply-io/build.gradle new file mode 100644 index 0000000000000..d08c75bfff8e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_reply_io' +} diff --git a/airbyte-integrations/connectors/source-reply-io/integration_tests/__init__.py b/airbyte-integrations/connectors/source-reply-io/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-reply-io/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-reply-io/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-reply-io/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-reply-io/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..0e377247e340c --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "campaigns", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "people", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "email_accounts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "templates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-reply-io/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-reply-io/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..89993539f0df2 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-reply-io/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-reply-io/integration_tests/sample_config.json new file mode 100644 index 0000000000000..c16cd6d40596a --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-reply-io/integration_tests/simple_catalog.json b/airbyte-integrations/connectors/source-reply-io/integration_tests/simple_catalog.json new file mode 100644 index 0000000000000..88f0beacf8b19 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/integration_tests/simple_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "templates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-reply-io/main.py b/airbyte-integrations/connectors/source-reply-io/main.py new file mode 100644 index 0000000000000..22a8dedb79093 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_reply_io import SourceReplyIo + +if __name__ == "__main__": + source = SourceReplyIo() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-reply-io/requirements.txt b/airbyte-integrations/connectors/source-reply-io/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
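+ +For local iteration without Docker (a quick sketch, assuming a Python 3.9 virtual environment inside the Airbyte monorepo and a populated `secrets/config.json`), the connector can also be exercised directly through its entrypoint: +``` +python -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +```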
diff --git a/airbyte-integrations/connectors/source-reply-io/setup.py b/airbyte-integrations/connectors/source-reply-io/setup.py new file mode 100644 index 0000000000000..98a9d6f930a0e --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_reply_io", + description="Source implementation for Reply Io.", + author="Elliot Trabac", + author_email="elliot.trabac1@gmail.com", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/__init__.py b/airbyte-integrations/connectors/source-reply-io/source_reply_io/__init__.py new file mode 100644 index 0000000000000..79e0a438dff86 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceReplyIo + +__all__ = ["SourceReplyIo"] diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/reply_io.yaml b/airbyte-integrations/connectors/source-reply-io/source_reply_io/reply_io.yaml new file mode 100644 index 0000000000000..682d187dc4c84 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/reply_io.yaml @@ -0,0 +1,90 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.reply.io/v1/" + http_method: "GET" + request_options_provider: + request_headers: + x-api-key: "{{ config['api_key'] }}" + error_handler: + type: "CompositeErrorHandler" + error_handlers: + - response_filters: + - predicate: "{{ 'Too much requests' in response }}" + action: RETRY + backoff_strategies: + - type: "ConstantBackoffStrategy" + backoff_time_in_seconds: 15 + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + paginated_stream: + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + extractor: + field_pointer: ["{{ options.name }}"] + paginator: + type: "DefaultPaginator" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: "PageIncrement" + page_size: 1000 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + url_base: + $ref: "*ref(definitions.requester.url_base)" + + campaigns_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "campaigns" + primary_key: "id" + path: "/campaigns" + email_accounts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "email_accounts" + primary_key: "id" + path: "/emailAccounts" + people_stream: + $ref: "*ref(definitions.paginated_stream)" + $options: + name: "people" + primary_key: "id" + path: "/people" + templates_stream: + retriever: + $ref: "*ref(definitions.base_stream.retriever)" + record_selector: + extractor: + field_pointer: ["userTemplates"] + $options: + name: "templates" + primary_key: "id" + path: "/templates" + +streams: + - 
"*ref(definitions.campaigns_stream)" + - "*ref(definitions.email_accounts_stream)" + - "*ref(definitions.people_stream)" + - "*ref(definitions.templates_stream)" + +check: + stream_names: + - "campaigns" diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/campaigns.json b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/campaigns.json new file mode 100644 index 0000000000000..b0032c96b3231 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/campaigns.json @@ -0,0 +1,49 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "created": { + "type": ["null", "string"], + "format": "date-time" + }, + "name": { + "type": ["null", "string"] + }, + "ownerEmail": { + "type": ["null", "string"] + }, + "deliveriesCount": { + "type": ["null", "integer"] + }, + "opensCount": { + "type": ["null", "integer"] + }, + "repliesCount": { + "type": ["null", "integer"] + }, + "bouncesCount": { + "type": ["null", "integer"] + }, + "optOutsCount": { + "type": ["null", "integer"] + }, + "outOfOfficeCount": { + "type": ["null", "integer"] + }, + "peopleCount": { + "type": ["null", "integer"] + }, + "peopleFinished": { + "type": ["null", "integer"] + }, + "peopleActive": { + "type": ["null", "integer"] + }, + "peoplePaused": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/email_accounts.json b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/email_accounts.json new file mode 100644 index 0000000000000..8e7344f1b99fd --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/email_accounts.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "senderName": { + "type": ["null", "string"] + }, + "emailAddress": { + "type": ["null", "string"] + }, + "signature": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/people.json b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/people.json new file mode 100644 index 0000000000000..52649e370f4e8 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/people.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "email": { + "type": ["null", "string"] + }, + "firstName": { + "type": ["null", "string"] + }, + "lastName": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "timeZoneId": { + "type": ["null", "string"] + }, + "title": { + "type": ["null", "string"] + }, + "phone": { + "type": ["null", "string"] + }, + "phoneStatus": { + "type": ["null", "string"] + }, + "linkedInProfile": { + "type": ["null", "string"] + }, + "addingDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "customFields": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/templates.json 
b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/templates.json new file mode 100644 index 0000000000000..398e8a45677fc --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/schemas/templates.json @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "body": { + "type": ["null", "string"] + }, + "subject": { + "type": ["null", "string"] + }, + "categoryId": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/source.py b/airbyte-integrations/connectors/source-reply-io/source_reply_io/source.py new file mode 100644 index 0000000000000..98b37e1deb955 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceReplyIo(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "reply_io.yaml"}) diff --git a/airbyte-integrations/connectors/source-reply-io/source_reply_io/spec.yaml b/airbyte-integrations/connectors/source-reply-io/source_reply_io/spec.yaml new file mode 100644 index 0000000000000..6653b7617e7e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-reply-io/source_reply_io/spec.yaml @@ -0,0 +1,14 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/reply-io +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Reply Io Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + type: string + title: API Token + description: The API Token for Reply + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-rocket-chat/.dockerignore b/airbyte-integrations/connectors/source-rocket-chat/.dockerignore new file mode 100644 index 0000000000000..8f94f6ed505ed --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_rocket_chat +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-rocket-chat/Dockerfile b/airbyte-integrations/connectors/source-rocket-chat/Dockerfile new file mode 100644 index 0000000000000..73796475440fc --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. 
+RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_rocket_chat ./source_rocket_chat + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-rocket-chat diff --git a/airbyte-integrations/connectors/source-rocket-chat/README.md b/airbyte-integrations/connectors/source-rocket-chat/README.md new file mode 100644 index 0000000000000..ae5a39085d74f --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/README.md @@ -0,0 +1,79 @@ +# Rocket Chat Source + +This is the repository for the Rocket Chat configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/rocket-chat). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-rocket-chat:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/rocket-chat) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_rocket_chat/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source rocket-chat test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-rocket-chat:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-rocket-chat:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-rocket-chat:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rocket-chat:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rocket-chat:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-rocket-chat:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, run `./acceptance-test-docker.sh` from the connector root. + +### Using gradle to run tests +All commands should be run from airbyte project root.
+To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-rocket-chat:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-rocket-chat:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-rocket-chat/__init__.py b/airbyte-integrations/connectors/source-rocket-chat/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-rocket-chat/acceptance-test-config.yml b/airbyte-integrations/connectors/source-rocket-chat/acceptance-test-config.yml new file mode 100644 index 0000000000000..ffb0f7bebafa7 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/acceptance-test-config.yml @@ -0,0 +1,27 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-rocket-chat:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_rocket_chat/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-rocket-chat/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-rocket-chat/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-rocket-chat/build.gradle b/airbyte-integrations/connectors/source-rocket-chat/build.gradle new file mode 100644 index 0000000000000..8eab93e3405c8 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_rocket_chat' +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/integration_tests/__init__.py b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-rocket-chat/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-rocket-chat/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..6194783e3d16b --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/configured_catalog.json @@ -0,0 +1,58 @@ +{ + "streams": [ + { + "stream": { + "name": "teams", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "rooms", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "channels", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "roles", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscriptions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..20aa95de436cf --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "endpoint": "wrong-endpoint", + "token": false +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/sample_config.json new file mode 100644 index 0000000000000..c6fd9e5491238 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "endpoint": "https://test.rocket.chat", + "token": "REPLACEME", + "user_id": "REPLACEME" +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/main.py b/airbyte-integrations/connectors/source-rocket-chat/main.py new file mode 100644 index 0000000000000..00071c2bbf420 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_rocket_chat import SourceRocketChat + +if __name__ == "__main__": + source = SourceRocketChat() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-rocket-chat/requirements.txt b/airbyte-integrations/connectors/source-rocket-chat/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
diff --git a/airbyte-integrations/connectors/source-rocket-chat/rocket-chat.md b/airbyte-integrations/connectors/source-rocket-chat/rocket-chat.md new file mode 100644 index 0000000000000..220997e67565e --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/rocket-chat.md @@ -0,0 +1,41 @@ +# Rocket.chat API + +## Sync overview + +This source can sync data from the [Rocket.chat API](https://developer.rocket.chat/reference/api). At present this connector only supports full refresh syncs, meaning that each time you use the connector it will sync all available records from scratch. Please use it cautiously if you expect your API to have a lot of records. + +## This Source Supports the Following Streams + +* teams +* rooms +* channels +* roles +* subscriptions +* users + +### Features + +| Feature | Supported?\(Yes/No\) | Notes | +| :--- | :--- | :--- | +| Full Refresh Sync | Yes | | +| Incremental Sync | No | | + +### Performance considerations + +Rocket.chat APIs are rate limited by the number of API calls allowed per API key per second. If you reach a rate limit, the API will return a 429 HTTP error code. See [here](https://developer.rocket.chat/reference/api/rest-api/endpoints/other-important-endpoints/rate-limiter-endpoints) for details. + +## Getting started + +### Requirements + +You need to set up a personal access token within the Rocket.chat workspace; see [here](https://docs.rocket.chat/guides/user-guides/user-panel/managing-your-account/personal-access-token) for step-by-step instructions. The connector requires the following configuration values: + +- token +- user_id +- endpoint + +## Changelog + +| Version | Date | Pull Request | Subject | +| :------ | :--------- | :-------------------------------------------------------- | :----------------------------------------- | +| 0.1.0 | 2022-10-29 | [#18635](https://github.com/airbytehq/airbyte/pull/18635) | 🎉 New Source: Rocket.chat API [low-code CDK] | \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-rocket-chat/setup.py b/airbyte-integrations/connectors/source-rocket-chat/setup.py new file mode 100644 index 0000000000000..6255f91d41369 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_rocket_chat", + description="Source implementation for Rocket Chat.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/__init__.py b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/__init__.py new file mode 100644 index 0000000000000..db3fde4412fea --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceRocketChat + +__all__ = ["SourceRocketChat"] diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/rocket_chat.yaml b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/rocket_chat.yaml new file mode 100644 index 0000000000000..1a9507ed49ce3 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/rocket_chat.yaml @@ -0,0 +1,110 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: + - "{{ options['name'] }}" + update_selector: + extractor: + field_pointer: + - "update" + requester: + url_base: "{{ config['endpoint'] }}/api/v1" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "X-Auth-Token" + api_token: "{{ config['token'] }}" + request_options_provider: + request_headers: + X-User-Id: "{{ config['user_id'] }}" + offset_paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + page_size: 1 + page_size_option: + inject_into: "request_parameter" + field_name: "count" + page_token_option: + inject_into: "request_parameter" + field_name: "offset" + pagination_strategy: + type: "OffsetIncrement" + custom_retriever: + record_selector: + $ref: "*ref(definitions.update_selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.offset_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + teams_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "teams" + primary_key: "_id" + path: "/teams.list" + rooms_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.custom_retriever)" + $options: + name: "rooms" + primary_key: "_id" + path: "/rooms.get" + channels_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "channels" + primary_key: "_id" + path: "/channels.list" + roles_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + $options: + name: "roles" + primary_key: "_id" + path: "/roles.list" + subscriptions_stream: + $ref: "*ref(definitions.base_stream)" + retriever: + $ref: "*ref(definitions.custom_retriever)" + $options: + name: "subscriptions" + primary_key: "_id" + path: "/subscriptions.get" + users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "users" + primary_key: "_id" + path: "/users.list" + +streams: + - "*ref(definitions.teams_stream)" + - "*ref(definitions.rooms_stream)" + - "*ref(definitions.channels_stream)" + - "*ref(definitions.roles_stream)" + - "*ref(definitions.subscriptions_stream)" + - "*ref(definitions.users_stream)" + +check: + stream_names: + - "teams" diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/channels.json b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/channels.json new file mode 100644 index 0000000000000..6a085bf62f7f7 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/channels.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "fname": { + "type": ["null", "string"] + }, + 
"description": { + "type": ["null", "string"] + }, + "broadcast": { + "type": ["null", "boolean"] + }, + "encrypted": { + "type": ["null", "boolean"] + }, + "teamMain": { + "type": ["null", "boolean"] + }, + "name": { + "type": ["null", "string"] + }, + "t": { + "type": ["null", "string"] + }, + "msgs": { + "type": ["null", "integer"] + }, + "usersCount": { + "type": ["null", "integer"] + }, + "u": { + "type": ["null", "object"] + }, + "ts": { + "type": ["null", "string"] + }, + "ro": { + "type": ["null", "boolean"] + }, + "teamId": { + "type": ["null", "string"] + }, + "default": { + "type": ["null", "boolean"] + }, + "sysMes": { + "type": ["null", "boolean"] + }, + "_updatedAt": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/roles.json b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/roles.json new file mode 100644 index 0000000000000..15c9d3fe41557 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/roles.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "scope": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "mandatory2fa": { + "type": ["null", "boolean"] + }, + "name": { + "type": ["null", "string"] + }, + "protected": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/rooms.json b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/rooms.json new file mode 100644 index 0000000000000..e2647943359ef --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/rooms.json @@ -0,0 +1,63 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "fname": { + "type": ["null", "string"] + }, + "customFields": { + "type": ["null", "object"] + }, + "description": { + "type": ["null", "string"] + }, + "broadcast": { + "type": ["null", "boolean"] + }, + "encrypted": { + "type": ["null", "boolean"] + }, + "federated": { + "type": ["null", "boolean"] + }, + "name": { + "type": ["null", "string"] + }, + "t": { + "type": ["null", "string"] + }, + "msgs": { + "type": ["null", "integer"] + }, + "usersCount": { + "type": ["null", "integer"] + }, + "u": { + "type": ["null", "object"] + }, + "ts": { + "type": ["null", "string"] + }, + "ro": { + "type": ["null", "boolean"] + }, + "default": { + "type": ["null", "boolean"] + }, + "sysMes": { + "type": ["null", "boolean"] + }, + "_updatedAt": { + "type": ["null", "string"] + }, + "lastMessage": { + "type": ["null", "object"] + }, + "lm": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/subscriptions.json b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/subscriptions.json new file mode 100644 index 0000000000000..cb6837f768bd0 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/subscriptions.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "open": { + "type": ["null", "boolean"] + }, + "alert": { + "type": ["null", "boolean"] + }, + 
"unread": { + "type": ["null", "integer"] + }, + "userMentions": { + "type": ["null", "integer"] + }, + "groupMentions": { + "type": ["null", "integer"] + }, + "ts": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "t": { + "type": ["null", "string"] + }, + "fname": { + "type": ["null", "string"] + }, + "ls": { + "type": ["null", "string"] + }, + "u": { + "type": ["null", "object"] + }, + "rid": { + "type": ["null", "string"] + }, + "_updatedAt": { + "type": ["null", "string"] + }, + "roles": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/teams.json b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/teams.json new file mode 100644 index 0000000000000..b9e61de155bca --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/teams.json @@ -0,0 +1,33 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "integer"] + }, + "createdAt": { + "type": ["null", "string"] + }, + "createdBy": { + "type": ["null", "object"] + }, + "_updatedAt": { + "type": ["null", "string"] + }, + "roomId": { + "type": ["null", "string"] + }, + "rooms": { + "type": ["null", "integer"] + }, + "numberOfUsers": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/users.json b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/users.json new file mode 100644 index 0000000000000..d950e0020b079 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/schemas/users.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "_id": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "active": { + "type": ["null", "boolean"] + }, + "name": { + "type": ["null", "string"] + }, + "username": { + "type": ["null", "string"] + }, + "emails": { + "type": ["null", "array"], + "items": { + "type": "object" + } + }, + "roles": { + "type": ["null", "array"], + "items": { + "type": "string" + } + }, + "lastLogin": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "nameInsensitive": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/source.py b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/source.py new file mode 100644 index 0000000000000..9339ecbdb6ccc --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceRocketChat(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "rocket_chat.yaml"}) diff --git a/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/spec.yaml b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/spec.yaml new file mode 100644 index 0000000000000..76dcec99964e7 --- /dev/null +++ b/airbyte-integrations/connectors/source-rocket-chat/source_rocket_chat/spec.yaml @@ -0,0 +1,30 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/rocket-chat +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Rocket Chat Spec + type: object + required: + - token + - user_id + - endpoint + additionalProperties: true + properties: + endpoint: + title: Endpoint + type: string + description: Your rocket.chat instance URL. + examples: + - https://airbyte-connector-poc.rocket.chat + - https://hey.yoursite.com + token: + title: Token + type: string + description: >- + Your API Token. See here. The token is + case sensitive. + airbyte_secret: true + user_id: + title: User ID. + type: string + description: Your User Id. diff --git a/airbyte-integrations/connectors/source-rss/.dockerignore b/airbyte-integrations/connectors/source-rss/.dockerignore new file mode 100644 index 0000000000000..0e472e73c29b1 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_rss +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-rss/Dockerfile b/airbyte-integrations/connectors/source-rss/Dockerfile new file mode 100644 index 0000000000000..afeb0625ec217 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_rss ./source_rss + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-rss diff --git a/airbyte-integrations/connectors/source-rss/README.md b/airbyte-integrations/connectors/source-rss/README.md new file mode 100644 index 0000000000000..ecbe1a253a623 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/README.md @@ -0,0 +1,127 @@ +# RSS Source + +This is the repository for the RSS source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/rss). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-rss:build +``` + +#### Credentials + +Since this doesn't require auth, the config is just in `integration_tests/sample_config.json` instead of `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config integration_tests/sample_config.json +python main.py discover --config integration_tests/sample_config.json +python main.py read --config integration_tests/sample_config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-rss:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-rss:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-rss:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rss:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-rss:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-rss:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). 
+#### Custom Integration tests +Place custom tests inside the `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure the tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with Docker, use the `acceptance-test-docker.sh` script added below. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-rss:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-rss:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work, which go in the `MAIN_REQUIREMENTS` list. +* required for testing, which go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
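Aside: before the schema and source files that follow, here is a hedged, standalone sketch (not part of the connector) of what `source_rss/source.py` further below does with the configured feed URL: fetch it, parse it with `feedparser`, and normalise each entry's published timestamp to an ISO-8601 UTC string, which is also used as the incremental cursor.

```python
# Standalone illustration of the parsing behaviour implemented in
# source_rss/source.py (fetch -> feedparser -> UTC-normalised "published").
from calendar import timegm
from datetime import datetime, timezone

import feedparser
import requests


def preview_feed(url: str) -> list:
    feed = feedparser.parse(requests.get(url).text)
    items = []
    for entry in feed.entries:
        record = {"title": entry.get("title"), "link": entry.get("link")}
        if getattr(entry, "published_parsed", None):
            # published_parsed is a UTC struct_time; convert to an aware datetime
            published = datetime.fromtimestamp(timegm(entry.published_parsed), tz=timezone.utc)
            record["published"] = published.isoformat()
        items.append(record)
    return items


# e.g. preview_feed("https://www.nasa.gov/rss/dyn/breaking_news.rss")
```

The connector itself does the same work inside an `HttpStream.parse_response` and compares `published` against the stream state to implement incremental reads.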
diff --git a/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml b/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml new file mode 100644 index 0000000000000..92605a43cbe5c --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/acceptance-test-config.yml @@ -0,0 +1,31 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-rss:dev +acceptance_tests: + spec: + tests: + - config_path: "integration_tests/sample_config.json" + spec_path: "source_rss/spec.yaml" + connection: + tests: + - config_path: "integration_tests/sample_config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "integration_tests/sample_config.json" + basic_read: + tests: + - config_path: "integration_tests/sample_config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "integration_tests/sample_config.json" + configured_catalog_path: "integration_tests/incremental_configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "integration_tests/sample_config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-rss/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-rss/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-rss/build.gradle b/airbyte-integrations/connectors/source-rss/build.gradle new file mode 100644 index 0000000000000..42a7ced4cdbc0 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_rss' +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/__init__.py b/airbyte-integrations/connectors/source-rss/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..68ac9906773a4 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "items": { + "published": "3333-10-24T16:16:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/catalog.json b/airbyte-integrations/connectors/source-rss/integration_tests/catalog.json new file mode 100644 index 0000000000000..6d00beed5497f --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/catalog.json @@ -0,0 +1,45 @@ +{ + "streams": [ + { + "name": "items", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["published"], + "properties": { + "title": { + "type": ["null", "string"] + }, + "link": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "author": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "comments": { + "type": ["null", "string"] + }, + "enclosure": { + "type": ["null", "string"] + }, + "guid": { + "type": ["null", "string"] + }, + "published": { + "type": ["string"], + "format": "date-time" + } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["published"] + } + ] +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..7ab22cc8dbba9 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/configured_catalog.json @@ -0,0 +1,47 @@ +{ + "streams": [ + { + "stream": { + "name": "items", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["published"], + "properties": { + "title": { + "type": ["null", "string"] + }, + "link": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "author": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "comments": { + "type": ["null", "string"] + }, + "enclosure": { + "type": ["null", "string"] + }, + "guid": { + "type": ["null", "string"] + }, + "published": { + "type": ["string"], + "format": "date-time" + } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/incremental_configured_catalog.json 
b/airbyte-integrations/connectors/source-rss/integration_tests/incremental_configured_catalog.json new file mode 100644 index 0000000000000..d4ff280c9201e --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/incremental_configured_catalog.json @@ -0,0 +1,47 @@ +{ + "streams": [ + { + "stream": { + "name": "items", + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["published"], + "properties": { + "title": { + "type": ["null", "string"] + }, + "link": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "author": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "comments": { + "type": ["null", "string"] + }, + "enclosure": { + "type": ["null", "string"] + }, + "guid": { + "type": ["null", "string"] + }, + "published": { + "type": ["string"], + "format": "date-time" + } + } + }, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..0029691134c3e --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "url": "http://somewebsitethatdoesnotexistatall.com/something.rss" +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-rss/integration_tests/sample_config.json new file mode 100644 index 0000000000000..457574262fc32 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "url": "https://www.nasa.gov/rss/dyn/breaking_news.rss" +} diff --git a/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json new file mode 100644 index 0000000000000..e9493cafc5575 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "items": { + "published": "2022-10-24T16:16:00+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-rss/main.py b/airbyte-integrations/connectors/source-rss/main.py new file mode 100644 index 0000000000000..b1519bf535fe7 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_rss import SourceRss + +if __name__ == "__main__": + source = SourceRss() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-rss/requirements.txt b/airbyte-integrations/connectors/source-rss/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-rss/setup.py b/airbyte-integrations/connectors/source-rss/setup.py new file mode 100644 index 0000000000000..eee9fb4f9ae74 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/setup.py @@ -0,0 +1,27 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk~=0.2", "feedparser~=6.0.10", "pytz~=2022.6"] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_rss", + description="Source implementation for Rss.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-rss/source_rss/__init__.py b/airbyte-integrations/connectors/source-rss/source_rss/__init__.py new file mode 100644 index 0000000000000..aff462d761a3f --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/source_rss/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceRss + +__all__ = ["SourceRss"] diff --git a/airbyte-integrations/connectors/source-rss/source_rss/schemas/items.json b/airbyte-integrations/connectors/source-rss/source_rss/schemas/items.json new file mode 100644 index 0000000000000..88a12b9904f37 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/source_rss/schemas/items.json @@ -0,0 +1,35 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "required": ["published"], + "properties": { + "title": { + "type": ["null", "string"] + }, + "link": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "author": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "comments": { + "type": ["null", "string"] + }, + "enclosure": { + "type": ["null", "string"] + }, + "guid": { + "type": ["null", "string"] + }, + "published": { + "type": ["string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-rss/source_rss/source.py b/airbyte-integrations/connectors/source-rss/source_rss/source.py new file mode 100644 index 0000000000000..c9548d064acfc --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/source_rss/source.py @@ -0,0 +1,155 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from abc import ABC +from calendar import timegm +from datetime import datetime +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import feedparser +import pytz +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from dateutil.parser import parse + +item_keys = [ + "title", + "link", + "description", + "author", + "category", + "comments", + "enclosure", + "guid", +] + + +def convert_item_to_mapping(item) -> Mapping: + mapping = {} + + for item_key in item_keys: + try: + mapping[item_key] = item[item_key] + except (AttributeError, KeyError): + pass + + try: + # get datetime in UTC + dt = datetime.utcfromtimestamp(timegm(item.published_parsed)) + # make sure that the output string is labeled as UTC + dt_tz = dt.replace(tzinfo=pytz.UTC) + mapping["published"] = dt_tz.isoformat() + except (AttributeError, KeyError): + pass + + return mapping + + +def is_newer(item, initial_state_date) -> bool: + try: + current_record_date = parse(item["published"]) + except Exception: + current_record_date = None + + if initial_state_date is None: + # if we don't have initial state they are all new + return True + elif current_record_date is None: + # if we can't parse the item timestamp, we should return it + return True + else: + return current_record_date > initial_state_date + + +# Basic stream +class RssStream(HttpStream, ABC): + # empty URL base since the stream can have its own full URL + url_base = "" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # no pagination enabled + return None + + # since we only have one response for the stream, we should only return records newer than the initial state object if incremental + def parse_response(self, response: requests.Response, stream_state: MutableMapping[str, Any], **kwargs) -> Iterable[Mapping]: + feed = feedparser.parse(response.text) + + try: + initial_state_date = parse(stream_state["published"]) + except Exception: + initial_state_date = None + + # go through in reverse order which helps the state comparisons + all_item_mappings = [convert_item_to_mapping(item) for item in feed.entries[::-1]] + + # will only filter if we have a state object, so it's incremental + yield from [item for item in all_item_mappings if is_newer(item, initial_state_date)] + + +# Basic incremental stream +class IncrementalRssStream(RssStream, ABC): + # no reason to checkpoint if it's reading individual files without pagination + state_checkpoint_interval = None + + @property + def cursor_field(self) -> str: + return "published" + + # this will fail if the dates aren't parseable, but that means incremental isn't possible anyway for that feed + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + try: + latest_record_date = parse(latest_record["published"]) + latest_record_state = {"published": latest_record["published"]} + except Exception: + latest_record_date = None + + try: + current_record_date = parse(current_stream_state["published"]) + except Exception: + current_record_date = None + + if latest_record_date and current_record_date: + if latest_record_date > current_record_date: + return latest_record_state + else: + return current_stream_state + if latest_record_date: + return latest_record_state + if current_record_date: + return current_stream_state + else: + return {} + + +class 
Items(IncrementalRssStream): + def __init__(self, url: str): + super().__init__() + self.url = url + + primary_key = None + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return self.url + + +# Source +class SourceRss(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + try: + resp = requests.get(config.get("url")) + status = resp.status_code + if status == 200: + return True, None + else: + return False, f"Unable to connect to RSS Feed (received status code: {status})" + except Exception as e: + return False, e + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + return [Items(config.get("url"))] diff --git a/airbyte-integrations/connectors/source-rss/source_rss/spec.yaml b/airbyte-integrations/connectors/source-rss/source_rss/spec.yaml new file mode 100644 index 0000000000000..c1128cf4c0d01 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/source_rss/spec.yaml @@ -0,0 +1,11 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/rss +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: RSS Spec + type: object + required: + - url + properties: + url: + type: string + description: RSS Feed URL diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/__init__.py b/airbyte-integrations/connectors/source-rss/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-rss/unit_tests/test_incremental_streams.py new file mode 100644 index 0000000000000..c331b00a29faa --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/unit_tests/test_incremental_streams.py @@ -0,0 +1,60 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_rss.source import IncrementalRssStream + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalRssStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalRssStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalRssStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + stream = IncrementalRssStream() + expected_cursor_field = "published" + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class): + stream = IncrementalRssStream() + + inputs = { + "current_stream_state": {"published": "2022-10-24T16:16:00+00:00"}, + "latest_record": {"published": "2022-10-30T16:16:00+00:00"}, + } + + expected_state = {"published": "2022-10-30T16:16:00+00:00"} + assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class): + stream = IncrementalRssStream() + # TODO: replace this with your input parameters + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": ["published"], "stream_state": {}} + # TODO: replace this with your expected stream slices list + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + mocker.patch.object(IncrementalRssStream, "cursor_field", "dummy_field") + stream = IncrementalRssStream() + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): + stream = IncrementalRssStream() + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = IncrementalRssStream() + expected_checkpoint_interval = None + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/test_source.py b/airbyte-integrations/connectors/source-rss/unit_tests/test_source.py new file mode 100644 index 0000000000000..b3934613c96b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/unit_tests/test_source.py @@ -0,0 +1,15 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +from source_rss.source import SourceRss + + +def test_streams(mocker): + source = SourceRss() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 1 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-rss/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-rss/unit_tests/test_streams.py new file mode 100644 index 0000000000000..be77e6c34fc89 --- /dev/null +++ b/airbyte-integrations/connectors/source-rss/unit_tests/test_streams.py @@ -0,0 +1,107 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +import os +import time +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_rss.source import RssStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(RssStream, "path", "v0/example_endpoint") + mocker.patch.object(RssStream, "primary_key", "test_primary_key") + mocker.patch.object(RssStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = RssStream() + # TODO: replace this with your input parameters + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + # TODO: replace this with your expected request parameters + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = RssStream() + inputs = {"response": MagicMock()} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = RssStream() + + class SampleResponse: + text = """<?xml version="1.0" encoding="UTF-8"?> + <rss version="2.0"> + <channel> + <item> + <title>Test Title</title> + <link>http://testlink</link> + <description>Test Description</description> + <pubDate>Fri, 28 Oct 2022 11:16 EDT</pubDate> + </item> + </channel> + </rss> + """ + + expected_parsed_object = { + "title": "Test Title", + "link": "http://testlink", + "description": "Test Description", + "published": "2022-10-28T15:16:00+00:00", + } + + assert next(stream.parse_response(response=SampleResponse(), stream_state={})) == expected_parsed_object + + # test that the local timezone doesn't impact how this is computed + os.environ['TZ'] = 'Africa/Accra' + time.tzset() + assert next(stream.parse_response(response=SampleResponse(), stream_state={})) == expected_parsed_object + os.environ['TZ'] = 'Asia/Tokyo' + time.tzset() + assert next(stream.parse_response(response=SampleResponse(), stream_state={})) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = RssStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = RssStream() + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = RssStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = RssStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-s3/Dockerfile b/airbyte-integrations/connectors/source-s3/Dockerfile index 2413c0c799b71..c38be4e387ef8 100644 --- a/airbyte-integrations/connectors/source-s3/Dockerfile +++ b/airbyte-integrations/connectors/source-s3/Dockerfile @@ -17,5 +17,5 @@ COPY source_s3 ./source_s3 ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.25 +LABEL io.airbyte.version=0.1.26 LABEL io.airbyte.name=airbyte/source-s3 diff --git a/airbyte-integrations/connectors/source-s3/integration_tests/config_minio.json
b/airbyte-integrations/connectors/source-s3/integration_tests/config_minio.json index 6bf9fc09c36db..1a0863be9c8b6 100644 --- a/airbyte-integrations/connectors/source-s3/integration_tests/config_minio.json +++ b/airbyte-integrations/connectors/source-s3/integration_tests/config_minio.json @@ -6,7 +6,7 @@ "aws_access_key_id": "123456", "aws_secret_access_key": "123456key", "path_prefix": "", - "endpoint": "http://10.0.56.135:9000" + "endpoint": "http://10.0.229.255:9000" }, "format": { "filetype": "csv" diff --git a/airbyte-integrations/connectors/source-s3/source_s3/s3_utils.py b/airbyte-integrations/connectors/source-s3/source_s3/s3_utils.py index 607495951fa9a..eff5f76e63bb4 100644 --- a/airbyte-integrations/connectors/source-s3/source_s3/s3_utils.py +++ b/airbyte-integrations/connectors/source-s3/source_s3/s3_utils.py @@ -48,6 +48,7 @@ def _get_s3_client_args(provider: dict, config: Config) -> dict: client_kv_args["endpoint_url"] = endpoint client_kv_args["use_ssl"] = provider.get("use_ssl", True) client_kv_args["verify"] = provider.get("verify_ssl_cert", True) + client_kv_args["config"] = Config(s3={"addressing_style": provider.get("addressing_style", "auto")}) return client_kv_args diff --git a/airbyte-integrations/connectors/source-salesforce/Dockerfile b/airbyte-integrations/connectors/source-salesforce/Dockerfile index fea41e693626d..4d2d34725d201 100644 --- a/airbyte-integrations/connectors/source-salesforce/Dockerfile +++ b/airbyte-integrations/connectors/source-salesforce/Dockerfile @@ -13,5 +13,5 @@ RUN pip install . ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=1.0.24 +LABEL io.airbyte.version=1.0.26 LABEL io.airbyte.name=airbyte/source-salesforce diff --git a/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml b/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml index 3ad8172e6d61a..9494904b2b767 100644 --- a/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-salesforce/acceptance-test-config.yml @@ -1,25 +1,43 @@ # See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-salesforce:dev -tests: +test_strictness_level: high +acceptance_tests: spec: - - spec_path: "source_salesforce/spec.yaml" + tests: + - spec_path: "source_salesforce/spec.yaml" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "secrets/config_sandbox.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "secrets/config_sandbox.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" discovery: - - config_path: "secrets/config.json" + tests: + - config_path: "secrets/config.json" basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + expect_records: + path: "integration_tests/expected_records.txt" + empty_streams: + - name: "ActiveScratchOrg" + bypass_reason: "impossible to fill the stream with data because it is an organic traffic" + - name: "ActiveScratchOrgFeed" + bypass_reason: "impossible to fill the stream with data because it is an organic traffic" + - name: 
"ActiveScratchOrgHistory" + bypass_reason: "impossible to fill the stream with data because it is an organic traffic" + - name: "ActiveScratchOrgShare" + bypass_reason: "impossible to fill the stream with data because it is an organic traffic" incremental: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - future_state_path: "integration_tests/future_state.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/incremental_catalog.json" + future_state: + future_state_path: "integration_tests/future_state.json" full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-salesforce/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-salesforce/integration_tests/configured_catalog.json index c5f317729aa59..1d82d43b11a17 100644 --- a/airbyte-integrations/connectors/source-salesforce/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-salesforce/integration_tests/configured_catalog.json @@ -9,7 +9,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -21,7 +21,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -33,7 +33,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -45,7 +45,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -67,7 +67,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -89,7 +89,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -101,7 +101,7 @@ "default_cursor_field": ["SystemModstamp"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" }, { @@ -113,7 +113,7 @@ "default_cursor_field": ["CreatedDate"], "source_defined_primary_key": [["Id"]] }, - "sync_mode": "incremental", + "sync_mode": "full_refresh", "destination_sync_mode": "append" } ] diff --git a/airbyte-integrations/connectors/source-salesforce/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-salesforce/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..55bba40946023 --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/integration_tests/expected_records.txt @@ -0,0 +1,93 @@ +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6UwQAI"},"Id":"0014W000027f6UwQAI","IsDeleted":false,"MasterRecordId":null,"Name":"Edge Communications","Type":"Customer - 
Direct","ParentId":null,"BillingStreet":"312 Constitution Place\nAustin, TX 78767\nUSA","BillingCity":"Austin","BillingState":"TX","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Austin","country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"TX","street":"312 Constitution Place\nAustin, TX 78767\nUSA"},"ShippingStreet":"312 Constitution Place\nAustin, TX 78767\nUSA","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"312 Constitution Place\nAustin, TX 78767\nUSA"},"Phone":"(512) 757-6000","Fax":"(512) 757-9000","AccountNumber":"CD451796","Website":"http://edgecomm.com","PhotoUrl":"/services/images/photo/0014W000027f6UwQAI","Sic":"6576","Industry":"Electronics","AnnualRevenue":139000000,"NumberOfEmployees":1000,"Ownership":"Public","TickerSymbol":"EDGE","Description":"Edge, founded in 1998, is a start-up based in Austin, TX. The company designs and manufactures a device to convert music from one digital format to another. Edge sells its product through retailers and its own website.","Rating":"Hot","Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":"2022-09-29T18:51:04.000+0000","LastReferencedDate":"2022-09-29T18:51:04.000+0000","Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"Medium","SLA__c":"Silver","Active__c":"Yes","NumberofLocations__c":2,"UpsellOpportunity__c":"Maybe","SLASerialNumber__c":"2657","SLAExpirationDate__c":"2020-11-16"},"emitted_at":1668502796058} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6UxQAI"},"Id":"0014W000027f6UxQAI","IsDeleted":false,"MasterRecordId":null,"Name":"Burlington Textiles Corp of America","Type":"Customer - Direct","ParentId":null,"BillingStreet":"525 S. Lexington Ave","BillingCity":"Burlington","BillingState":"NC","BillingPostalCode":"27215","BillingCountry":"USA","BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Burlington","country":"USA","geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":"27215","state":"NC","street":"525 S. 
Lexington Ave"},"ShippingStreet":null,"ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":null,"Phone":"(336) 222-7000","Fax":"(336) 222-8000","AccountNumber":"CD656092","Website":"www.burlington.com","PhotoUrl":"/services/images/photo/0014W000027f6UxQAI","Sic":"546732","Industry":"Apparel","AnnualRevenue":350000000,"NumberOfEmployees":9000,"Ownership":"Public","TickerSymbol":"BTXT","Description":null,"Rating":"Warm","Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":null,"SLA__c":"Silver","Active__c":null,"NumberofLocations__c":6,"UpsellOpportunity__c":"Maybe","SLASerialNumber__c":"5367","SLAExpirationDate__c":"2020-11-16"},"emitted_at":1668502796063} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6UyQAI"},"Id":"0014W000027f6UyQAI","IsDeleted":false,"MasterRecordId":null,"Name":"Pyramid Construction Inc.","Type":"Customer - Channel","ParentId":null,"BillingStreet":"2 Place Jussieu","BillingCity":"Paris","BillingState":null,"BillingPostalCode":"75251","BillingCountry":"France","BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Paris","country":"France","geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":"75251","state":null,"street":"2 Place Jussieu"},"ShippingStreet":"2 Place Jussieu","ShippingCity":"Paris","ShippingState":null,"ShippingPostalCode":"75251","ShippingCountry":"France","ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":"Paris","country":"France","geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":"75251","state":null,"street":"2 Place Jussieu"},"Phone":"(014) 427-4427","Fax":"(014) 427-4428","AccountNumber":"CC213425","Website":"www.pyramid.com","PhotoUrl":"/services/images/photo/0014W000027f6UyQAI","Sic":"4253","Industry":"Construction","AnnualRevenue":950000000,"NumberOfEmployees":2680,"Ownership":"Public","TickerSymbol":"PYR","Description":null,"Rating":null,"Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":null,"SLA__c":"Silver","Active__c":"Yes","NumberofLocations__c":17,"UpsellOpportunity__c":"Maybe","SLASerialNumber__c":"9840","SLAExpirationDate__c":"2021-05-19"},"emitted_at":1668502796067} 
+{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6UzQAI"},"Id":"0014W000027f6UzQAI","IsDeleted":false,"MasterRecordId":null,"Name":"Dickenson plc","Type":"Customer - Channel","ParentId":null,"BillingStreet":"1301 Hoch Drive","BillingCity":"Lawrence","BillingState":"KS","BillingPostalCode":"66045","BillingCountry":"USA","BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Lawrence","country":"USA","geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":"66045","state":"KS","street":"1301 Hoch Drive"},"ShippingStreet":"1301 Hoch Drive","ShippingCity":"Lawrence","ShippingState":"KS","ShippingPostalCode":"66045","ShippingCountry":"USA","ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":"Lawrence","country":"USA","geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":"66045","state":"KS","street":"1301 Hoch Drive"},"Phone":"(785) 241-6200","Fax":"(785) 241-6201","AccountNumber":"CC634267","Website":"dickenson-consulting.com","PhotoUrl":"/services/images/photo/0014W000027f6UzQAI","Sic":"6752","Industry":"Consulting","AnnualRevenue":50000000,"NumberOfEmployees":120,"Ownership":"Private","TickerSymbol":null,"Description":null,"Rating":null,"Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"Low","SLA__c":"Bronze","Active__c":"Yes","NumberofLocations__c":2,"UpsellOpportunity__c":"No","SLASerialNumber__c":"7425","SLAExpirationDate__c":"2021-05-19"},"emitted_at":1668502796071} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6V0QAI"},"Id":"0014W000027f6V0QAI","IsDeleted":false,"MasterRecordId":null,"Name":"Grand Hotels & Resorts Ltd","Type":"Customer - Direct","ParentId":null,"BillingStreet":"2334 N. Michigan Avenue, Suite 1500\nChicago, IL 60601, USA","BillingCity":"Chicago","BillingState":"IL","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Chicago","country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"IL","street":"2334 N. Michigan Avenue, Suite 1500\nChicago, IL 60601, USA"},"ShippingStreet":"2334 N. Michigan Avenue, Suite 1500\nChicago, IL 60601, USA","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"2334 N. 
Michigan Avenue, Suite 1500\nChicago, IL 60601, USA"},"Phone":"(312) 596-1000","Fax":"(312) 596-1500","AccountNumber":"CD439877","Website":"www.grandhotels.com","PhotoUrl":"/services/images/photo/0014W000027f6V0QAI","Sic":"2268","Industry":"Hospitality","AnnualRevenue":500000000,"NumberOfEmployees":5600,"Ownership":"Public","TickerSymbol":"GHTL","Description":"Chain of hotels and resorts across the US, UK, Eastern Europe, Japan, and SE Asia.","Rating":"Warm","Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"High","SLA__c":"Gold","Active__c":"Yes","NumberofLocations__c":57,"UpsellOpportunity__c":"Yes","SLASerialNumber__c":"5572","SLAExpirationDate__c":"2021-05-19"},"emitted_at":1668502796074} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6V1QAI"},"Id":"0014W000027f6V1QAI","IsDeleted":false,"MasterRecordId":null,"Name":"United Oil & Gas Corp.","Type":"Customer - Direct","ParentId":null,"BillingStreet":"1301 Avenue of the Americas \r\nNew York, NY 10019\r\nUSA","BillingCity":"New York","BillingState":"NY","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"New York","country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"NY","street":"1301 Avenue of the Americas \r\nNew York, NY 10019\r\nUSA"},"ShippingStreet":"1301 Avenue of the Americas \r\nNew York, NY 10019\r\nUSA","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"1301 Avenue of the Americas \r\nNew York, NY 10019\r\nUSA"},"Phone":"(212) 842-5500","Fax":"(212) 842-5501","AccountNumber":"CD355118","Website":"http://www.uos.com","PhotoUrl":"/services/images/photo/0014W000027f6V1QAI","Sic":"4437","Industry":"Energy","AnnualRevenue":5600000000,"NumberOfEmployees":145000,"Ownership":"Public","TickerSymbol":"UOS","Description":"World's third largest oil and gas company.","Rating":"Hot","Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"High","SLA__c":"Platinum","Active__c":"Yes","NumberofLocations__c":955,"UpsellOpportunity__c":"Yes","SLASerialNumber__c":"6654","SLAExpirationDate__c":"2021-05-19"},"emitted_at":1668502796078} 
+{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6V2QAI"},"Id":"0014W000027f6V2QAI","IsDeleted":false,"MasterRecordId":null,"Name":"Express Logistics and Transport","Type":"Customer - Channel","ParentId":null,"BillingStreet":"620 SW 5th Avenue Suite 400\nPortland, Oregon 97204\nUnited States","BillingCity":"Portland","BillingState":"OR","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Portland","country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"OR","street":"620 SW 5th Avenue Suite 400\nPortland, Oregon 97204\nUnited States"},"ShippingStreet":"620 SW 5th Avenue Suite 400\nPortland, Oregon 97204\nUnited States","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"620 SW 5th Avenue Suite 400\nPortland, Oregon 97204\nUnited States"},"Phone":"(503) 421-7800","Fax":"(503) 421-7801","AccountNumber":"CC947211","Website":"www.expressl&t.net","PhotoUrl":"/services/images/photo/0014W000027f6V2QAI","Sic":"8742","Industry":"Transportation","AnnualRevenue":950000000,"NumberOfEmployees":12300,"Ownership":"Public","TickerSymbol":"EXLT","Description":"Commerical logistics and transportation company.","Rating":"Cold","Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"Medium","SLA__c":"Platinum","Active__c":"Yes","NumberofLocations__c":150,"UpsellOpportunity__c":"Maybe","SLASerialNumber__c":"4724","SLAExpirationDate__c":"2021-05-19"},"emitted_at":1668502796080} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6V3QAI"},"Id":"0014W000027f6V3QAI","IsDeleted":false,"MasterRecordId":null,"Name":"University of Arizona","Type":"Customer - Direct","ParentId":null,"BillingStreet":"888 N Euclid \nHallis Center, Room 501\nTucson, AZ 85721\nUnited States","BillingCity":"Tucson","BillingState":"AZ","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Tucson","country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"AZ","street":"888 N Euclid \nHallis Center, Room 501\nTucson, AZ 85721\nUnited States"},"ShippingStreet":"888 N Euclid \nHallis Center, Room 501\nTucson, AZ 85721\nUnited States","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"888 N Euclid \nHallis 
Center, Room 501\nTucson, AZ 85721\nUnited States"},"Phone":"(520) 773-9050","Fax":"(520) 773-9060","AccountNumber":"CD736025","Website":"www.universityofarizona.com","PhotoUrl":"/services/images/photo/0014W000027f6V3QAI","Sic":"7321","Industry":"Education","AnnualRevenue":null,"NumberOfEmployees":39000,"Ownership":"Other","TickerSymbol":null,"Description":"Leading university in AZ offering undergraduate and graduate programs in arts and humanities, pure sciences, engineering, business, and medicine.","Rating":"Warm","Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"Medium","SLA__c":"Gold","Active__c":"Yes","NumberofLocations__c":3,"UpsellOpportunity__c":"Yes","SLASerialNumber__c":"8350","SLAExpirationDate__c":"2020-11-16"},"emitted_at":1668502796083} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6V4QAI"},"Id":"0014W000027f6V4QAI","IsDeleted":false,"MasterRecordId":null,"Name":"United Oil & Gas, UK","Type":"Customer - Direct","ParentId":null,"BillingStreet":"Kings Park, 17th Avenue, Team Valley Trading Estate,\nGateshead, Tyne and Wear NE26 3HS\nUnited Kingdom","BillingCity":null,"BillingState":"UK","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"UK","street":"Kings Park, 17th Avenue, Team Valley Trading Estate,\nGateshead, Tyne and Wear NE26 3HS\nUnited Kingdom"},"ShippingStreet":"Kings Park, 17th Avenue, Team Valley Trading Estate,\nGateshead, Tyne and Wear NE26 3HS\nUnited Kingdom","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"Kings Park, 17th Avenue, Team Valley Trading Estate,\nGateshead, Tyne and Wear NE26 3HS\nUnited Kingdom"},"Phone":"+44 191 4956203","Fax":"+44 191 
4956620","AccountNumber":"CD355119-A","Website":"http://www.uos.com","PhotoUrl":"/services/images/photo/0014W000027f6V4QAI","Sic":"4437","Industry":"Energy","AnnualRevenue":null,"NumberOfEmployees":24000,"Ownership":"Public","TickerSymbol":"UOS","Description":null,"Rating":null,"Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"High","SLA__c":"Platinum","Active__c":"Yes","NumberofLocations__c":34,"UpsellOpportunity__c":"No","SLASerialNumber__c":"3479","SLAExpirationDate__c":"2020-11-16"},"emitted_at":1668502796085} +{"stream":"Account","data":{"attributes":{"type":"Account","url":"/services/data/v52.0/sobjects/Account/0014W000027f6V5QAI"},"Id":"0014W000027f6V5QAI","IsDeleted":false,"MasterRecordId":null,"Name":"United Oil & Gas, Singapore","Type":"Customer - Direct","ParentId":null,"BillingStreet":"9 Tagore Lane\nSingapore, Singapore 787472\nSingapore","BillingCity":"Singapore","BillingState":"Singapore","BillingPostalCode":null,"BillingCountry":null,"BillingLatitude":null,"BillingLongitude":null,"BillingGeocodeAccuracy":null,"BillingAddress":{"city":"Singapore","country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":"Singapore","street":"9 Tagore Lane\nSingapore, Singapore 787472\nSingapore"},"ShippingStreet":"9 Tagore Lane\nSingapore, Singapore 787472\nSingapore","ShippingCity":null,"ShippingState":null,"ShippingPostalCode":null,"ShippingCountry":null,"ShippingLatitude":null,"ShippingLongitude":null,"ShippingGeocodeAccuracy":null,"ShippingAddress":{"city":null,"country":null,"geocodeAccuracy":null,"latitude":null,"longitude":null,"postalCode":null,"state":null,"street":"9 Tagore Lane\nSingapore, Singapore 787472\nSingapore"},"Phone":"(650) 450-8810","Fax":"(650) 450-8820","AccountNumber":"CD355120-B","Website":"http://www.uos.com","PhotoUrl":"/services/images/photo/0014W000027f6V5QAI","Sic":"4437","Industry":"Energy","AnnualRevenue":null,"NumberOfEmployees":3000,"Ownership":"Public","TickerSymbol":"UOS","Description":null,"Rating":null,"Site":null,"OwnerId":"0054W00000BZkk0QAD","CreatedDate":"2020-10-22T21:03:23.000+0000","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2020-10-22T21:03:23.000+0000","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2020-10-22T21:03:23.000+0000","LastActivityDate":null,"LastViewedDate":null,"LastReferencedDate":null,"Jigsaw":null,"JigsawCompanyId":null,"CleanStatus":"Pending","AccountSource":null,"DunsNumber":null,"Tradestyle":null,"NaicsCode":null,"NaicsDesc":null,"YearStarted":null,"SicDesc":null,"DandbCompanyId":null,"OperatingHoursId":null,"CustomerPriority__c":"High","SLA__c":"Platinum","Active__c":"Yes","NumberofLocations__c":6,"UpsellOpportunity__c":"Maybe","SLASerialNumber__c":"2457","SLAExpirationDate__c":"2021-05-19"},"emitted_at":1668502796087} 
+{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8jSAG","MetricsDate":"2021-06-06","FeatureType":"MarketingUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":1,"ActiveUserCount":1,"TotalLicenseCount":2},"emitted_at":1668502799319} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8pSAG","MetricsDate":"2021-06-06","FeatureType":"OfflineUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":1,"ActiveUserCount":1,"TotalLicenseCount":2},"emitted_at":1668502799319} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8qSAG","MetricsDate":"2021-06-06","FeatureType":"MobileUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":1,"ActiveUserCount":1,"TotalLicenseCount":3},"emitted_at":1668502799320} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8rSAG","MetricsDate":"2021-06-06","FeatureType":"SFContentUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":1,"ActiveUserCount":1,"TotalLicenseCount":5},"emitted_at":1668502799320} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8sSAG","MetricsDate":"2021-06-06","FeatureType":"KnowledgeUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":0,"ActiveUserCount":0,"TotalLicenseCount":2},"emitted_at":1668502799320} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8tSAG","MetricsDate":"2021-06-06","FeatureType":"InteractionUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":0,"ActiveUserCount":0,"TotalLicenseCount":3},"emitted_at":1668502799321} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8uSAG","MetricsDate":"2021-06-06","FeatureType":"SupportUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":1,"ActiveUserCount":1,"TotalLicenseCount":2},"emitted_at":1668502799321} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8vSAG","MetricsDate":"2021-06-06","FeatureType":"JigsawProspectingUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":0,"ActiveUserCount":0,"TotalLicenseCount":2},"emitted_at":1668502799321} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8wSAG","MetricsDate":"2021-06-06","FeatureType":"LiveAgentUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":0,"ActiveUserCount":0,"TotalLicenseCount":2},"emitted_at":1668502799321} +{"stream":"ActiveFeatureLicenseMetric","data":{"Id":"5H24W000000DA8xSAG","MetricsDate":"2021-06-06","FeatureType":"ChatterAnswersUser","SystemModstamp":"2021-06-06T05:04:12.000Z","AssignedUserCount":0,"ActiveUserCount":0,"TotalLicenseCount":30},"emitted_at":1668502799322} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y0SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s3wWAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804243} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y1SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s3xWAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804244} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y2SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s3yWAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804244} 
+{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y3SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s3zWAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804244} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y4SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s40WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804245} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y5SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s41WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804245} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y6SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s42WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804245} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y7SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s43WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804246} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y8SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s44WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804246} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7Y9SAK","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s45WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804246} +{"stream":"ActivePermSetLicenseMetric","data":{"Id":"5H14W000000I7YASA0","MetricsDate":"2021-06-06","PermissionSetLicenseId":"0PL4W0000012s46WAA","SystemModstamp":"2021-06-06T05:23:44.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502804247} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000MrTCSA0","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000001VsqfQAC","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815218} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000MrTDSA0","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMOQA0","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":1,"ActiveUserCount":1},"emitted_at":1668502815219} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000MrTESA0","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMtQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815219} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000MwuzSAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMuQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815219} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000Mwv0SAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMvQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815220} 
+{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000Mwv1SAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMwQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815220} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000Mwv2SAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMxQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815220} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000Mwv3SAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMyQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815221} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000Mwv4SAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjMzQAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815221} +{"stream":"ActiveProfileMetric","data":{"Id":"5H04W000000Mwv5SAC","MetricsDate":"2021-06-06","UserLicenseId":"1004W000001gXudQAE","ProfileId":"00e4W000002LjN0QAK","SystemModstamp":"2021-06-06T06:50:04.000Z","AssignedUserCount":0,"ActiveUserCount":0},"emitted_at":1668502815221} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIZQAY","Label":"Sales","MasterLabel":"salesforce","NamespacePrefix":"standard","DeveloperName":"Sales","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"The world's most popular sales force automation (SFA) solution","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":false,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900068} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIdQAI","Label":"Service","MasterLabel":"supportforce","NamespacePrefix":"standard","DeveloperName":"Service","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"Manage customer service with accounts, contacts, cases, and more","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900068} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIeQAI","Label":"Marketing","MasterLabel":"Marketing","NamespacePrefix":"standard","DeveloperName":"Marketing","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"Best-in-class on-demand marketing automation","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900068} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIjQAI","Label":"App Launcher","MasterLabel":"AppLauncher","NamespacePrefix":"standard","DeveloperName":"AppLauncher","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"App 
Launcher tabs","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900069} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIkQAI","Label":"Community","MasterLabel":"Community","NamespacePrefix":"standard","DeveloperName":"Community","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"Salesforce CRM Communities","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900069} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIlQAI","Label":"Site.com","MasterLabel":"Sites","NamespacePrefix":"standard","DeveloperName":"Sites","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"Build pixel-perfect, data-rich websites using the drag-and-drop Site.com application, and manage content and published sites.","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":false,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900069} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldImQAI","Label":"Salesforce Chatter","MasterLabel":"Collaboration","NamespacePrefix":"standard","DeveloperName":"Chatter","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"The Salesforce Chatter social network, including profiles and feeds","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900070} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIYQAY","Label":"Content","MasterLabel":"Content","NamespacePrefix":"standard","DeveloperName":"Content","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"Salesforce CRM Content","UiType":"Aloha","NavType":"Standard","UtilityBar":null,"HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":false,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900070} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIsQAI","Label":"Sales Console","MasterLabel":"Sales Console","NamespacePrefix":"standard","DeveloperName":"LightningSalesConsole","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"(Lightning Experience) Lets sales reps work with multiple records on one 
screen","UiType":"Lightning","NavType":"Console","UtilityBar":"LightningSalesConsole_UtilityBar","HeaderColor":"#0070D2","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":true,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900070} +{"stream":"AppDefinition","data":{"Id":"000000000000000AAA","DurableId":"06m4W000001ldIrQAI","Label":"Service Console","MasterLabel":"Service Console","NamespacePrefix":"standard","DeveloperName":"LightningService","LogoUrl":"/img/salesforce-noname-logo-v2.svg","Description":"(Lightning Experience) Lets support agents work with multiple records across customer service channels on one screen","UiType":"Lightning","NavType":"Console","UtilityBar":"LightningService_UtilityBar","HeaderColor":"#802ABE","IsOverrideOrgTheme":false,"IsSmallFormFactorSupported":true,"IsMediumFormFactorSupported":false,"IsLargeFormFactorSupported":true,"IsNavPersonalizationDisabled":false,"IsNavAutoTempTabsDisabled":false},"emitted_at":1668502900070} +{"stream":"Asset","data":{"Id":"02i4W00000EkJspQAF","ContactId":null,"AccountId":"0014W00002DkoWNQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJspQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Radish - Black, Winter, Organic","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":"2022-09-29T18:52:45.000Z","LastReferencedDate":"2022-09-29T18:52:45.000Z"},"emitted_at":1668502910118} +{"stream":"Asset","data":{"Id":"02i4W00000EkJsqQAF","ContactId":null,"AccountId":"0014W00002DkoW0QAJ","ParentId":null,"RootAssetId":"02i4W00000EkJsqQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Cheese - Valancey","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910118} 
+{"stream":"Asset","data":{"Id":"02i4W00000EkJsrQAF","ContactId":null,"AccountId":"0014W00002DkoW5QAJ","ParentId":null,"RootAssetId":"02i4W00000EkJsrQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Truffle Cups Green","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910119} +{"stream":"Asset","data":{"Id":"02i4W00000EkJssQAF","ContactId":null,"AccountId":"0014W00002DkoWNQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJssQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Plasticspoonblack","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910120} +{"stream":"Asset","data":{"Id":"02i4W00000EkJstQAF","ContactId":null,"AccountId":"0014W00002DkoWHQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJstQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Broom Handle","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910120} +{"stream":"Asset","data":{"Id":"02i4W00000EkJsuQAF","ContactId":null,"AccountId":"0014W00002DkoWCQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJsuQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Veal - Inside, 
Choice","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910121} +{"stream":"Asset","data":{"Id":"02i4W00000EkJsvQAF","ContactId":null,"AccountId":"0014W00002DkoW3QAJ","ParentId":null,"RootAssetId":"02i4W00000EkJsvQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Gherkin","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910121} +{"stream":"Asset","data":{"Id":"02i4W00000EkJswQAF","ContactId":null,"AccountId":"0014W00002DkoWJQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJswQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Yoghurt Tubes","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910122} +{"stream":"Asset","data":{"Id":"02i4W00000EkJsxQAF","ContactId":null,"AccountId":"0014W00002DkoWEQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJsxQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Sugar - Monocystal / 
Rock","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910123} +{"stream":"Asset","data":{"Id":"02i4W00000EkJsyQAF","ContactId":null,"AccountId":"0014W00002DkoWGQAZ","ParentId":null,"RootAssetId":"02i4W00000EkJsyQAF","Product2Id":null,"ProductCode":null,"IsCompetitorProduct":false,"CreatedDate":"2021-01-18T21:44:57.000Z","CreatedById":"0054W00000BZkk0QAD","LastModifiedDate":"2021-01-18T21:44:57.000Z","LastModifiedById":"0054W00000BZkk0QAD","SystemModstamp":"2021-01-18T21:44:57.000Z","IsDeleted":false,"Name":"Broccoli - Fresh","SerialNumber":null,"InstallDate":null,"PurchaseDate":null,"UsageEndDate":null,"LifecycleStartDate":null,"LifecycleEndDate":null,"Status":null,"Price":null,"Quantity":null,"Description":null,"OwnerId":"0054W00000BZkk0QAD","AssetProvidedById":null,"AssetServicedById":null,"IsInternal":false,"AssetLevel":1,"StockKeepingUnit":null,"HasLifecycleManagement":false,"CurrentMrr":null,"CurrentLifecycleEndDate":null,"CurrentQuantity":null,"CurrentAmount":null,"TotalLifecycleAmount":null,"LastViewedDate":null,"LastReferencedDate":null},"emitted_at":1668502910123} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ABS","FunctionId":"ABS","Type":"VALIDATION"},"emitted_at":1668502913066} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ACOS","FunctionId":"ACOS","Type":"VALIDATION"},"emitted_at":1668502913066} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ADDMONTHS","FunctionId":"ADDMONTHS","Type":"VALIDATION"},"emitted_at":1668502913066} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-AND","FunctionId":"AND","Type":"VALIDATION"},"emitted_at":1668502913067} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ASCII","FunctionId":"ASCII","Type":"VALIDATION"},"emitted_at":1668502913067} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ASIN","FunctionId":"ASIN","Type":"VALIDATION"},"emitted_at":1668502913067} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ATAN","FunctionId":"ATAN","Type":"VALIDATION"},"emitted_at":1668502913068} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-ATAN2","FunctionId":"ATAN2","Type":"VALIDATION"},"emitted_at":1668502913068} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-BEGINS","FunctionId":"BEGINS","Type":"VALIDATION"},"emitted_at":1668502913068} +{"stream":"FormulaFunctionAllowedType","data":{"Id":"000000000000000AAA","DurableId":"VALIDATION-BLANKVALUE","FunctionId":"BLANKVALUE","Type":"VALIDATION"},"emitted_at":1668502913068} 
+{"stream":"LeadHistory","data":{"Id":"0174W00010EpxkSQAR","IsDeleted":false,"LeadId":"00Q4W00001WGXdDUAX","CreatedById":"0054W00000BZkk0QAD","CreatedDate":"2021-11-02T00:20:05.000Z","Field":"Title","DataType":"Text","OldValue":"Co-Founder","NewValue":"History Track"},"emitted_at":1668502915669} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002Fjqc6QAB","ParentId":"0PS4W000002mq70WAA","SobjectType":"AuthorizationFormConsent","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921232} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002Fjqc7QAB","ParentId":"0PS4W000002mq70WAA","SobjectType":"AuthorizationFormDataUse","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921233} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002Fjqc8QAB","ParentId":"0PS4W000002mq70WAA","SobjectType":"AuthorizationFormText","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921233} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002Fjqc9QAB","ParentId":"0PS4W000002mq70WAA","SobjectType":"AuthorizationForm","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921233} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002FjqcAQAR","ParentId":"0PS4W000002mq70WAA","SobjectType":"ContactPointConsent","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921234} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002FjqcBQAR","ParentId":"0PS4W000002mq70WAA","SobjectType":"ContactPointTypeConsent","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921234} 
+{"stream":"ObjectPermissions","data":{"Id":"1104W00002FjqcCQAR","ParentId":"0PS4W000002mq70WAA","SobjectType":"DataUseLegalBasis","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921234} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002FjqcDQAR","ParentId":"0PS4W000002mq70WAA","SobjectType":"DataUsePurpose","PermissionsCreate":true,"PermissionsRead":true,"PermissionsEdit":true,"PermissionsDelete":true,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921235} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002FjqcEQAR","ParentId":"0PS4W000002mq6zWAA","SobjectType":"AuthorizationFormConsent","PermissionsCreate":false,"PermissionsRead":true,"PermissionsEdit":false,"PermissionsDelete":false,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921235} +{"stream":"ObjectPermissions","data":{"Id":"1104W00002FjqcFQAR","ParentId":"0PS4W000002mq6zWAA","SobjectType":"AuthorizationFormDataUse","PermissionsCreate":false,"PermissionsRead":true,"PermissionsEdit":false,"PermissionsDelete":false,"PermissionsViewAllRecords":false,"PermissionsModifyAllRecords":false,"CreatedDate":"2020-10-22T21:03:23.000Z","CreatedById":"0054W00000CVeyaQAD","LastModifiedDate":"2020-10-22T21:03:23.000Z","LastModifiedById":"0054W00000CVeyaQAD","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502921235} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pKXUAZ","ParentId":"0PS4W000002mq7WWAQ","Visibility":"DefaultOn","Name":"standard-ConsumptionSchedule","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937601} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pLCUAZ","ParentId":"0PS4W000002mq7OWAQ","Visibility":"DefaultOn","Name":"standard-AppLauncher","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937602} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pLFUAZ","ParentId":"0PS4W000002mq7PWAQ","Visibility":"DefaultOn","Name":"standard-AppLauncher","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937602} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pLGUAZ","ParentId":"0PS4W000002mq7QWAQ","Visibility":"DefaultOn","Name":"standard-AppLauncher","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937602} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pLIUAZ","ParentId":"0PS4W000002mq7WWAQ","Visibility":"DefaultOn","Name":"standard-AppLauncher","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937603} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pLUUAZ","ParentId":"0PS4W000002mq7WWAQ","Visibility":"DefaultOn","Name":"standard-Sites","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937603} 
+{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pMFUAZ","ParentId":"0PS4W000002mq7OWAQ","Visibility":"DefaultOn","Name":"standard-Idea","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937603} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pMIUAZ","ParentId":"0PS4W000002mq7PWAQ","Visibility":"DefaultOn","Name":"standard-Idea","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937603} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pMJUAZ","ParentId":"0PS4W000002mq7QWAQ","Visibility":"DefaultOn","Name":"standard-Idea","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937604} +{"stream":"PermissionSetTabSetting","data":{"Id":"01P4W00005O7pMMUAZ","ParentId":"0PS4W000002mq7WWAQ","Visibility":"DefaultOn","Name":"standard-Idea","SystemModstamp":"2020-10-22T21:03:23.000Z"},"emitted_at":1668502937604} +{"stream":"Describe","data":{"actionOverrides":[],"activateable":false,"associateEntityType":null,"associateParentEntity":null,"childRelationships":[],"compactLayoutable":false,"createable":false,"custom":false,"customSetting":false,"deepCloneable":false,"defaultImplementation":null,"deletable":false,"deprecatedAndHidden":false,"extendedBy":null,"extendsInterfaces":null,"feedEnabled":false,"fields":[{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":18,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":true,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":0,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":true,"highScaleNumber":false,"htmlFormatted":false,"idLookup":true,"inlineHelpText":null,"label":"Active Feature License Metric ID","length":18,"mask":null,"maskType":null,"name":"Id","nameField":false,"namePointing":false,"nillable":false,"permissionable":false,"picklistValues":[],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":false,"scale":0,"searchPrefilterable":false,"soapType":"tns:ID","sortable":true,"type":"id","unique":false,"updateable":false,"writeRequiresMasterRead":false},{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":0,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":false,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":0,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":true,"highScaleNumber":false,"htmlFormatted":false,"idLookup":false,"inlineHelpText":null,"label":"Metrics 
Date","length":0,"mask":null,"maskType":null,"name":"MetricsDate","nameField":false,"namePointing":false,"nillable":false,"permissionable":false,"picklistValues":[],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":false,"scale":0,"searchPrefilterable":false,"soapType":"xsd:date","sortable":true,"type":"date","unique":false,"updateable":false,"writeRequiresMasterRead":false},{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":120,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":false,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":0,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":true,"highScaleNumber":false,"htmlFormatted":false,"idLookup":false,"inlineHelpText":null,"label":"Primary Grain","length":40,"mask":null,"maskType":null,"name":"FeatureType","nameField":false,"namePointing":false,"nillable":false,"permissionable":false,"picklistValues":[{"active":true,"defaultValue":false,"label":"Marketing User","validFor":null,"value":"MarketingUser"},{"active":true,"defaultValue":false,"label":"Offline User","validFor":null,"value":"OfflineUser"},{"active":true,"defaultValue":false,"label":"Wireless User","validFor":null,"value":"WirelessUser"},{"active":true,"defaultValue":false,"label":"AvantGo User","validFor":null,"value":"AvantgoUser"},{"active":true,"defaultValue":false,"label":"Apex Mobile User","validFor":null,"value":"MobileUser"},{"active":true,"defaultValue":false,"label":"Salesforce CRM Content User","validFor":null,"value":"SFContentUser"},{"active":true,"defaultValue":false,"label":"Knowledge User","validFor":null,"value":"KnowledgeUser"},{"active":true,"defaultValue":false,"label":"Flow User","validFor":null,"value":"InteractionUser"},{"active":true,"defaultValue":false,"label":"Service Cloud User","validFor":null,"value":"SupportUser"},{"active":true,"defaultValue":false,"label":"Data.com User","validFor":null,"value":"JigsawProspectingUser"},{"active":true,"defaultValue":false,"label":"Chat User","validFor":null,"value":"LiveAgentUser"},{"active":true,"defaultValue":false,"label":"Site.com Contributor User","validFor":null,"value":"SiteforceContributorUser"},{"active":true,"defaultValue":false,"label":"Site.com Publisher User","validFor":null,"value":"SiteforcePublisherUser"},{"active":true,"defaultValue":false,"label":"Chatter Answers User","validFor":null,"value":"ChatterAnswersUser"},{"active":true,"defaultValue":false,"label":"WDC 
User","validFor":null,"value":"WorkDotComUserFeature"}],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":true,"scale":0,"searchPrefilterable":false,"soapType":"xsd:string","sortable":true,"type":"picklist","unique":false,"updateable":false,"writeRequiresMasterRead":false},{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":0,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":true,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":0,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":false,"highScaleNumber":false,"htmlFormatted":false,"idLookup":false,"inlineHelpText":null,"label":"System Modstamp","length":0,"mask":null,"maskType":null,"name":"SystemModstamp","nameField":false,"namePointing":false,"nillable":false,"permissionable":false,"picklistValues":[],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":false,"scale":0,"searchPrefilterable":false,"soapType":"xsd:dateTime","sortable":true,"type":"datetime","unique":false,"updateable":false,"writeRequiresMasterRead":false},{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":0,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":false,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":9,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":true,"highScaleNumber":false,"htmlFormatted":false,"idLookup":false,"inlineHelpText":null,"label":"Assigned User 
Count","length":0,"mask":null,"maskType":null,"name":"AssignedUserCount","nameField":false,"namePointing":false,"nillable":true,"permissionable":false,"picklistValues":[],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":false,"scale":0,"searchPrefilterable":false,"soapType":"xsd:int","sortable":true,"type":"int","unique":false,"updateable":false,"writeRequiresMasterRead":false},{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":0,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":false,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":9,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":true,"highScaleNumber":false,"htmlFormatted":false,"idLookup":false,"inlineHelpText":null,"label":"Active User Count","length":0,"mask":null,"maskType":null,"name":"ActiveUserCount","nameField":false,"namePointing":false,"nillable":true,"permissionable":false,"picklistValues":[],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":false,"scale":0,"searchPrefilterable":false,"soapType":"xsd:int","sortable":true,"type":"int","unique":false,"updateable":false,"writeRequiresMasterRead":false},{"aggregatable":true,"aiPredictionField":false,"autoNumber":false,"byteLength":0,"calculated":false,"calculatedFormula":null,"cascadeDelete":false,"caseSensitive":false,"compoundFieldName":null,"controllerName":null,"createable":false,"custom":false,"defaultValue":null,"defaultValueFormula":null,"defaultedOnCreate":false,"dependentPicklist":false,"deprecatedAndHidden":false,"digits":9,"displayLocationInDecimal":false,"encrypted":false,"externalId":false,"extraTypeInfo":null,"filterable":true,"filteredLookupInfo":null,"formulaTreatNullNumberAsZero":false,"groupable":true,"highScaleNumber":false,"htmlFormatted":false,"idLookup":false,"inlineHelpText":null,"label":"Total License Count","length":0,"mask":null,"maskType":null,"name":"TotalLicenseCount","nameField":false,"namePointing":false,"nillable":true,"permissionable":false,"picklistValues":[],"polymorphicForeignKey":false,"precision":0,"queryByDistance":false,"referenceTargetField":null,"referenceTo":[],"relationshipName":null,"relationshipOrder":null,"restrictedDelete":false,"restrictedPicklist":false,"scale":0,"searchPrefilterable":false,"soapType":"xsd:int","sortable":true,"type":"int","unique":false,"updateable":false,"writeRequiresMasterRead":false}],"hasSubtypes":false,"implementedBy":null,"implementsInterfaces":null,"isInterface":false,"isSubtype":false,"keyPrefix":"5H2","label":"Active Feature License Metric","labelPlural":"Active Feature License 
Metrics","layoutable":false,"listviewable":null,"lookupLayoutable":null,"mergeable":false,"mruEnabled":false,"name":"ActiveFeatureLicenseMetric","namedLayoutInfos":[],"networkScopeFieldName":null,"queryable":true,"recordTypeInfos":[],"replicateable":false,"retrieveable":true,"searchLayoutable":false,"searchable":false,"sobjectDescribeOption":"FULL","supportedScopes":[{"label":"All active feature license metrics","name":"everything"}],"triggerable":false,"undeletable":false,"updateable":false,"urls":{"rowTemplate":"/services/data/v52.0/sobjects/ActiveFeatureLicenseMetric/{ID}","describe":"/services/data/v52.0/sobjects/ActiveFeatureLicenseMetric/describe","sobject":"/services/data/v52.0/sobjects/ActiveFeatureLicenseMetric"}},"emitted_at":1668502938496} diff --git a/airbyte-integrations/connectors/source-salesforce/integration_tests/incremental_catalog.json b/airbyte-integrations/connectors/source-salesforce/integration_tests/incremental_catalog.json new file mode 100644 index 0000000000000..7bcaf66dfe236 --- /dev/null +++ b/airbyte-integrations/connectors/source-salesforce/integration_tests/incremental_catalog.json @@ -0,0 +1,100 @@ +{ + "streams": [ + { + "stream": { + "name": "Account", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "ActiveFeatureLicenseMetric", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "ActivePermSetLicenseMetric", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "ActiveProfileMetric", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "Asset", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "ObjectPermissions", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "PermissionSetTabSetting", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["SystemModstamp"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "LeadHistory", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", 
"incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["CreatedDate"], + "source_defined_primary_key": [["Id"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py index 8efa5276444d9..17840169568df 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/api.py @@ -154,6 +154,30 @@ "TaskStatus", "TaskWhoRelation", "UndecidedEventRelation", + "WorkOrderLineItemStatus", + "WorkOrderStatus", + "UserRecordAccess", + "OwnedContentDocument", + "OpenActivity", + "NoteAndAttachment", + "Name", + "LookedUpFromActivity", + "FolderedContentDocument", + "ContractStatus", + "ContentFolderItem", + "CombinedAttachment", + "CaseTeamTemplateRecord", + "CaseTeamTemplateMember", + "CaseTeamTemplate", + "CaseTeamRole", + "CaseTeamMember", + "AttachedContentDocument", + "AggregateResult", + "AccountHistory", + "ChannelProgramLevelShare", + "AccountBrandShare", + "AccountFeed", + "AssetFeed", ] UNSUPPORTED_FILTERING_STREAMS = [ diff --git a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py index d141ce57a76e2..fb5df8ee196e4 100644 --- a/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py +++ b/airbyte-integrations/connectors/source-salesforce/source_salesforce/streams.py @@ -251,7 +251,7 @@ def execute_job(self, query: str, url: str) -> Tuple[Optional[str], Optional[str for i in range(0, self.MAX_RETRY_NUMBER): job_id = self.create_stream_job(query=query, url=url) if not job_id: - return None, None + return None, job_status job_full_url = f"{url}/{job_id}" job_status = self.wait_for_job(url=job_full_url) if job_status not in ["UploadComplete", "InProgress"]: @@ -284,7 +284,7 @@ def download_data(self, url: str, chunk_size: int = 1024) -> tuple[str, str]: # set filepath for binary data from response tmp_file = os.path.realpath(os.path.basename(url)) with closing(self._send_http_request("GET", f"{url}/results", stream=True)) as response, open(tmp_file, "wb") as data_file: - response_encoding = response.encoding or response.apparent_encoding or self.encoding + response_encoding = response.apparent_encoding or response.encoding or self.encoding for chunk in response.iter_content(chunk_size=chunk_size): data_file.write(self.filter_null_bytes(chunk)) # check the file exists diff --git a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py index 1a997e00bb10d..249b930dbb94a 100644 --- a/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py +++ b/airbyte-integrations/connectors/source-salesforce/unit_tests/api_test.py @@ -33,6 +33,33 @@ def test_bulk_sync_creation_failed(stream_config, stream_api): assert err.value.response.json()[0]["message"] == "test_error" +def test_bulk_stream_fallback_to_rest(mocker, requests_mock, stream_config, stream_api): + """ + Here we mock BULK API with response returning error, saying BULK is not supported for this kind of entity. + On the other hand, we mock REST API for this same entity with a successful response. + After having instantiated a BulkStream, sync should succeed in case it falls back to REST API. 
Otherwise it would throw an error. + """ + stream = generate_stream("CustomEntity", stream_config, stream_api) + # mock a BULK API + requests_mock.register_uri( + "POST", + "https://fase-account.salesforce.com/services/data/v52.0/jobs/query", + status_code=400, + json=[{ + "errorCode": "INVALIDENTITY", + "message": "CustomEntity is not supported by the Bulk API" + }] + ) + rest_stream_records = [ + {"id": 1, "name": "custom entity", "created": "2010-11-11"}, + {"id": 11, "name": "custom entity", "created": "2020-01-02"} + ] + # mock REST API + mocker.patch("source_salesforce.source.SalesforceStream.read_records", Mock(return_value=rest_stream_records)) + assert type(stream) is BulkIncrementalSalesforceStream + assert list(stream.read_records(sync_mode=SyncMode.full_refresh)) == rest_stream_records + + def test_stream_unsupported_by_bulk(stream_config, stream_api, caplog): """ Stream `AcceptedEventRelation` is not supported by BULK API, so that REST API stream will be used for it. diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/.dockerignore b/airbyte-integrations/connectors/source-sap-fieldglass/.dockerignore new file mode 100644 index 0000000000000..c744112a322f1 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_sap_fieldglass +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/Dockerfile b/airbyte-integrations/connectors/source-sap-fieldglass/Dockerfile new file mode 100644 index 0000000000000..4f7e7ca215a50 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_sap_fieldglass ./source_sap_fieldglass + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-sap-fieldglass diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/README.md b/airbyte-integrations/connectors/source-sap-fieldglass/README.md new file mode 100644 index 0000000000000..b0399eaa2e15d --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/README.md @@ -0,0 +1,79 @@ +# Sap Fieldglass Source + +This is the repository for the Sap Fieldglass configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/sap-fieldglass). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
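An aside on the Salesforce unit-test addition above: `test_bulk_stream_fallback_to_rest` checks that a bulk stream whose object is rejected by the Bulk API (an HTTP 400 such as `INVALIDENTITY`) still yields records by reading the same entity through the REST API instead. Below is a minimal, self-contained sketch of that fallback shape; the names are purely illustrative and are not the connector's real classes.

```
# Illustrative sketch of the bulk-to-REST fallback exercised by the test above.
# Names are hypothetical; the real connector wires this through its stream classes.
class BulkNotSupportedError(Exception):
    """Raised when the Bulk API rejects an object (e.g. errorCode INVALIDENTITY)."""


def read_records(bulk_reader, rest_reader):
    """Try the Bulk API first; fall back to REST for the same entity on rejection."""
    try:
        yield from bulk_reader()
    except BulkNotSupportedError:
        yield from rest_reader()


def _bulk_reader():
    # Simulates the mocked 400 response from the test above.
    raise BulkNotSupportedError("CustomEntity is not supported by the Bulk API")


def _rest_reader():
    yield {"id": 1, "name": "custom entity", "created": "2010-11-11"}
    yield {"id": 11, "name": "custom entity", "created": "2020-01-02"}


if __name__ == "__main__":
    assert [r["id"] for r in read_records(_bulk_reader, _rest_reader)] == [1, 11]
```

The accompanying `streams.py` change in this diff also reorders encoding detection so that the payload-derived `apparent_encoding` wins over the header-declared `encoding` when both are present, with the stream's default as the final fallback.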
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-sap-fieldglass:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/sap-fieldglass) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_sap_fieldglass/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source sap-fieldglass test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-sap-fieldglass:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-sap-fieldglass:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-sap-fieldglass:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-sap-fieldglass:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-sap-fieldglass:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-sap-fieldglass:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-sap-fieldglass:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-sap-fieldglass:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. 
Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/__init__.py b/airbyte-integrations/connectors/source-sap-fieldglass/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml new file mode 100644 index 0000000000000..9552ff7a2432a --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-sap-fieldglass:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_sap_fieldglass/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/build.gradle b/airbyte-integrations/connectors/source-sap-fieldglass/build.gradle new file mode 100644 index 0000000000000..9d8b48cb2ad74 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_sap_fieldglass' +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/__init__.py b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..0eeeff21b3564 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "data": { + "date": "31/12/2999" + } +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..f3c122db9c8de --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "data", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..dc521ade7acf9 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/sample_config.json new file mode 100644 index 0000000000000..73112f5a25a73 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/main.py b/airbyte-integrations/connectors/source-sap-fieldglass/main.py new file mode 100644 index 0000000000000..2070225dc5625 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_sap_fieldglass import SourceSapFieldglass + +if __name__ == "__main__": + source = SourceSapFieldglass() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/requirements.txt b/airbyte-integrations/connectors/source-sap-fieldglass/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/setup.py b/airbyte-integrations/connectors/source-sap-fieldglass/setup.py new file mode 100644 index 0000000000000..3c43a8d935bc4 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_sap_fieldglass", + description="Source implementation for Sap Fieldglass.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/__init__.py b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/__init__.py new file mode 100644 index 0000000000000..610529d5c6871 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceSapFieldglass + +__all__ = ["SourceSapFieldglass"] diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/sap_fieldglass.yaml b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/sap_fieldglass.yaml new file mode 100644 index 0000000000000..e6d651bfe560f --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/sap_fieldglass.yaml @@ -0,0 +1,38 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://sandbox.api.sap.com/fieldglass/api/vc/connector" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "apikey" + api_token: "{{ config['api_key'] }}" + requrest_options_provider: + request_parameters: + base: "{{ config['base'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + data_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "data" + path: "/Active Worker Download" + +streams: + - "*ref(definitions.data_stream)" + +check: + stream_names: + - "data" diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/schemas/data.json b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/schemas/data.json new file mode 100644 index 0000000000000..bfbd735ad2a05 --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/schemas/data.json @@ -0,0 +1,210 @@ +{ + "Data": { + "type": "array", + "items": { + "type": "object", + "required": [ + "Bill Rate", + "Business Unit Code", + "Business Unit Name", + "Buyer Code", + "Cost Center Code", + "Currency", + "End Date", + "First Name", + "Job Posting Title", + "Job Seeker ID", + "Last Name", + "Pay Rate", + "Rate Category /UOM", + "Sequence", + "Site Code", + "Site Name", + "Start Date", + "Status", + "Vendor Name", + "Vendor Number", + "Work Order ID", + "Work Order/Work Order Revision Owner", + "Worker Email", + "Worker ID" + ], + "properties": { + "[c]Worker Custom Fields": { + "type": "string", + "description": "Custom fields found on the worker.\nIf there are many custom fields, there will be many\ncolumns.Column header format will be: \n\"[c]modulename_custom field name\" (i.e. 
[c] followed by\nmodule custom text lowercase with no spaces, followed by\nunderscore, followed by the custom field name text as\ndefined by users in the SAP Fieldglass application.)", + "maxLength": 4000 + }, + "Job Seeker ID": { + "type": "string", + "description": "14-character alphanumeric SAP Fieldglass Job Seeker ID.", + "maxLength": 14 + }, + "[c]Buyer or Supplier custom fields": { + "type": "string", + "description": "Custom fields found on the supplier. (For supplier side\ndownload, only those fields required by buyer for supplier\nto be entered and viewed are downloaded.)If there are many\ncustom fields, there will be many columns.Column header\nformat will be: \n?[c]modulename_custom field name? (i.e. [c] followed by\nmodule custom text lowercase with no spaces, followed by\nunderscore, followed by the custom field name text as\ndefined by users in the SAP Fieldglass application.)", + "maxLength": 4000 + }, + "Billable Per Diem": { + "type": "integer", + "format": "float", + "description": "" + }, + "First Name": { + "type": "string", + "description": "First name of the Worker as defined in SAP Fieldglass.", + "maxLength": 100 + }, + "Remit To address Code": { + "type": "string", + "description": "Code as assigned to the Remit To Address by supplier.", + "maxLength": 100 + }, + "Cost Center Name": { + "type": "string", + "description": "Cost Center Name in SAP Fieldglass.", + "maxLength": 200 + }, + "Work Order/Work Order Revision Owner": { + "type": "string", + "description": "Work Order Owner?s name.", + "maxLength": 100 + }, + "Vendor Name": { + "type": "string", + "description": "Supplier name.", + "maxLength": 200 + }, + "Work Order ID": { + "type": "string", + "description": "Work Order ID.", + "maxLength": 14 + }, + "Bill Rate": { + "type": "integer", + "format": "float", + "description": "Worker?s Bill Rate that is associated to the Rate\nCategory/UOM listed in the previous field." + }, + "Start Date": { + "type": "string", + "format": "date", + "description": "Work order start date." + }, + "Security ID": { + "type": "string", + "description": "This column will only appear in the file if the security ID\nfield functionality is activated in SAP Fieldglass for the\nbuyer. If the functionality is not activated, this column\nwill not be in the file.", + "maxLength": 100 + }, + "Currency": { + "type": "string", + "description": "ISO currency designation (ex. USD).", + "maxLength": 100 + }, + "[c]Work Order Custom Fields": { + "type": "string", + "description": "Custom fields found on the work order/work order revision. \nIf there are many custom fields, there will be many columns.\nColumn header format will be:\n?[c]modulename_custom field name? (i.e. 
[c] followed by\nmodule custom text lowercase with no spaces, followed by\nunderscore, followed by the custom field name text as\ndefined by users in the SAP Fieldglass application.)", + "maxLength": 4000 + }, + "Job Posting Title": { + "type": "string", + "description": "Job Posting?s Title or SOW Name for SOW Workers.", + "maxLength": 100 + }, + "Worker ID": { + "type": "string", + "description": "14-character alphanumeric SAP Fieldglass Worker ID.", + "maxLength": 14 + }, + "Business Unit Name": { + "type": "string", + "description": "Business Unit Name.", + "maxLength": 100 + }, + "Site Name": { + "type": "string", + "description": "Site Name.", + "maxLength": 100 + }, + "Buyer Code": { + "type": "string", + "description": "Buyer Company Code.", + "maxLength": 4 + }, + "Status": { + "type": "string", + "description": "Status of the worker.", + "maxLength": 50 + }, + "Pay Rate": { + "type": "integer", + "format": "float", + "description": "Worker?s Pay Rate that is associated to the Rate Category/UOM\nlisted in the previous field. \nIf Supplier did not enter a Pay Rate, 0.00 will be\ndisplayed." + }, + "Vendor Number": { + "type": "string", + "description": "SAP Fieldglass Supplier code for buyer.", + "maxLength": 4 + }, + "Sequence": { + "type": "number", + "description": "Work Order Revision Number." + }, + "Person ID": { + "type": "string", + "description": "Unique identifier for the worker as a person.", + "maxLength": 24 + }, + "Cost Center Code": { + "type": "string", + "description": "Cost Center Code in SAP Fieldglass.", + "maxLength": 200 + }, + "[c]Worker User Person Custom Fields": { + "type": "string", + "description": "Custom fields found on the worker user person\nIf there are many custom fields, there will be many columns.\nColumn header format will be: \n?[c]modulename_custom field name? (i.e. [c] followed by\nmodule custom text lowercase with no spaces, followed by\nunderscore, followed by the custom field name text as\ndefined by users in the SAP Fieldglass application.)", + "maxLength": 4000 + }, + "Rate Category /UOM": { + "type": "string", + "description": "Worker?s Rate Category/Unit Of Measure for which the\ncorresponding rate will be displayed. \nMore than one Rate Category/UOM may exist for a single\nWorker. If this is the case a new line will appear for each\nassociated Rate Category/UOM in this file.", + "maxLength": 100 + }, + "Segmented Object Detail": { + "type": "string", + "description": "Segmented Object Detail string. Segments will be separated by\ndelimiter on the Segmented Object.\nValid delimiters are colon (:), semi-colon (;), pipe (|), and\ndash (-).\nRequired if ?Enable Segmented Object Detail? configuration is\nset.\nThis field is mutually exclusive with GL Account fields.\nMore than one string can appear for a cost center.", + "maxLength": 2000 + }, + "End Date": { + "type": "string", + "format": "date", + "description": "Work order end date." 
+ }, + "Worker Email": { + "type": "string", + "description": "Worker?s e-mail address.", + "maxLength": 100 + }, + "Work Order/Work Order Revision Owner Employee ID": { + "type": "string", + "description": "Work Order Owner?s Employee ID.", + "maxLength": 50 + }, + "Last Name": { + "type": "string", + "description": "Last name of the Worker as defined in SAP Fieldglass.", + "maxLength": 100 + }, + "Site Code": { + "type": "string", + "description": "Site Code.", + "maxLength": 100 + }, + "Business Unit Code": { + "type": "string", + "description": "Business Unit Code.", + "maxLength": 100 + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/source.py b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/source.py new file mode 100644 index 0000000000000..2c0741158432f --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceSapFieldglass(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "sap_fieldglass.yaml"}) diff --git a/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/spec.yaml b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/spec.yaml new file mode 100644 index 0000000000000..0360b26ca25ff --- /dev/null +++ b/airbyte-integrations/connectors/source-sap-fieldglass/source_sap_fieldglass/spec.yaml @@ -0,0 +1,13 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/sap-fieldglass +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Sap Fieldglass Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + type: string + description: API Key + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-scaffold-source-http/acceptance-test-docker.sh old mode 100644 new mode 100755 diff --git a/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-scaffold-source-python/acceptance-test-docker.sh old mode 100644 new mode 100755 diff --git a/airbyte-integrations/connectors/source-secoda/.dockerignore b/airbyte-integrations/connectors/source-secoda/.dockerignore new file mode 100644 index 0000000000000..85035b698a6bd --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_secoda +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-secoda/Dockerfile b/airbyte-integrations/connectors/source-secoda/Dockerfile new file mode 100644 index 0000000000000..c4ec6e9b49189 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ 
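+# Note: installing with --prefix=/install stages the connector and its dependencies in an
+# isolated directory; the final stage below copies that directory into /usr/local, so
+# build-only packages such as build-base never reach the shipped image.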
+# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_secoda ./source_secoda + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-secoda diff --git a/airbyte-integrations/connectors/source-secoda/README.md b/airbyte-integrations/connectors/source-secoda/README.md new file mode 100644 index 0000000000000..57f14fc233128 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/README.md @@ -0,0 +1,79 @@ +# Secoda Source + +This is the repository for the Secoda configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/secoda). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-secoda:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/secoda) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_secoda/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source secoda test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-secoda:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-secoda:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-secoda:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-secoda:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-secoda:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-secoda:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. 
See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-secoda:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-secoda:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-secoda/__init__.py b/airbyte-integrations/connectors/source-secoda/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml b/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml new file mode 100644 index 0000000000000..7b7fe506a8893 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/acceptance-test-config.yml @@ -0,0 +1,26 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-secoda:dev +tests: + spec: + - spec_path: "source_secoda/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + # discovery: + # - config_path: "secrets/config.json" + # basic_read: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # empty_streams: [] + # # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # # expect_records: + # # path: "integration_tests/expected_records.txt" + # # extra_fields: no + # # exact_order: no + # # extra_records: yes + # full_refresh: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-secoda/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-secoda/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-secoda/build.gradle b/airbyte-integrations/connectors/source-secoda/build.gradle new file mode 100644 index 0000000000000..68a93dbb4e34e --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_secoda' +} diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/__init__.py b/airbyte-integrations/connectors/source-secoda/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-secoda/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-secoda/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-secoda/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..8c3550853e175 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "tables", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "terms", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "collections", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-secoda/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..6016942564e8a --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "wrong-api-key" +} diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-secoda/integration_tests/sample_config.json new file mode 100644 index 0000000000000..f3027573f9731 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "488c388c-e1fg-01fb-f1d0-e7de1a07be70" +} diff --git a/airbyte-integrations/connectors/source-secoda/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-secoda/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-secoda/main.py b/airbyte-integrations/connectors/source-secoda/main.py new file mode 100644 index 
0000000000000..4f35a26b96791 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_secoda import SourceSecoda + +if __name__ == "__main__": + source = SourceSecoda() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-secoda/requirements.txt b/airbyte-integrations/connectors/source-secoda/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-secoda/setup.py b/airbyte-integrations/connectors/source-secoda/setup.py new file mode 100644 index 0000000000000..f3f1d43c98877 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_secoda", + description="Source implementation for Secoda.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/__init__.py b/airbyte-integrations/connectors/source-secoda/source_secoda/__init__.py new file mode 100644 index 0000000000000..880f898ef6111 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceSecoda + +__all__ = ["SourceSecoda"] diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/collections.json b/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/collections.json new file mode 100644 index 0000000000000..2ef7041bf0d51 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/collections.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "updated_at": { + "type": "string" + }, + "entity_type": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "definition": { + "type": "string" + }, + "workspace_id": { + "type": "string" + }, + "published": { + "type": "boolean" + }, + "archived": { + "type": "boolean" + }, + "pii": { + "type": "boolean" + }, + "verified": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/tables.json b/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/tables.json new file mode 100644 index 0000000000000..d89780b2f65d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/tables.json @@ -0,0 +1,63 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "published": { + "type": "boolean" + }, + "pii": { + "type": "boolean" + }, + "created_at": { + "type": "string" + }, + "updated_at": { + "type": "string" + }, + "tags": { + "type": "array" + }, + "name": { + "type": "string" + }, + "schema": { + "type": "string" + }, + "cluster": { + "type": "string" + }, + "database": { + "type": "string" + }, + "owners": { + "type": "array" + }, + "description": { + "type": ["null", "string"] + }, + "columns": { + "type": "array" + }, + "parent_id": { + "type": "string" + }, + "integration": { + "type": "string" + }, + "entity_type": { + "type": "string" + }, + "pristine": { + "type": "boolean" + }, + "verified": { + "type": "boolean" + } + } +} diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/terms.json b/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/terms.json new file mode 100644 index 0000000000000..92ce37b3d3528 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/schemas/terms.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "updated_at": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "collections": { + "type": "array" + }, + "owners": { + "type": "array" + }, + "published": { + "type": "boolean" + }, + "pristine": { + "type": "boolean" + }, + "viewers_delta": { + "type": "object" + }, + "entity_type": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/secoda.yaml b/airbyte-integrations/connectors/source-secoda/source_secoda/secoda.yaml new file mode 100644 index 0000000000000..45d2dbe44ce13 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/secoda.yaml @@ -0,0 +1,69 @@ +version: "0.1.0" + +definitions: + schema_loader: + type: JsonSchema + file_path: 
"./source_secoda/schemas/{{ options.name }}.json" + selector: + extractor: + field_pointer: ["results"] + requester: + url_base: "https://api.secoda.co" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + error_handler: + response_filters: + - http_codes: [500] + action: FAIL + cursor_paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_token_option: + inject_into: path + page_size_option: + inject_into: body_data + field_name: "page_size" + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response.links.next }}" + stop_condition: "{{ response.links.next is none}}" + page_size: 1 + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.cursor_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + tables_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tables" + primary_key: "id" + path: "/table/tables/" + terms_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "terms" + primary_key: "id" + path: "/dictionary/terms/" + collections_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "collections" + primary_key: "id" + path: "/collection/collections/" + +streams: + - "*ref(definitions.tables_stream)" + - "*ref(definitions.terms_stream)" + - "*ref(definitions.collections_stream)" + +check: + stream_names: + - "tables" diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/source.py b/airbyte-integrations/connectors/source-secoda/source_secoda/source.py new file mode 100644 index 0000000000000..b4cf6d6ce8617 --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceSecoda(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "secoda.yaml"}) diff --git a/airbyte-integrations/connectors/source-secoda/source_secoda/spec.yaml b/airbyte-integrations/connectors/source-secoda/source_secoda/spec.yaml new file mode 100644 index 0000000000000..a511f4713bc8b --- /dev/null +++ b/airbyte-integrations/connectors/source-secoda/source_secoda/spec.yaml @@ -0,0 +1,18 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/secoda +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Secoda Spec + type: object + required: + - api_key + additionalProperties: true + properties: + # 'TODO: This schema defines the configuration required for the source. This usually involves metadata such as database and/or authentication information.': + api_key: + title: Api Key + type: string + description: >- + Your API Access Key. See here. The key is + case sensitive. 
+ airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-sendinblue/.dockerignore b/airbyte-integrations/connectors/source-sendinblue/.dockerignore new file mode 100644 index 0000000000000..e8cf785645736 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_sendinblue +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-sendinblue/Dockerfile b/airbyte-integrations/connectors/source-sendinblue/Dockerfile new file mode 100644 index 0000000000000..80211cef5582f --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_sendinblue ./source_sendinblue + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-sendinblue diff --git a/airbyte-integrations/connectors/source-sendinblue/README.md b/airbyte-integrations/connectors/source-sendinblue/README.md new file mode 100644 index 0000000000000..f2d119174f461 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/README.md @@ -0,0 +1,79 @@ +# Sendinblue Source + +This is the repository for the Sendinblue configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/sendinblue). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-sendinblue:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/sendinblue) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_sendinblue/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source sendinblue test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-sendinblue:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-sendinblue:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-sendinblue:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-sendinblue:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-sendinblue:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-sendinblue:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-sendinblue:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-sendinblue:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-sendinblue/__init__.py b/airbyte-integrations/connectors/source-sendinblue/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml new file mode 100644 index 0000000000000..debac6d01dd91 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-sendinblue:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_sendinblue/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-sendinblue/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-sendinblue/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-sendinblue/build.gradle b/airbyte-integrations/connectors/source-sendinblue/build.gradle new file mode 100644 index 0000000000000..625ab277f4bca --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_sendinblue' +} diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/__init__.py b/airbyte-integrations/connectors/source-sendinblue/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-sendinblue/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-sendinblue/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-sendinblue/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..8b74ccf7cd933 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "campaigns", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "templates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "contacts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-sendinblue/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d0ce30f189af7 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-sendinblue/integration_tests/sample_config.json new file mode 100644 index 0000000000000..ee7ac3b615169 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "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" +} diff --git a/airbyte-integrations/connectors/source-sendinblue/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-sendinblue/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git 
a/airbyte-integrations/connectors/source-sendinblue/main.py b/airbyte-integrations/connectors/source-sendinblue/main.py new file mode 100644 index 0000000000000..719a3f46cbc1f --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_sendinblue import SourceSendinblue + +if __name__ == "__main__": + source = SourceSendinblue() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-sendinblue/requirements.txt b/airbyte-integrations/connectors/source-sendinblue/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-sendinblue/setup.py b/airbyte-integrations/connectors/source-sendinblue/setup.py new file mode 100644 index 0000000000000..cd0901c5eca89 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_sendinblue", + description="Source implementation for Sendinblue.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/__init__.py b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/__init__.py new file mode 100644 index 0000000000000..edee84a63a189 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceSendinblue + +__all__ = ["SourceSendinblue"] diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/campaigns.json b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/campaigns.json new file mode 100644 index 0000000000000..02ab48afdaf06 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/campaigns.json @@ -0,0 +1,75 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + }, + "testSent": { + "type": ["null", "boolean"] + }, + "header": { + "type": ["null", "string"] + }, + "footer": { + "type": ["null", "string"] + }, + "sender": { + "type": ["null", "object"] + }, + "replyTo": { + "type": ["null", "string"] + }, + "toField": { + "type": ["null", "string"] + }, + "htmlContent": { + "type": ["null", "string"] + }, + "tag": { + "type": ["null", "string"] + }, + "inlineImageActivation": { + "type": ["null", "boolean"] + }, + "mirrorActive": { + "type": ["null", "boolean"] + }, + "recipients": { + "type": ["null", "object"] + }, + "statistics": { + "type": ["null", "object"] + }, + "subject": { + "type": ["null", "string"] + }, + "scheduledAt": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"] + }, + "modifiedAt": { + "type": ["null", "string"] + }, + "shareLink": { + "type": ["null", "string"] + }, + "sendAtBestTime": { + "type": ["null", "boolean"] + }, + "abTesting": { + "type": ["null", "boolean"] + } + } +} diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/contacts.json b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/contacts.json new file mode 100644 index 0000000000000..e23ac3c423a2e --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/contacts.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "email": { + "type": ["null", "string"] + }, + "emailBlacklisted": { + "type": ["null", "boolean"] + }, + "smsBlacklisted": { + "type": ["null", "boolean"] + }, + "listIds": { + "type": ["null", "array"] + }, + "createdAt": { + "type": ["null", "string"] + }, + "modifiedAt": { + "type": ["null", "string"] + }, + "attributes": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/templates.json b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/templates.json new file mode 100644 index 0000000000000..8c4fd441f2085 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/schemas/templates.json @@ -0,0 +1,42 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "integer"] + }, + "name": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "testSent": { + "type": ["null", "boolean"] + }, + "isActive": { + "type": ["null", "boolean"] + }, + "sender": { + "type": ["null", "object"] + }, + "replyTo": { + "type": ["null", "string"] + }, + "toField": { + "type": ["null", "string"] + }, + "htmlContent": { + "type": ["null", "string"] + }, + "tag": { + 
"type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"] + }, + "modifiedAt": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/sendinblue.yaml b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/sendinblue.yaml new file mode 100644 index 0000000000000..cb2974c65358c --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/sendinblue.yaml @@ -0,0 +1,69 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["{{ options['name'] }}"] + requester: + url_base: "https://api.sendinblue.com/v3" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "api-key" + api_token: "{{ config['api_key'] }}" + offset_paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 100 + page_token_option: + field_name: "offset" + inject_into: "request_parameter" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.offset_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + campaigns_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "campaigns" + primary_key: "id" + path: "/emailCampaigns" + campaign_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.campaigns_stream)" + parent_key: id + stream_slice_field: campaign_id + templates_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: templates + primary_key: id + path: "/smtp/templates" + contacts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: contacts + primary_key: id + path: "/contacts" + +streams: + - "*ref(definitions.campaigns_stream)" + - "*ref(definitions.templates_stream)" + - "*ref(definitions.contacts_stream)" + +check: + stream_names: + - "campaigns" diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/source.py b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/source.py new file mode 100644 index 0000000000000..924ae8ab35b46 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceSendinblue(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "sendinblue.yaml"}) diff --git a/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/spec.yaml b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/spec.yaml new file mode 100644 index 0000000000000..a89baa99efc90 --- /dev/null +++ b/airbyte-integrations/connectors/source-sendinblue/source_sendinblue/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/sendinblue +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Sendinblue Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: >- + Your API Key. See here. + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-senseforce/.dockerignore b/airbyte-integrations/connectors/source-senseforce/.dockerignore new file mode 100644 index 0000000000000..7a01bc9234b9a --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_senseforce +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-senseforce/Dockerfile b/airbyte-integrations/connectors/source-senseforce/Dockerfile new file mode 100644 index 0000000000000..a70abc3c5d7c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_senseforce ./source_senseforce + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-senseforce diff --git a/airbyte-integrations/connectors/source-senseforce/README.md b/airbyte-integrations/connectors/source-senseforce/README.md new file mode 100644 index 0000000000000..27f1b43cb0d27 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/README.md @@ -0,0 +1,79 @@ +# Senseforce Source + +This is the repository for the Senseforce configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/senseforce). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-senseforce:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/senseforce) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_senseforce/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source senseforce test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-senseforce:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-senseforce:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-senseforce:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-senseforce:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-senseforce:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-senseforce:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-senseforce:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-senseforce:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. 
+1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-senseforce/__init__.py b/airbyte-integrations/connectors/source-senseforce/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml b/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml new file mode 100644 index 0000000000000..8f77ab750b6f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/acceptance-test-config.yml @@ -0,0 +1,35 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-senseforce:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_senseforce/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-senseforce/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-senseforce/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-senseforce/build.gradle b/airbyte-integrations/connectors/source-senseforce/build.gradle new file mode 100644 index 0000000000000..3007db5540477 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_senseforce' +} diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/__init__.py b/airbyte-integrations/connectors/source-senseforce/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-senseforce/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..5f2f6ab2bd302 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "dataset": { + "airbyte_cursor": "5595570341.224" + } +} diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-senseforce/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-senseforce/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..78fa7f192f66c --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/configured_catalog.json @@ -0,0 +1,15 @@ +{ + "streams": [ + { + "stream": { + "name": "dataset", + "json_schema": {}, + "supported_defined_cursor": true, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["airbyte_cursor"] + } + ] +} diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-senseforce/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d8643f17cf432 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/invalid_config.json @@ -0,0 +1,7 @@ +{ + "access_token": "somekey", + "backend_url": "https://galaxyapi.senseforce.io", + "dataset_id": "8f418098-ca28-4df5-9498-0df9fe78eda7", + "start_date": "2020-07-22", + "slice_range": 1 +} diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-senseforce/integration_tests/sample_config.json new file mode 100644 index 0000000000000..d8643f17cf432 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/sample_config.json @@ -0,0 +1,7 @@ +{ + "access_token": "somekey", + "backend_url": "https://galaxyapi.senseforce.io", + "dataset_id": "8f418098-ca28-4df5-9498-0df9fe78eda7", + "start_date": "2020-07-22", + "slice_range": 1 +} diff --git a/airbyte-integrations/connectors/source-senseforce/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-senseforce/integration_tests/sample_state.json new file mode 100644 index 0000000000000..71a0154d2f7fd --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "dataset": { + "airbyte_cursor": "1595570341.224" + } +} diff --git a/airbyte-integrations/connectors/source-senseforce/main.py b/airbyte-integrations/connectors/source-senseforce/main.py new file mode 100644 index 0000000000000..15e64333cfc05 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_senseforce import SourceSenseforce + +if __name__ == "__main__": + source = SourceSenseforce() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-senseforce/requirements.txt b/airbyte-integrations/connectors/source-senseforce/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
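Note on the fixtures above: `configured_catalog.json` declares the `dataset` stream as incremental with `airbyte_cursor` as its cursor field, while `sample_state.json` and `abnormal_state.json` supply saved cursor values for the acceptance tests to replay. Purely as an illustrative sketch of the semantics those fixtures exercise (the connector itself delegates all of this to the declarative CDK; the helper and the record values below are hypothetical), a cursor-based filter might look like this:

```python
# Illustrative sketch only: shows the incremental-cursor behaviour exercised by
# sample_state.json / abnormal_state.json for the "dataset" stream. Not part of
# the connector, which relies on the declarative CDK framework for this logic.
from typing import Dict, Iterable, List, Tuple

CURSOR_FIELD = "airbyte_cursor"
STREAM = "dataset"


def read_incrementally(records: Iterable[Dict], state: Dict) -> Tuple[List[Dict], Dict]:
    """Return only records newer than the saved cursor, plus the updated state."""
    last_cursor = float(state.get(STREAM, {}).get(CURSOR_FIELD, 0))
    new_records = [r for r in records if float(r[CURSOR_FIELD]) > last_cursor]
    if new_records:
        last_cursor = max(float(r[CURSOR_FIELD]) for r in new_records)
    return new_records, {STREAM: {CURSOR_FIELD: str(last_cursor)}}


if __name__ == "__main__":
    # State shaped like integration_tests/sample_state.json; records are made up.
    state = {"dataset": {"airbyte_cursor": "1595570341.224"}}
    records = [
        {"id": "a", "timestamp": 1595570341224, "airbyte_cursor": 1595570341.224},
        {"id": "b", "timestamp": 1595570400000, "airbyte_cursor": 1595570400.0},
    ]
    synced, new_state = read_incrementally(records, state)
    print(synced)     # only record "b" is newer than the saved cursor
    print(new_state)  # {'dataset': {'airbyte_cursor': '1595570400.0'}}
```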
diff --git a/airbyte-integrations/connectors/source-senseforce/setup.py b/airbyte-integrations/connectors/source-senseforce/setup.py new file mode 100644 index 0000000000000..889be8c9c2b06 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_senseforce", + description="Source implementation for Senseforce.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-senseforce/source_senseforce/__init__.py b/airbyte-integrations/connectors/source-senseforce/source_senseforce/__init__.py new file mode 100644 index 0000000000000..1f67b7f1427da --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/source_senseforce/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceSenseforce + +__all__ = ["SourceSenseforce"] diff --git a/airbyte-integrations/connectors/source-senseforce/source_senseforce/schemas/dataset.json b/airbyte-integrations/connectors/source-senseforce/source_senseforce/schemas/dataset.json new file mode 100644 index 0000000000000..ec97dd7409526 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/source_senseforce/schemas/dataset.json @@ -0,0 +1,19 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "additionalProperties": true, + "properties": { + "timestamp": { + "type": "integer" + }, + "thing": { + "type": "string" + }, + "id": { + "type": ["null", "string"] + }, + "airbyte_cursor": { + "type": "number" + } + } +} diff --git a/airbyte-integrations/connectors/source-senseforce/source_senseforce/senseforce.yaml b/airbyte-integrations/connectors/source-senseforce/source_senseforce/senseforce.yaml new file mode 100644 index 0000000000000..d1a6db08a29ee --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/source_senseforce/senseforce.yaml @@ -0,0 +1,80 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + + requester: + # url_base: "http://localhost:8080" + url_base: "{{ config['backend_url'] }}" + http_method: "POST" + request_options_provider: + request_body_data: | + [{"clause": {"type": "timestamp", "operator": 10, "parameters": + [{"value": {{ stream_slice['start_time'] | int * 1000 }} }, + {"value": {{ stream_slice['end_time'] | int * 1000 + (86400000 - 1) }} } + ] + + }, "orderBy": 1, "columnName": "Timestamp"}]/ + request_headers: + Content-Type: application/json + authenticator: + type: BearerAuthenticator + api_token: "{{ config['access_token'] }}" + + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ now_utc() }}" + datetime_format: "%Y-%m-%d %H:%M:%S.%f+00:00" + step: "100d" #TODO: Add {{ config['slice_range'] ~ d }} here, once it's possible to use config-values for step definition + datetime_format: "%s" + cursor_field: "{{ options['stream_cursor_field'] }}" + + retriever: + record_selector: + $ref: 
"*ref(definitions.selector)" + paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: "OffsetIncrement" + page_size: 10000 + page_token_option: + field_name: "offset" + inject_into: "request_parameter" + stream_slicer: + $ref: "*ref(definitions.stream_slicer)" + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + dataset_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "dataset" + primary_key: + - "id" + path: "/api/dataset/execute/{{ config['dataset_id']}}" + stream_cursor_field: "airbyte_cursor" + transformations: + - type: AddFields + fields: + - path: ["airbyte_cursor"] + value: "{{ record['timestamp'] | int / 1000 }}" + +streams: + - "*ref(definitions.dataset_stream)" + +check: + stream_names: + - "dataset" diff --git a/airbyte-integrations/connectors/source-senseforce/source_senseforce/source.py b/airbyte-integrations/connectors/source-senseforce/source_senseforce/source.py new file mode 100644 index 0000000000000..26d5c1ee8bd1c --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/source_senseforce/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceSenseforce(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "senseforce.yaml"}) diff --git a/airbyte-integrations/connectors/source-senseforce/source_senseforce/spec.yaml b/airbyte-integrations/connectors/source-senseforce/source_senseforce/spec.yaml new file mode 100644 index 0000000000000..fcd893f70a826 --- /dev/null +++ b/airbyte-integrations/connectors/source-senseforce/source_senseforce/spec.yaml @@ -0,0 +1,69 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/senseforce +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Senseforce Source Spec + type: object + required: + - access_token + - backend_url + - dataset_id + - start_date + additionalProperties: true + properties: + access_token: + type: string + title: API Access Token + description: >- + Your API access token. See here. + The toke is case sensitive. + airbyte_secret: true + backend_url: + type: string + title: Senseforce backend URL + examples: + - "https://galaxyapi.senseforce.io" + description: >- + Your Senseforce API backend URL. This is the URL shown during the Login screen. See here + for more details. + (Note: Most Senseforce backend APIs have the term 'galaxy' in their ULR) + dataset_id: + type: string + title: Dataset ID + examples: + - 8f418098-ca28-4df5-9498-0df9fe78eda7 + description: >- + The ID of the dataset you want to synchronize. The ID can be found in the URL when opening the dataset. + See here + for more details. + (Note: As the Senseforce API only allows to synchronize a specific dataset, each dataset you + want to synchronize needs to be implemented as a separate airbyte source). + start_date: + type: string + title: The first day (in UTC) when to read data from. + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + description: >- + UTC date and time in the format 2017-01-25. 
Only data with "Timestamp" + after this date will be replicated. + Important note: This start date must be set to the first day of where your dataset provides data. + If your dataset has data from 2020-10-10 10:21:10, set the start_date to 2020-10-10 or later + examples: + - "2017-01-25" + slice_range: + type: integer + title: Data request time increment in days + default: 10 + minimum: 1 + maximum: 365 + examples: [1, 3, 10, 30, 180, 360] + airbyte_hidden: true + description: >- + The time increment used by the connector when requesting data from the Senseforce API. The bigger the value is, + the less requests will be made and faster the sync will be. On the other hand, the more seldom + the state is persisted and the more likely one could run into rate limites. + Furthermore, consider that large chunks of time might take a long time for the Senseforce query to return + data - meaning it could take in effect longer than with more smaller time slices. + If there are a lot of data per day, set this setting to 1. If there is only very little data per day, you might + change the setting to 10 or more. diff --git a/airbyte-integrations/connectors/source-sftp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-sftp/acceptance-test-config.yml new file mode 100644 index 0000000000000..6af33aacf9869 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-sftp:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-sftp/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-sftp/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-sftp/build.gradle b/airbyte-integrations/connectors/source-sftp/build.gradle index 410e4f1b8dfa2..501383230d395 100644 --- a/airbyte-integrations/connectors/source-sftp/build.gradle +++ b/airbyte-integrations/connectors/source-sftp/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { diff --git a/airbyte-integrations/connectors/source-sftp/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-sftp/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..ed5c98b1ed4e6 --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "host": "default", + "port": 5555, + "user": "default" +} diff --git a/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..fef8e5b343fbf --- /dev/null +++ b/airbyte-integrations/connectors/source-sftp/src/test-integration/resources/expected_spec.json @@ -0,0 +1,109 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/source/sftp", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "SFTP Source Spec", + "type": "object", + "required": ["user", "host", "port"], + "additionalProperties": true, + "properties": { + "user": { + "title": "User Name", + "description": "The server user", + "type": "string", + "order": 0 + }, + "host": { + "title": "Host Address", + "description": "The server host address", + "type": "string", + "examples": ["www.host.com", "192.0.2.1"], + "order": 1 + }, + "port": { + "title": "Port", + "description": "The server port", + "type": "integer", + "default": 22, + "examples": ["22"], + "order": 2 + }, + "credentials": { + "type": "object", + "title": "Authentication", + "description": "The server authentication method", + "order": 3, + "oneOf": [ + { + "title": "Password Authentication", + "required": ["auth_method", "auth_user_password"], + "properties": { + "auth_method": { + "description": "Connect through password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "auth_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 1 + } + } + }, + { + "title": "SSH Key Authentication", + "required": ["auth_method", "auth_ssh_key"], + "properties": { + "auth_method": { + "description": "Connect through ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "auth_ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 1 + } + } + } + ] + }, + "file_types": { + "title": "File types", + "description": "Coma separated file types. 
Currently only 'csv' and 'json' types are supported.", + "type": "string", + "default": "csv,json", + "order": 4, + "examples": ["csv,json", "csv"] + }, + "folder_path": { + "title": "Folder Path", + "description": "The directory to search files for sync", + "type": "string", + "default": "", + "examples": ["/logs/2022"], + "order": 5 + }, + "file_pattern": { + "title": "File Pattern", + "description": "The regular expression to specify files for sync in a chosen Folder Path", + "type": "string", + "default": "", + "examples": [ + "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + ], + "order": 6 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-smaily/.dockerignore b/airbyte-integrations/connectors/source-smaily/.dockerignore new file mode 100644 index 0000000000000..3e910911bfed3 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_smaily +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-smaily/Dockerfile b/airbyte-integrations/connectors/source-smaily/Dockerfile new file mode 100644 index 0000000000000..4eed4aa5d4615 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_smaily ./source_smaily + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-smaily diff --git a/airbyte-integrations/connectors/source-smaily/README.md b/airbyte-integrations/connectors/source-smaily/README.md new file mode 100644 index 0000000000000..3d33800341a98 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/README.md @@ -0,0 +1,79 @@ +# Smaily Source + +This is the repository for the Smaily configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/smaily). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-smaily:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/smaily) +to generate the necessary credentials. 
Then create a file `secrets/config.json` conforming to the `source_smaily/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source smaily test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-smaily:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-smaily:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-smaily:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-smaily:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-smaily:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-smaily:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-smaily:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-smaily:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
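Note on the README above: it states that, when building via Gradle, the docker image name and tag come from the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in the connector's Dockerfile (shown earlier in this diff). Purely to illustrate that convention, the sketch below is a hypothetical helper (not part of this PR) that reads those labels and prints the resulting image reference:

```python
# Hypothetical helper, for illustration only: derives "<io.airbyte.name>:<io.airbyte.version>"
# from the LABEL lines of a connector Dockerfile, mirroring the convention the README describes.
import re
from pathlib import Path


def image_ref_from_dockerfile(dockerfile_text: str) -> str:
    # Collect LABEL key=value pairs such as "LABEL io.airbyte.version=0.1.0".
    labels = dict(re.findall(r"^LABEL\s+(io\.airbyte\.\w+)=(\S+)", dockerfile_text, re.MULTILINE))
    return f'{labels["io.airbyte.name"]}:{labels["io.airbyte.version"]}'


if __name__ == "__main__":
    text = Path("airbyte-integrations/connectors/source-smaily/Dockerfile").read_text()
    print(image_ref_from_dockerfile(text))  # expected: airbyte/source-smaily:0.1.0
```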
diff --git a/airbyte-integrations/connectors/source-smaily/__init__.py b/airbyte-integrations/connectors/source-smaily/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smaily/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml
new file mode 100644
index 0000000000000..7e19a792a8c15
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smaily/acceptance-test-config.yml
@@ -0,0 +1,38 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-smaily:dev
+acceptance_tests:
+ spec:
+ tests:
+ - spec_path: "source_smaily/spec.yaml"
+ connection:
+ tests:
+ - config_path: "secrets/config.json"
+ status: "succeed"
+ - config_path: "integration_tests/invalid_config.json"
+ status: "failed"
+ discovery:
+ tests:
+ - config_path: "secrets/config.json"
+ basic_read:
+ tests:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
+ empty_streams: []
+# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
+# expect_records:
+# path: "integration_tests/expected_records.txt"
+# extra_fields: no
+# exact_order: no
+# extra_records: yes
+ incremental:
+ bypass_reason: "This connector does not implement incremental sync"
+# TODO uncomment this block if your connector implements incremental sync:
+# tests:
+# - config_path: "secrets/config.json"
+# configured_catalog_path: "integration_tests/configured_catalog.json"
+# future_state_path: "integration_tests/abnormal_state.json"
+ full_refresh:
+ tests:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-smaily/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-smaily/acceptance-test-docker.sh
new file mode 100644
index 0000000000000..c51577d10690c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smaily/acceptance-test-docker.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build .
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-smaily/build.gradle b/airbyte-integrations/connectors/source-smaily/build.gradle new file mode 100644 index 0000000000000..80cc96d71fa22 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_smaily' +} diff --git a/airbyte-integrations/connectors/source-smaily/integration_tests/__init__.py b/airbyte-integrations/connectors/source-smaily/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-smaily/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-smaily/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-smaily/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-smaily/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..32b2a0fda33c6 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/integration_tests/configured_catalog.json @@ -0,0 +1,58 @@ +{ + "streams": [ + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "segments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "campaigns", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "templates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "automations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "ab_tests", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-smaily/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-smaily/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..29439fa04279f --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "api_subdomain": "invalid_domain", + "api_username": "invalid_username", + "api_password": "invalid_password" +} diff --git a/airbyte-integrations/connectors/source-smaily/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-smaily/integration_tests/sample_config.json new file mode 100644 index 0000000000000..6239900e68d6d --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_subdomain": "ea6l2xfv", + "api_username": "uvzmmg", + "api_password": "" +} diff --git a/airbyte-integrations/connectors/source-smaily/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-smaily/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-smaily/main.py b/airbyte-integrations/connectors/source-smaily/main.py new file mode 100644 index 0000000000000..d9d758860f7e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_smaily import SourceSmaily + +if __name__ == "__main__": + source = SourceSmaily() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-smaily/requirements.txt b/airbyte-integrations/connectors/source-smaily/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-smaily/setup.py b/airbyte-integrations/connectors/source-smaily/setup.py new file mode 100644 index 0000000000000..5193a5b2eb6dd --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_smaily", + description="Source implementation for Smaily.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/__init__.py b/airbyte-integrations/connectors/source-smaily/source_smaily/__init__.py new file mode 100644 index 0000000000000..4a74fb5c7ad29 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceSmaily + +__all__ = ["SourceSmaily"] diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/ab_tests.json b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/ab_tests.json new file mode 100644 index 0000000000000..b835874ee5a37 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/ab_tests.json @@ -0,0 +1,55 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": ["string", "null"] + }, + "sections": { + "type": ["array", "null"], + "items": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "subject": { + "type": ["string", "null"] + }, + "template": { + "type": ["object", "null"], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": ["string", "null"] + }, + "preview_url": { + "type": ["string", "null"] + } + } + } + } + } + }, + "tags": { + "type": ["array", "null"], + "items": { + "type": ["string", "null"] + } + }, + "created_at": { + "type": ["string", "null"] + }, + "completed_at": { + "type": ["string", "null"] + }, + "status": { + "type": ["string", "null"] + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/automations.json b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/automations.json new file mode 100644 index 0000000000000..28d71420c3cda --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/automations.json @@ -0,0 +1,40 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": ["string", "null"] + }, + "sections": { + "type": ["array", "null"], + "items": { + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "subject": { + "type": ["string", "null"] + }, + "template": { + "type": ["object", "null"], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": ["string", "null"] + }, + "preview_url": { + "type": ["string", "null"] + } + } + } + } + } + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/campaigns.json b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/campaigns.json new file mode 100644 index 0000000000000..d2c8de14fda08 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/campaigns.json @@ -0,0 +1,41 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": ["string", "null"] + }, + "template": { + "type": ["object", "null"], + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": ["string", "null"] + }, + "preview_url": { + "type": ["string", "null"] + } + } + }, + "tags": { + "type": ["array", "null"], + "items": { + "type": "string" + } + }, + "created_at": { + "type": ["string", "null"] + }, + "completed_at": { + "type": ["string", "null"] + }, + "status": { + "type": ["string", "null"] + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/segments.json b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/segments.json new file mode 100644 index 0000000000000..04043e887136f --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/segments.json @@ -0,0 +1,15 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": ["string", "null"] + }, + 
"subscribers_count": { + "type": ["integer", "null"] + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/templates.json b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/templates.json new file mode 100644 index 0000000000000..8fcf966293335 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/templates.json @@ -0,0 +1,24 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "type": { + "type": ["string", "null"] + }, + "name": { + "type": ["string", "null"] + }, + "created_at": { + "type": ["string", "null"] + }, + "modified_at": { + "type": ["string", "null"] + }, + "preview_url": { + "type": ["string", "null"] + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/users.json b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/users.json new file mode 100644 index 0000000000000..5171e1eb86025 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/schemas/users.json @@ -0,0 +1,32 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "integer" + }, + "email": { + "type": ["string", "null"] + }, + "first_name": { + "type": ["string", "null"] + }, + "last_name": { + "type": ["string", "null"] + }, + "is_owner": { + "type": ["boolean", "null"] + }, + "role": { + "type": ["object", "null"], + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": ["string", "null"] + } + } + } + }, + "required": ["id"] +} diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/smaily.yaml b/airbyte-integrations/connectors/source-smaily/source_smaily/smaily.yaml new file mode 100644 index 0000000000000..e52dec4b7dc00 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/smaily.yaml @@ -0,0 +1,99 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://{{config['api_subdomain']}}.sendsmaily.net/api" + http_method: "GET" + authenticator: + type: BasicHttpAuthenticator + username: "{{config['api_username']}}" + password: "{{config['api_password']}}" + increment_paginator: + type: DefaultPaginator + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: PageIncrement + page_size: 250 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + # API Docs: https://smaily.com/help/api/organizations/list-users-of-an-organization/ + users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "users" + primary_key: "id" + path: "/organizations/users.php" + retriever: + $ref: "*ref(definitions.retriever)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + # API Docs: https://smaily.com/help/api/segments/list-segments/ + segments_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "segments" + primary_key: "id" + path: "/list.php" + # API Docs: https://smaily.com/help/api/campaigns-3/list-campaigns/ + campaigns_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "campaigns" + primary_key: "id" + path: "/campaign.php" + # API Docs: 
https://smaily.com/help/api/templates-2/list-templates/ + templates_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "templates" + primary_key: "id" + path: "/templates.php" + retriever: + $ref: "*ref(definitions.retriever)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + pagination_strategy: + type: PageIncrement + page_size: 1000 + # API Docs: https://smaily.com/help/api/automations-2/list-automation-workflows/ + automations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "automations" + primary_key: "id" + path: "/autoresponder.php" + # API Docs: https://smaily.com/help/api/a-b-tests/list-a-b-tests/ + ab_tests_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "ab_tests" + primary_key: "id" + path: "/split.php" + +streams: + - "*ref(definitions.users_stream)" + - "*ref(definitions.segments_stream)" + - "*ref(definitions.campaigns_stream)" + - "*ref(definitions.templates_stream)" + - "*ref(definitions.automations_stream)" + - "*ref(definitions.ab_tests_stream)" + +check: + stream_names: ["users"] diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/source.py b/airbyte-integrations/connectors/source-smaily/source_smaily/source.py new file mode 100644 index 0000000000000..9104fe3575eed --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceSmaily(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "smaily.yaml"}) diff --git a/airbyte-integrations/connectors/source-smaily/source_smaily/spec.yaml b/airbyte-integrations/connectors/source-smaily/source_smaily/spec.yaml new file mode 100644 index 0000000000000..d22c139eb7096 --- /dev/null +++ b/airbyte-integrations/connectors/source-smaily/source_smaily/spec.yaml @@ -0,0 +1,24 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/smaily +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Smaily Spec + type: object + required: + - api_subdomain + - api_username + - api_password + additionalProperties: true + properties: + api_subdomain: + type: string + title: API Subdomain + description: API Subdomain. See https://smaily.com/help/api/general/create-api-user/ + api_username: + type: string + title: API User Username + description: API user username. See https://smaily.com/help/api/general/create-api-user/ + api_password: + type: string + title: API User Password + description: API user password. 
See https://smaily.com/help/api/general/create-api-user/ + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-smartengage/.dockerignore b/airbyte-integrations/connectors/source-smartengage/.dockerignore new file mode 100644 index 0000000000000..4dafb44b3d8db --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_smartengage +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-smartengage/Dockerfile b/airbyte-integrations/connectors/source-smartengage/Dockerfile new file mode 100644 index 0000000000000..4e42844d9de15 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_smartengage ./source_smartengage + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-smartengage diff --git a/airbyte-integrations/connectors/source-smartengage/README.md b/airbyte-integrations/connectors/source-smartengage/README.md new file mode 100644 index 0000000000000..c44708f3ea164 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/README.md @@ -0,0 +1,79 @@ +# Smartengage Source + +This is the repository for the Smartengage configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/smartengage). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-smartengage:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/smartengage) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_smartengage/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source smartengage test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-smartengage:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-smartengage:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-smartengage:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-smartengage:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-smartengage:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-smartengage:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+
+#### Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+
+To run your integration tests with Docker, run the `acceptance-test-docker.sh` script.
+
+### Using Gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-smartengage:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-smartengage:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-smartengage/__init__.py b/airbyte-integrations/connectors/source-smartengage/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smartengage/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml
new file mode 100644
index 0000000000000..b9ea1506feece
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smartengage/acceptance-test-config.yml
@@ -0,0 +1,38 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-smartengage:dev
+acceptance_tests:
+ spec:
+ tests:
+ - spec_path: "source_smartengage/spec.yaml"
+ connection:
+ tests:
+ - config_path: "secrets/config.json"
+ status: "succeed"
+ - config_path: "integration_tests/invalid_config.json"
+ status: "failed"
+ discovery:
+ tests:
+ - config_path: "secrets/config.json"
+ basic_read:
+ tests:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
+ empty_streams: []
+# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file
+# expect_records:
+# path: "integration_tests/expected_records.txt"
+# extra_fields: no
+# exact_order: no
+# extra_records: yes
+ incremental:
+ bypass_reason: "This connector does not implement incremental sync"
+# TODO uncomment this block if your connector implements incremental sync:
+# tests:
+# - config_path: "secrets/config.json"
+# configured_catalog_path: "integration_tests/configured_catalog.json"
+# future_state_path: "integration_tests/abnormal_state.json"
+ full_refresh:
+ tests:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-smartengage/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-smartengage/acceptance-test-docker.sh
new file mode 100644
index 0000000000000..c51577d10690c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smartengage/acceptance-test-docker.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-)
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
+
diff --git a/airbyte-integrations/connectors/source-smartengage/build.gradle b/airbyte-integrations/connectors/source-smartengage/build.gradle
new file mode 100644
index 0000000000000..d8b668f76f858
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smartengage/build.gradle
@@ -0,0 +1,9 @@
+plugins {
+ id 'airbyte-python'
+ id 'airbyte-docker'
+ id 'airbyte-source-acceptance-test'
+}
+
+airbytePython {
+ moduleDirectory 'source_smartengage'
+}
diff --git a/airbyte-integrations/connectors/source-smartengage/integration_tests/__init__.py b/airbyte-integrations/connectors/source-smartengage/integration_tests/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-smartengage/integration_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-smartengage/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-smartengage/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-smartengage/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-smartengage/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..05b05c2b9689c --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "avatars", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "tags", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "custom_fields", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "sequences", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-smartengage/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-smartengage/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..efc25166ade22 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "invalid_key" +} diff --git a/airbyte-integrations/connectors/source-smartengage/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-smartengage/integration_tests/sample_config.json new file mode 100644 index 0000000000000..73112f5a25a73 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-smartengage/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-smartengage/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-smartengage/main.py b/airbyte-integrations/connectors/source-smartengage/main.py new file mode 100644 index 0000000000000..555c0bd4d9acb --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_smartengage import SourceSmartengage + +if __name__ == "__main__": + source = SourceSmartengage() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-smartengage/requirements.txt b/airbyte-integrations/connectors/source-smartengage/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-smartengage/setup.py b/airbyte-integrations/connectors/source-smartengage/setup.py new file mode 100644 index 0000000000000..35e2c31efbd9e --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_smartengage", + description="Source implementation for Smartengage.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/__init__.py b/airbyte-integrations/connectors/source-smartengage/source_smartengage/__init__.py new file mode 100644 index 0000000000000..7c5a3451ec084 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceSmartengage + +__all__ = ["SourceSmartengage"] diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/avatars.json b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/avatars.json new file mode 100644 index 0000000000000..f2b5f2292f4d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/avatars.json @@ -0,0 +1,21 @@ +{ + "type": "object", + "properties": { + "brand_name": { + "type": ["string", "null"] + }, + "avatar_id": { + "type": "string" + }, + "brand_image": { + "type": ["string", "null"] + }, + "user_role": { + "type": ["string", "null"] + }, + "facebook_page_id": { + "type": ["string", "null"] + } + }, + "required": ["avatar_id"] +} diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/custom_fields.json b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/custom_fields.json new file mode 100644 index 0000000000000..1da4bf627c04d --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/custom_fields.json @@ -0,0 +1,12 @@ +{ + "type": "object", + "properties": { + "custom_field_id": { + "type": "string" + }, + "custom_field_name": { + "type": ["string", "null"] + } + }, + "required": ["custom_field_id"] +} diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/sequences.json b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/sequences.json new file mode 100644 index 0000000000000..1eb621766dd9b --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/sequences.json @@ -0,0 +1,12 @@ +{ + "type": "object", + "properties": { + "sequence_id": { + "type": "string" + }, + "sequence_name": { + "type": ["string", "null"] + } + }, + "required": ["sequence_id"] +} diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/tags.json b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/tags.json new file mode 100644 index 0000000000000..8cf7a13b4c72b --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/schemas/tags.json @@ -0,0 +1,12 @@ +{ + "type": "object", + "properties": { + "tag_id": { + "type": "string" + }, + "tag_name": { + "type": ["string", "null"] + } + }, + "required": ["tag_id"] +} diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/smartengage.yaml b/airbyte-integrations/connectors/source-smartengage/source_smartengage/smartengage.yaml new file mode 100644 index 0000000000000..03f4f6332436b --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/smartengage.yaml @@ -0,0 +1,93 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.smartengage.com" + http_method: "GET" + # API Docs: https://smartengage.com/docs/#authentication + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + # API Docs: https://smartengage.com/docs/#list-all-avatars + avatars_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "avatars" + primary_key: "avatar_id" + path: 
"/avatars/list" + avatars_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.avatars_stream)" + parent_key: "avatar_id" + stream_slice_field: "avatar_id" + # API Docs: https://smartengage.com/docs/#list-all-tags + tags_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tags" + primary_key: "tag_id" + path: "/tags/list" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + request_options_provider: + request_parameters: + avatar_id: "{{ stream_slice.avatar_id }}" + stream_slicer: + $ref: "*ref(definitions.avatars_stream_slicer)" + # API Docs: https://smartengage.com/docs/#list-all-custom-fields + custom_fields_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "custom_fields" + primary_key: "custom_field_id" + path: "/customfields/list" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + request_options_provider: + request_parameters: + avatar_id: "{{ stream_slice.avatar_id }}" + stream_slicer: + $ref: "*ref(definitions.avatars_stream_slicer)" + # API Docs: https://smartengage.com/docs/#list-all-sequences + sequences_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "sequences" + primary_key: "sequence_id" + path: "/sequences/list" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + request_options_provider: + request_parameters: + avatar_id: "{{ stream_slice.avatar_id }}" + stream_slicer: + $ref: "*ref(definitions.avatars_stream_slicer)" + +streams: + - "*ref(definitions.avatars_stream)" + - "*ref(definitions.tags_stream)" + - "*ref(definitions.custom_fields_stream)" + - "*ref(definitions.sequences_stream)" + +check: + stream_names: ["tags"] diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/source.py b/airbyte-integrations/connectors/source-smartengage/source_smartengage/source.py new file mode 100644 index 0000000000000..7ba8a096b2a45 --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceSmartengage(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "smartengage.yaml"}) diff --git a/airbyte-integrations/connectors/source-smartengage/source_smartengage/spec.yaml b/airbyte-integrations/connectors/source-smartengage/source_smartengage/spec.yaml new file mode 100644 index 0000000000000..575720a20349e --- /dev/null +++ b/airbyte-integrations/connectors/source-smartengage/source_smartengage/spec.yaml @@ -0,0 +1,14 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/smartengage +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: SmartEngage Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: API Key + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-snowflake/Dockerfile b/airbyte-integrations/connectors/source-snowflake/Dockerfile index 863444b58ab0f..b38b022ea00e2 100644 --- a/airbyte-integrations/connectors/source-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/source-snowflake/Dockerfile @@ -16,5 +16,5 @@ ENV APPLICATION source-snowflake COPY --from=build /airbyte /airbyte -LABEL io.airbyte.version=0.1.24 +LABEL io.airbyte.version=0.1.26 LABEL io.airbyte.name=airbyte/source-snowflake diff --git a/airbyte-integrations/connectors/source-snowflake/acceptance-test-config.yml b/airbyte-integrations/connectors/source-snowflake/acceptance-test-config.yml new file mode 100644 index 0000000000000..d359de152b5e2 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-snowflake:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-snowflake/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-snowflake/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-snowflake/build.gradle b/airbyte-integrations/connectors/source-snowflake/build.gradle index 909275a03402b..5be316625a599 100644 --- a/airbyte-integrations/connectors/source-snowflake/build.gradle +++ b/airbyte-integrations/connectors/source-snowflake/build.gradle @@ -2,10 +2,11 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { - mainClass = 'io.airbyte.integrations.source.snowflake.SnowflakeSource' + mainClass = 'io.airbyte.integrations.source.snowflake.SnowflakeSourceRunner' applicationDefaultJvmArgs = ['-XX:+ExitOnOutOfMemoryError', '-XX:MaxRAMPercentage=75.0'] } diff --git a/airbyte-integrations/connectors/source-snowflake/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-snowflake/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java index e0e6f1a1d5fdf..d9e45feee5651 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeDataSourceUtils.java @@ -34,8 +34,10 @@ public class SnowflakeDataSourceUtils { public static final String OAUTH_METHOD = "OAuth"; public static final String USERNAME_PASSWORD_METHOD = "username/password"; public static final String UNRECOGNIZED = "Unrecognized"; + public static final String AIRBYTE_OSS = "airbyte_oss"; + public static final String AIRBYTE_CLOUD = "airbyte_cloud"; private static final String JDBC_CONNECTION_STRING = - "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s&application=Airbyte_Connector"; + "role=%s&warehouse=%s&database=%s&schema=%s&JDBC_QUERY_RESULT_FORMAT=%s&CLIENT_SESSION_KEEP_ALIVE=%s&application=%s"; private static final Logger LOGGER = LoggerFactory.getLogger(SnowflakeDataSourceUtils.class); private static final int PAUSE_BETWEEN_TOKEN_REFRESH_MIN = 7; // snowflake access token's TTL is 10min and can't be modified @@ -53,9 +55,9 @@ public class SnowflakeDataSourceUtils { * @param config source config JSON * @return datasource */ - public static HikariDataSource createDataSource(final JsonNode config) { + public static HikariDataSource createDataSource(final JsonNode config, final String airbyteEnvironment) { final HikariDataSource dataSource = new HikariDataSource(); - dataSource.setJdbcUrl(buildJDBCUrl(config)); + dataSource.setJdbcUrl(buildJDBCUrl(config, airbyteEnvironment)); if (config.has("credentials")) { final JsonNode credentials = config.get("credentials"); @@ -130,7 +132,7 @@ public static String getAccessTokenUsingRefreshToken(final String hostName, } } - public static String buildJDBCUrl(final JsonNode config) { + public static String buildJDBCUrl(final JsonNode config, final String airbyteEnvironment) { final StringBuilder jdbcUrl = new StringBuilder(String.format("jdbc:snowflake://%s/?", config.get(JdbcUtils.HOST_KEY).asText())); @@ -143,7 +145,8 @@ public static String buildJDBCUrl(final JsonNode config) { // Needed for JDK17 - see // https://stackoverflow.com/questions/67409650/snowflake-jdbc-driver-internal-error-fail-to-retrieve-row-count-for-first-arrow "JSON", - true)); + true, + airbyteEnvironment)); // https://docs.snowflake.com/en/user-guide/jdbc-configure.html#jdbc-driver-connection-string if (config.has(JdbcUtils.JDBC_URL_PARAMS_KEY)) { diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java index fc7a755390221..ddb3a3110e357 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSource.java @@ -16,7 +16,6 @@ import 
io.airbyte.db.jdbc.JdbcUtils; import io.airbyte.db.jdbc.StreamingJdbcDatabase; import io.airbyte.db.jdbc.streaming.AdaptiveStreamingQueryConfig; -import io.airbyte.integrations.base.IntegrationRunner; import io.airbyte.integrations.base.Source; import io.airbyte.integrations.source.jdbc.AbstractJdbcSource; import java.io.IOException; @@ -37,16 +36,11 @@ public class SnowflakeSource extends AbstractJdbcSource implements Sou public static final String DRIVER_CLASS = DatabaseDriver.SNOWFLAKE.getDriverClassName(); public static final ScheduledExecutorService SCHEDULED_EXECUTOR_SERVICE = Executors.newScheduledThreadPool(1); - public SnowflakeSource() { - super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, new SnowflakeSourceOperations()); - } + private final String airbyteEnvironment; - public static void main(final String[] args) throws Exception { - final Source source = new SnowflakeSource(); - LOGGER.info("starting source: {}", SnowflakeSource.class); - new IntegrationRunner(source).run(args); - SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); - LOGGER.info("completed source: {}", SnowflakeSource.class); + public SnowflakeSource(final String airbyteEnvironment) { + super(DRIVER_CLASS, AdaptiveStreamingQueryConfig::new, new SnowflakeSourceOperations()); + this.airbyteEnvironment = airbyteEnvironment; } @Override @@ -59,14 +53,14 @@ public JdbcDatabase createDatabase(final JsonNode config) throws SQLException { @Override protected DataSource createDataSource(final JsonNode config) { - final DataSource dataSource = SnowflakeDataSourceUtils.createDataSource(config); + final DataSource dataSource = SnowflakeDataSourceUtils.createDataSource(config, airbyteEnvironment); dataSources.add(dataSource); return dataSource; } @Override public JsonNode toDatabaseConfig(final JsonNode config) { - final String jdbcUrl = SnowflakeDataSourceUtils.buildJDBCUrl(config); + final String jdbcUrl = SnowflakeDataSourceUtils.buildJDBCUrl(config, airbyteEnvironment); if (config.has("credentials")) { final JsonNode credentials = config.get("credentials"); diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceRunner.java b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceRunner.java new file mode 100644 index 0000000000000..ead0e5dc735e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/main/java/io.airbyte.integrations.source.snowflake/SnowflakeSourceRunner.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.integrations.source.snowflake; + +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.AIRBYTE_CLOUD; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.AIRBYTE_OSS; +import static io.airbyte.integrations.source.snowflake.SnowflakeSource.SCHEDULED_EXECUTOR_SERVICE; + +import io.airbyte.integrations.base.adaptive.AdaptiveSourceRunner; + +public class SnowflakeSourceRunner { + + public static void main(final String[] args) throws Exception { + AdaptiveSourceRunner.baseOnEnv() + .withOssSource(() -> new SnowflakeSource(AIRBYTE_OSS)) + .withCloudSource(() -> new SnowflakeSource(AIRBYTE_CLOUD)) + .run(args); + SCHEDULED_EXECUTOR_SERVICE.shutdownNow(); + } + +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json index 40c912a340ba5..796d4572c16f6 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json +++ b/airbyte-integrations/connectors/source-snowflake/src/main/resources/spec.json @@ -19,7 +19,6 @@ "auth_type": { "type": "string", "const": "OAuth", - "default": "OAuth", "order": 0 }, "client_id": { @@ -61,7 +60,6 @@ "auth_type": { "type": "string", "const": "username/password", - "default": "username/password", "order": 0 }, "username": { diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java index c3e21bbd75ef0..c0c066cc662f5 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/java/io/airbyte/integrations/io/airbyte/integration_tests/sources/SnowflakeJdbcSourceAcceptanceTest.java @@ -4,6 +4,7 @@ package io.airbyte.integrations.io.airbyte.integration_tests.sources; +import static io.airbyte.integrations.source.snowflake.SnowflakeDataSourceUtils.AIRBYTE_OSS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -102,7 +103,7 @@ public String getDriverClass() { @Override public AbstractJdbcSource getJdbcSource() { - return new SnowflakeSource(); + return new SnowflakeSource(AIRBYTE_OSS); } @Test diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..ed5c98b1ed4e6 --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/dummy_config.json @@ -0,0 +1,5 @@ +{ + "host": "default", + "port": 5555, + "user": "default" +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..282a585ffe6fe --- /dev/null +++ b/airbyte-integrations/connectors/source-snowflake/src/test-integration/resources/expected_spec.json @@ -0,0 +1,187 @@ +{ + "documentationUrl": 
"https://docs.airbyte.com/integrations/sources/snowflake", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Snowflake Source Spec", + "type": "object", + "required": ["host", "role", "warehouse", "database", "schema"], + "properties": { + "credentials": { + "title": "Authorization Method", + "type": "object", + "oneOf": [ + { + "type": "object", + "title": "OAuth2.0", + "order": 0, + "required": ["client_id", "client_secret", "auth_type"], + "properties": { + "auth_type": { + "type": "string", + "const": "OAuth", + "order": 0 + }, + "client_id": { + "type": "string", + "title": "Client ID", + "description": "The Client ID of your Snowflake developer application.", + "airbyte_secret": true, + "order": 1 + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "The Client Secret of your Snowflake developer application.", + "airbyte_secret": true, + "order": 2 + }, + "access_token": { + "type": "string", + "title": "Access Token", + "description": "Access Token for making authenticated requests.", + "airbyte_secret": true, + "order": 3 + }, + "refresh_token": { + "type": "string", + "title": "Refresh Token", + "description": "Refresh Token for making authenticated requests.", + "airbyte_secret": true, + "order": 4 + } + } + }, + { + "title": "Username and Password", + "type": "object", + "required": ["username", "password", "auth_type"], + "order": 1, + "properties": { + "auth_type": { + "type": "string", + "const": "username/password", + "order": 0 + }, + "username": { + "description": "The username you created to allow Airbyte to access the database.", + "examples": ["AIRBYTE_USER"], + "type": "string", + "title": "Username", + "order": 1 + }, + "password": { + "description": "The password associated with the username.", + "type": "string", + "airbyte_secret": true, + "title": "Password", + "order": 2 + } + } + } + ], + "order": 0 + }, + "host": { + "description": "The host domain of the snowflake instance (must include the account, region, cloud environment, and end with snowflakecomputing.com).", + "examples": ["accountname.us-east-2.aws.snowflakecomputing.com"], + "type": "string", + "title": "Account Name", + "order": 1 + }, + "role": { + "description": "The role you created for Airbyte to access Snowflake.", + "examples": ["AIRBYTE_ROLE"], + "type": "string", + "title": "Role", + "order": 2 + }, + "warehouse": { + "description": "The warehouse you created for Airbyte to access data.", + "examples": ["AIRBYTE_WAREHOUSE"], + "type": "string", + "title": "Warehouse", + "order": 3 + }, + "database": { + "description": "The database you created for Airbyte to access data.", + "examples": ["AIRBYTE_DATABASE"], + "type": "string", + "title": "Database", + "order": 4 + }, + "schema": { + "description": "The source Snowflake schema tables.", + "examples": ["AIRBYTE_SCHEMA"], + "type": "string", + "title": "Schema", + "order": 5 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3).", + "title": "JDBC URL Params", + "type": "string", + "order": 6 + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [], + "advanced_auth": { + "auth_flow_type": "oauth2.0", + "predicate_key": ["credentials", "auth_type"], + "predicate_value": "OAuth", + "oauth_config_specification": { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "properties": { + "host": { + "type": "string", + "path_in_connector_config": ["host"] + }, + "role": { + "type": "string", + "path_in_connector_config": ["role"] + } + } + }, + "complete_oauth_output_specification": { + "type": "object", + "properties": { + "access_token": { + "type": "string", + "path_in_connector_config": ["credentials", "access_token"] + }, + "refresh_token": { + "type": "string", + "path_in_connector_config": ["credentials", "refresh_token"] + } + } + }, + "complete_oauth_server_input_specification": { + "type": "object", + "properties": { + "client_id": { + "type": "string" + }, + "client_secret": { + "type": "string" + } + } + }, + "complete_oauth_server_output_specification": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "path_in_connector_config": ["credentials", "client_id"] + }, + "client_secret": { + "type": "string", + "path_in_connector_config": ["credentials", "client_secret"] + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java index 5e38210095125..1005718daad4a 100644 --- a/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java +++ b/airbyte-integrations/connectors/source-snowflake/src/test/java/io/airbyte/integrations/source/snowflake/SnowflakeDataSourceUtilsTest.java @@ -30,24 +30,24 @@ class SnowflakeDataSourceUtilsTest { } """; private final String expectedJdbcUrl = - "jdbc:snowflake://host/?role=role&warehouse=WAREHOUSE&database=DATABASE&schema=SOURCE_SCHEMA&JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true&application=Airbyte_Connector"; + "jdbc:snowflake://host/?role=role&warehouse=WAREHOUSE&database=DATABASE&schema=SOURCE_SCHEMA&JDBC_QUERY_RESULT_FORMAT=JSON&CLIENT_SESSION_KEEP_ALIVE=true&application=airbyte_oss"; @Test void testBuildJDBCUrl() { - JsonNode expectedConfig = Jsons.deserialize(config); + final JsonNode expectedConfig = Jsons.deserialize(config); - String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + final String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig, SnowflakeDataSourceUtils.AIRBYTE_OSS); assertEquals(expectedJdbcUrl, jdbcURL); } @Test void testBuildJDBCUrlWithParams() { - JsonNode expectedConfig = Jsons.deserialize(config); - String params = "someParameter1¶m2=someParameter2"; + final JsonNode expectedConfig = Jsons.deserialize(config); + final String params = "someParameter1¶m2=someParameter2"; ((ObjectNode) expectedConfig).put("jdbc_url_params", params); - String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig); + final String jdbcURL = SnowflakeDataSourceUtils.buildJDBCUrl(expectedConfig, SnowflakeDataSourceUtils.AIRBYTE_OSS); assertEquals(expectedJdbcUrl + "&" + params, jdbcURL); } diff --git 
a/airbyte-integrations/connectors/source-statuspage/.dockerignore b/airbyte-integrations/connectors/source-statuspage/.dockerignore new file mode 100644 index 0000000000000..5d03d864c2c4d --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_statuspage +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-statuspage/Dockerfile b/airbyte-integrations/connectors/source-statuspage/Dockerfile new file mode 100644 index 0000000000000..51b1856444aa0 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_statuspage ./source_statuspage + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-statuspage diff --git a/airbyte-integrations/connectors/source-statuspage/README.md b/airbyte-integrations/connectors/source-statuspage/README.md new file mode 100644 index 0000000000000..4e7e016af822f --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/README.md @@ -0,0 +1,79 @@ +# Statuspage Source + +This is the repository for the Statuspage configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/statuspage). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-statuspage:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/statuspage) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_statuspage/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source statuspage test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
-t airbyte/source-statuspage:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-statuspage:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-statuspage:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-statuspage:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-statuspage:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-statuspage:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run your integration tests with Docker, run the `acceptance-test-docker.sh` script from the connector directory. + +### Using Gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-statuspage:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-statuspage:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-statuspage/__init__.py b/airbyte-integrations/connectors/source-statuspage/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml new file mode 100644 index 0000000000000..4045edb3e839c --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/acceptance-test-config.yml @@ -0,0 +1,43 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-statuspage:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_statuspage/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: metrics + bypass_reason: "This stream can't be seeded in our sandbox account" + - name: subscribers + bypass_reason: "This stream can't be seeded in our sandbox account" + +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-statuspage/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-statuspage/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-statuspage/build.gradle b/airbyte-integrations/connectors/source-statuspage/build.gradle new file mode 100644 index 0000000000000..d12fe827a8039 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_statuspage' +} diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/__init__.py b/airbyte-integrations/connectors/source-statuspage/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-statuspage/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-statuspage/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-statuspage/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..4583cca85fda4 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "pages", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscribers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscribers_histogram_by_state", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "incident_templates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "incidents", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "components", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "metrics", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-statuspage/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d0ce30f189af7 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "" +} diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-statuspage/integration_tests/sample_config.json new file mode 100644 index 0000000000000..cf846744f5073 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "api_key": "76d3af6d-85dc-42db-adcc-7e9ec336e234" +} diff --git a/airbyte-integrations/connectors/source-statuspage/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-statuspage/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-statuspage/main.py b/airbyte-integrations/connectors/source-statuspage/main.py new file mode 100644 index 0000000000000..7ee1bd7681718 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_statuspage import SourceStatuspage + +if __name__ == "__main__": + source = SourceStatuspage() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-statuspage/requirements.txt b/airbyte-integrations/connectors/source-statuspage/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-statuspage/setup.py b/airbyte-integrations/connectors/source-statuspage/setup.py new file mode 100644 index 0000000000000..2e30767f7b8d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_statuspage", + description="Source implementation for Statuspage.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/__init__.py b/airbyte-integrations/connectors/source-statuspage/source_statuspage/__init__.py new file mode 100644 index 0000000000000..85047deeaa776 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceStatuspage + +__all__ = ["SourceStatuspage"] diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/components.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/components.json new file mode 100644 index 0000000000000..4945c1154b251 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/components.json @@ -0,0 +1,73 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "description": "Identifier for component" + }, + "page_id": { + "type": ["null", "string"], + "description": "Page identifier" + }, + "group_id": { + "type": ["null", "string"], + "description": "Component Group identifier" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "group": { + "type": ["null", "boolean"], + "description": "Is this component a group" + }, + "name": { + "type": ["null", "string"], + "description": "Display name for component" + }, + "description": { + "type": ["null", "string"], + "description": "More detailed description for component" + }, + "position": { + "type": "integer", + "format": "int32", + "description": "Order the component will appear on the page" + }, + "status": { + "type": ["null", "string"], + "enum": [ + "operational", + "under_maintenance", + "degraded_performance", + "partial_outage", + "major_outage", + "" + ], + "description": "Status of component" + }, + "showcase": { + "type": ["null", "boolean"], + "description": "Should this component be showcased" + }, + "only_show_if_degraded": { + "type": ["null", "boolean"], + "description": "Requires a special feature flag to be enabled" + }, + "automation_email": { + "type": ["null", "string"], + "description": "Requires a special feature flag to be enabled" + }, + "start_date": { + "type": ["null", "string"], + "format": "date", + "description": "The date this component started being used" + } + }, + "description": "Add page access groups to a component", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/incident_templates.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/incident_templates.json new file mode 100644 index 0000000000000..789b4a0958588 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/incident_templates.json @@ -0,0 +1,118 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "example": "p08qyqtz4fnq", + "description": "Incident Template Identifier" + }, + "components": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "description": "Identifier for component" + }, + "page_id": { + "type": ["null", "string"], + "description": "Page identifier" + }, + "group_id": { + "type": ["null", "string"], + "description": "Component Group identifier" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "group": { + "type": ["null", "boolean"], + "description": "Is this component a group" + }, + "name": { + "type": ["null", "string"], + "description": "Display name for component" + }, + "description": { + "type": ["null", "string"], + "description": "More detailed description for component" 
+ }, + "position": { + "type": "integer", + "format": "int32", + "description": "Order the component will appear on the page" + }, + "status": { + "type": ["null", "string"], + "enum": [ + "operational", + "under_maintenance", + "degraded_performance", + "partial_outage", + "major_outage", + "" + ], + "description": "Status of component" + }, + "showcase": { + "type": ["null", "boolean"], + "description": "Should this component be showcased" + }, + "only_show_if_degraded": { + "type": ["null", "boolean"], + "description": "Requires a special feature flag to be enabled" + }, + "automation_email": { + "type": ["null", "string"], + "description": "Requires a special feature flag to be enabled" + }, + "start_date": { + "type": ["null", "string"], + "format": "date", + "description": "The date this component started being used" + } + }, + "description": "Add page access groups to a component", + "$schema": "http://json-schema.org/schema#" + }, + "description": "Affected components" + }, + "name": { + "type": ["null", "string"], + "description": "Name of the template, as shown in the list on the \"Templates\" tab of the \"Incidents\" page" + }, + "title": { + "type": ["null", "string"], + "description": "Title to be applied to the incident or maintenance when selecting this template" + }, + "body": { + "type": ["null", "string"], + "description": "Body of the incident or maintenance update to be applied when selecting this template" + }, + "group_id": { + "type": ["null", "string"], + "example": "mntdyll6bk4c", + "description": "Identifier of Template Group this template belongs to" + }, + "update_status": { + "type": ["null", "string"], + "description": "The status the incident or maintenance should transition to when selecting this template" + }, + "should_tweet": { + "type": ["null", "boolean"], + "description": "Whether the \"tweet update\" checkbox should be selected when selecting this template" + }, + "should_send_notifications": { + "type": ["null", "boolean"], + "description": "Whether the \"deliver notifications\" checkbox should be selected when selecting this template" + } + }, + "description": "Get a list of templates", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/incidents.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/incidents.json new file mode 100644 index 0000000000000..3d7ac9b8dd57c --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/incidents.json @@ -0,0 +1,306 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "example": "p31zjtct2jer", + "description": "Incident Identifier" + }, + "components": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "description": "Identifier for component" + }, + "page_id": { + "type": ["null", "string"], + "description": "Page identifier" + }, + "group_id": { + "type": ["null", "string"], + "description": "Component Group identifier" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "group": { + "type": ["null", "boolean"], + "description": "Is this component a group" + }, + "name": { + "type": ["null", "string"], + "description": "Display name for component" + }, + "description": { + "type": ["null", "string"], + "description": "More detailed description for 
component" + }, + "position": { + "type": "integer", + "format": "int32", + "description": "Order the component will appear on the page" + }, + "status": { + "type": ["null", "string"], + "enum": [ + "operational", + "under_maintenance", + "degraded_performance", + "partial_outage", + "major_outage", + "" + ], + "description": "Status of component" + }, + "showcase": { + "type": ["null", "boolean"], + "description": "Should this component be showcased" + }, + "only_show_if_degraded": { + "type": ["null", "boolean"], + "description": "Requires a special feature flag to be enabled" + }, + "automation_email": { + "type": ["null", "string"], + "description": "Requires a special feature flag to be enabled" + }, + "start_date": { + "type": ["null", "string"], + "format": "date", + "description": "The date this component started being used" + } + }, + "description": "Add page access groups to a component", + "$schema": "http://json-schema.org/schema#" + }, + "description": "Incident components" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the incident was created at." + }, + "impact": { + "type": ["null", "string"], + "example": "critical", + "enum": ["none", "maintenance", "minor", "major", "critical"], + "description": "The impact of the incident." + }, + "impact_override": { + "type": ["null", "object"], + "example": "minor", + "description": "value to override calculated impact value" + }, + "incident_updates": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "description": "Incident Update Identifier." + }, + "incident_id": { + "type": ["null", "string"], + "description": "Incident Identifier." + }, + "affected_components": { + "type": ["null", "array"], + "description": "Affected components associated with the incident update." + }, + "body": { + "type": ["null", "string"], + "description": "Incident update body." + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the incident update was created at." + }, + "custom_tweet": { + "type": ["null", "string"], + "description": "An optional customized tweet message for incident postmortem." + }, + "deliver_notifications": { + "type": ["null", "boolean"], + "description": "Controls whether to delivery notifications." + }, + "display_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "Timestamp when incident update is happened." + }, + "status": { + "type": ["null", "string"], + "enum": [ + "investigating", + "identified", + "monitoring", + "resolved", + "scheduled", + "in_progress", + "verifying", + "completed" + ], + "description": "The incident status. For realtime incidents, valid values are investigating, identified, monitoring, and resolved. For scheduled incidents, valid values are scheduled, in_progress, verifying, and completed." + }, + "tweet_id": { + "type": ["null", "string"], + "description": "Tweet identifier associated to this incident update." + }, + "twitter_updated_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when twitter updated at." + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the incident update is updated." + }, + "wants_twitter_update": { + "type": ["null", "boolean"], + "description": "Controls whether to create twitter update." 
+ } + }, + "description": "Update a previous incident update", + "$schema": "http://json-schema.org/schema#" + }, + "description": "The incident updates for incident." + }, + "metadata": { + "type": "object", + "example": { + "jira": { + "issue_id": "value" + } + }, + "description": "Metadata attached to the incident. Top level values must be objects." + }, + "monitoring_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when incident entered monitoring state." + }, + "name": { + "type": ["null", "string"], + "example": "Data Layer Migration", + "description": "Incident Name" + }, + "page_id": { + "type": ["null", "string"], + "description": "Incident Page Identifier" + }, + "postmortem_body": { + "type": ["null", "string"], + "example": "##### Issue\nAt approximately 17:02 UTC on 2013-04-21, our master database server unexpectedly went unresponsive to all network.\nA reboot of the machine at 17:05 UTC resulted in a failed mount of a corrupted EBS volume, and we made the decision\nat that time to fail over the slave database.\n\n##### Resolution\nAt 17:12 UTC, the slave database had been successfully promoted to master and the application recovered enough to\naccept web traffic again. A new slave database node was created and placed into the rotation to guard against future\nmaster failures. The promoted slave database performed slowly for the next couple of hours as the query cache began\nto warm up, and eventually settled into a reasonable performance profile around 20:00 UTC.\n\n##### Future Mitigation Plans\nOver the past few months, we have been working on an overhaul to our data storage layer with a migration from a Postgres\nsetup to a distributed, fault-tolerant, multi-region data layer using Riak. This initiative has been prioritized, and\nthe migration will be performed in the coming weeks. We will notify our clients of the scheduled downtime via an\nincident on this status site, and via a blog post.\n", + "description": "Body of the Postmortem." + }, + "postmortem_body_last_updated_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the incident postmortem body was last updated at." + }, + "postmortem_ignored": { + "type": ["null", "boolean"], + "description": "Controls whether the incident will have postmortem." + }, + "postmortem_notified_subscribers": { + "type": ["null", "boolean"], + "description": "Indicates whether subscribers are already notificed about postmortem." + }, + "postmortem_notified_twitter": { + "type": ["null", "boolean"], + "description": "Controls whether to decide if notify postmortem on twitter." + }, + "postmortem_published_at": { + "type": ["null", "boolean"], + "description": "The timestamp when the postmortem was published." + }, + "resolved_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when incident was resolved." + }, + "scheduled_auto_completed": { + "type": ["null", "boolean"], + "description": "Controls whether the incident is scheduled to automatically change to complete." + }, + "scheduled_auto_in_progress": { + "type": ["null", "boolean"], + "description": "Controls whether the incident is scheduled to automatically change to in progress." + }, + "scheduled_for": { + "type": ["null", "string"], + "format": "date-time", + "example": "2013-05-07T03:00:00.007Z", + "description": "The timestamp the incident is scheduled for." 
+ }, + "auto_transition_deliver_notifications_at_end": { + "type": ["null", "boolean"], + "description": "Controls whether send notification when scheduled maintenances auto transition to completed." + }, + "auto_transition_deliver_notifications_at_start": { + "type": ["null", "boolean"], + "description": "Controls whether send notification when scheduled maintenances auto transition to started." + }, + "auto_transition_to_maintenance_state": { + "type": ["null", "boolean"], + "description": "Controls whether change components status to under_maintenance once scheduled maintenance is in progress." + }, + "auto_transition_to_operational_state": { + "type": ["null", "boolean"], + "description": "Controls whether change components status to operational once scheduled maintenance completes." + }, + "scheduled_remind_prior": { + "type": ["null", "boolean"], + "description": "Controls whether to remind subscribers prior to scheduled incidents." + }, + "scheduled_reminded_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the scheduled incident reminder was sent at." + }, + "scheduled_until": { + "type": ["null", "string"], + "format": "date-time", + "example": "2013-05-07T06:00:00.007Z", + "description": "The timestamp the incident is scheduled until." + }, + "shortlink": { + "type": ["null", "string"], + "example": "http://stspg.io/803310a12", + "description": "Incident Shortlink." + }, + "status": { + "type": ["null", "string"], + "example": "scheduled", + "enum": [ + "investigating", + "identified", + "monitoring", + "resolved", + "scheduled", + "in_progress", + "verifying", + "completed" + ], + "description": "The incident status. For realtime incidents, valid values are investigating, identified, monitoring, and resolved. For scheduled incidents, valid values are scheduled, in_progress, verifying, and completed." + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the incident was updated at." 
+ } + }, + "description": "Get an incident", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/metrics.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/metrics.json new file mode 100644 index 0000000000000..e634a3d6d162a --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/metrics.json @@ -0,0 +1,76 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "description": "Metric identifier" + }, + "metrics_provider_id": { + "type": ["null", "string"], + "description": "Metric Provider identifier" + }, + "metric_identifier": { + "type": ["null", "string"], + "description": "Metric Display identifier used to look up the metric data from the provider" + }, + "name": { + "type": ["null", "string"], + "description": "Name of metric" + }, + "display": { + "type": ["null", "boolean"], + "description": "Should the metric be displayed" + }, + "tooltip_description": { + "type": ["null", "string"] + }, + "backfilled": { + "type": ["null", "boolean"] + }, + "y_axis_min": { + "type": "number", + "format": "float" + }, + "y_axis_max": { + "type": "number", + "format": "float" + }, + "y_axis_hidden": { + "type": ["null", "boolean"], + "description": "Should the values on the y axis be hidden on render" + }, + "suffix": { + "type": ["null", "string"], + "description": "Suffix to describe the units on the graph" + }, + "decimal_places": { + "type": "integer", + "format": "int32" + }, + "most_recent_data_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "last_fetched_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "backfill_percentage": { + "type": "integer", + "format": "int32" + }, + "reference_name": { + "type": ["null", "string"] + } + }, + "description": "Create a metric for a metric provider", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/pages.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/pages.json new file mode 100644 index 0000000000000..9ec552ab3cb1f --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/pages.json @@ -0,0 +1,182 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "example": "j7m9j8brdt3h", + "description": "Page identifier" + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "Timestamp the record was created" + }, + "updated_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "Timestamp the record was last updated" + }, + "name": { + "type": ["null", "string"], + "example": "My Company Status", + "description": "Name of your page to be displayed" + }, + "page_description": { + "type": ["null", "string"] + }, + "headline": { + "type": ["null", "string"] + }, + "branding": { + "type": ["null", "string"], + "description": "The main template your statuspage will use" + }, + "subdomain": { + "type": ["null", "string"], + "example": "your-subdomain.statuspage.io", + "description": "Subdomain at which to access your status page" + }, + "domain": { + "type": ["null", "string"], + "example": "status.mycompany.com", + "description": "CNAME 
alias for your status page" + }, + "url": { + "type": ["null", "string"], + "example": "https://www.mycompany.com", + "description": "Website of your page. Clicking on your statuspage image will link here." + }, + "support_url": { + "type": ["null", "string"] + }, + "hidden_from_search": { + "type": ["null", "boolean"], + "description": "Should your page hide itself from search engines" + }, + "allow_page_subscribers": { + "type": ["null", "boolean"], + "description": "Can your users subscribe to all notifications on the page" + }, + "allow_incident_subscribers": { + "type": ["null", "boolean"], + "description": "Can your users subscribe to notifications for a single incident" + }, + "allow_email_subscribers": { + "type": ["null", "boolean"], + "description": "Can your users choose to receive notifications via email" + }, + "allow_sms_subscribers": { + "type": ["null", "boolean"], + "description": "Can your users choose to receive notifications via SMS" + }, + "allow_rss_atom_feeds": { + "type": ["null", "boolean"], + "description": "Can your users choose to access incident feeds via RSS/Atom (not functional on Audience-Specific pages)" + }, + "allow_webhook_subscribers": { + "type": ["null", "boolean"], + "description": "Can your users choose to receive notifications via Webhooks" + }, + "notifications_from_email": { + "type": ["null", "string"], + "example": "no-reply@status.mycompany.com", + "description": "Allows you to customize the email address your page notifications come from" + }, + "notifications_email_footer": { + "type": ["null", "string"], + "description": "Allows you to customize the footer appearing on your notification emails. Accepts Markdown for formatting" + }, + "activity_score": { + "type": "number", + "format": "float" + }, + "twitter_username": { + "type": ["null", "string"] + }, + "viewers_must_be_team_members": { + "type": ["null", "boolean"] + }, + "ip_restrictions": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "state": { + "type": ["null", "string"] + }, + "country": { + "type": ["null", "string"] + }, + "time_zone": { + "type": ["null", "string"], + "example": "UTC", + "description": "Timezone configured for your page" + }, + "css_body_background_color": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_font_color": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_light_font_color": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_greens": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_yellows": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_oranges": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_blues": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_reds": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_border_color": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_graph_color": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_link_color": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "css_no_data": { + "type": ["null", "string"], + "description": "CSS Color" + }, + "favicon_logo": { + "type": ["null", "object"] + }, + "transactional_logo": { + "type": ["null", "object"] + }, + "hero_cover": { + "type": ["null", "object"] + }, + "email_logo": { + "type": ["null", "object"] + }, + "twitter_logo": { + "type": ["null", "object"] + } + }, + "description": 
"Get a page", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/subscribers.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/subscribers.json new file mode 100644 index 0000000000000..3d0d03715ab6f --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/subscribers.json @@ -0,0 +1,80 @@ +{ + "type": "object", + "properties": { + "id": { + "type": ["null", "string"], + "example": "x2wy916b0s4l", + "description": "Subscriber Identifier" + }, + "skip_confirmation_notification": { + "type": ["null", "boolean"], + "description": "If this is true, do not notify the user with changes to their subscription." + }, + "mode": { + "type": ["null", "string"], + "example": "email", + "description": "The communication mode of the subscriber." + }, + "email": { + "type": ["null", "string"], + "example": "foo@example2.com", + "description": "The email address to use to contact the subscriber. Used for Email and Webhook subscribers." + }, + "endpoint": { + "type": ["null", "string"], + "example": "http://example.com/", + "description": "The URL where a webhook subscriber elects to receive updates." + }, + "phone_number": { + "type": ["null", "string"], + "example": "202-555-0105", + "description": "The phone number used to contact an SMS subscriber" + }, + "phone_country": { + "type": ["null", "string"], + "example": "US", + "description": "The two-character country code representing the country of which the phone_number is a part." + }, + "display_phone_number": { + "type": ["null", "string"], + "example": "+1 (202) 555-0105", + "description": "A formatted version of the phone_number and phone_country pair, nicely formatted for display." + }, + "obfuscated_channel_name": { + "type": ["null", "string"], + "example": "#*******an", + "description": "Obfuscated slack channel name" + }, + "workspace_name": { + "type": ["null", "string"], + "example": "Atlassian", + "description": "The workspace name of the slack subscriber." + }, + "quarantined_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when the subscriber was quarantined due to an issue reaching them." + }, + "purge_at": { + "type": ["null", "string"], + "format": "date-time", + "description": "The timestamp when a quarantined subscriber will be purged (unsubscribed)." + }, + "components": { + "type": ["null", "string"], + "example": ["vp5sp4b1rl2l"], + "description": "The components for which the subscriber has elected to receive updates." + }, + "page_access_user_id": { + "type": ["null", "string"], + "example": "pwq31gns60j3", + "description": "The Page Access user this subscriber belongs to (only for audience-specific pages)." 
+ }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "description": "Get an incident subscriber", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/subscribers_histogram_by_state.json b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/subscribers_histogram_by_state.json new file mode 100644 index 0000000000000..afe3490e832e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/schemas/subscribers_histogram_by_state.json @@ -0,0 +1,137 @@ +{ + "type": "object", + "properties": { + "email": { + "type": "object", + "properties": { + "active": { + "type": "integer", + "format": "int32", + "description": "The number of active subscribers found by the query." + }, + "unconfirmed": { + "type": "integer", + "format": "int32", + "description": "The number of unconfirmed subscribers found by the query." + }, + "quarantined": { + "type": "integer", + "format": "int32", + "description": "The number of quarantined subscribers found by the query." + }, + "total": { + "type": "integer", + "format": "int32", + "description": "The total number of subscribers found by the query." + } + }, + "$schema": "http://json-schema.org/schema#" + }, + "sms": { + "type": "object", + "properties": { + "active": { + "type": "integer", + "format": "int32", + "description": "The number of active subscribers found by the query." + }, + "unconfirmed": { + "type": "integer", + "format": "int32", + "description": "The number of unconfirmed subscribers found by the query." + }, + "quarantined": { + "type": "integer", + "format": "int32", + "description": "The number of quarantined subscribers found by the query." + }, + "total": { + "type": "integer", + "format": "int32", + "description": "The total number of subscribers found by the query." + } + }, + "$schema": "http://json-schema.org/schema#" + }, + "webhook": { + "type": "object", + "properties": { + "active": { + "type": "integer", + "format": "int32", + "description": "The number of active subscribers found by the query." + }, + "unconfirmed": { + "type": "integer", + "format": "int32", + "description": "The number of unconfirmed subscribers found by the query." + }, + "quarantined": { + "type": "integer", + "format": "int32", + "description": "The number of quarantined subscribers found by the query." + }, + "total": { + "type": "integer", + "format": "int32", + "description": "The total number of subscribers found by the query." + } + }, + "$schema": "http://json-schema.org/schema#" + }, + "integration_partner": { + "type": "object", + "properties": { + "active": { + "type": "integer", + "format": "int32", + "description": "The number of active subscribers found by the query." + }, + "unconfirmed": { + "type": "integer", + "format": "int32", + "description": "The number of unconfirmed subscribers found by the query." + }, + "quarantined": { + "type": "integer", + "format": "int32", + "description": "The number of quarantined subscribers found by the query." + }, + "total": { + "type": "integer", + "format": "int32", + "description": "The total number of subscribers found by the query." + } + }, + "$schema": "http://json-schema.org/schema#" + }, + "slack": { + "type": "object", + "properties": { + "active": { + "type": "integer", + "format": "int32", + "description": "The number of active subscribers found by the query." 
+ }, + "unconfirmed": { + "type": "integer", + "format": "int32", + "description": "The number of unconfirmed subscribers found by the query." + }, + "quarantined": { + "type": "integer", + "format": "int32", + "description": "The number of quarantined subscribers found by the query." + }, + "total": { + "type": "integer", + "format": "int32", + "description": "The total number of subscribers found by the query." + } + }, + "$schema": "http://json-schema.org/schema#" + } + }, + "description": "Get a histogram of subscribers by type and then state", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/source.py b/airbyte-integrations/connectors/source-statuspage/source_statuspage/source.py new file mode 100644 index 0000000000000..1d8d85b315ba5 --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceStatuspage(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "statuspage.yaml"}) diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/spec.yaml b/airbyte-integrations/connectors/source-statuspage/source_statuspage/spec.yaml new file mode 100644 index 0000000000000..87b2d04df342c --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/statuspage +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Statuspage Spec + type: object + required: + - api_key + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: >- + Your API Key. See here. 
+ airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-statuspage/source_statuspage/statuspage.yaml b/airbyte-integrations/connectors/source-statuspage/source_statuspage/statuspage.yaml new file mode 100644 index 0000000000000..9632741c6bccf --- /dev/null +++ b/airbyte-integrations/connectors/source-statuspage/source_statuspage/statuspage.yaml @@ -0,0 +1,151 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.statuspage.io/v1" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "Authorization" + api_token: "OAuth {{ config['api_key'] }}" + error_handler: + type: "CompositeErrorHandler" + error_handlers: + - response_filters: + - http_codes: [420, 429] + action: RETRY + backoff_strategies: + - type: "ConstantBackoffStrategy" + backoff_time_in_seconds: 60 + offset_paginator: + type: DefaultPaginator + $options: + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "OffsetIncrement" + page_size: 100 + page_token_option: + field_name: "page" + inject_into: "request_parameter" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.offset_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + pages_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "pages" + primary_key: "id" + path: "/pages" + page_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.pages_stream)" + parent_key: id + stream_slice_field: page_id + subscribers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "subscribers" + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/pages/{{ stream_slice.page_id }}/subscribers" + stream_slicer: + $ref: "*ref(definitions.page_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + subscribers_histogram_by_state_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "subscribers_histogram_by_state" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/pages/{{ stream_slice.page_id }}/subscribers/histogram_by_state" + stream_slicer: + $ref: "*ref(definitions.page_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + incident_templates_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "incident_templates" + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/pages/{{ stream_slice.page_id }}/incident_templates" + stream_slicer: + $ref: "*ref(definitions.page_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + incidents_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "incidents" + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/pages/{{ stream_slice.page_id }}/incidents" + stream_slicer: + $ref: "*ref(definitions.page_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + components_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "components" + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: 
"*ref(definitions.requester)" + path: "/pages/{{ stream_slice.page_id }}/components" + stream_slicer: + $ref: "*ref(definitions.page_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" + metrics_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "metrics" + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + path: "/pages/{{ stream_slice.page_id }}/metrics" + stream_slicer: + $ref: "*ref(definitions.page_stream_slicer)" + record_selector: + $ref: "*ref(definitions.selector)" +streams: + - "*ref(definitions.pages_stream)" + - "*ref(definitions.subscribers_stream)" + - "*ref(definitions.subscribers_histogram_by_state_stream)" + - "*ref(definitions.incident_templates_stream)" + - "*ref(definitions.incidents_stream)" + - "*ref(definitions.components_stream)" + - "*ref(definitions.metrics_stream)" +check: + stream_names: + - "pages" diff --git a/airbyte-integrations/connectors/source-stock-ticker-api-tutorial/source.py b/airbyte-integrations/connectors/source-stock-ticker-api-tutorial/source.py index 4d401eb06c89c..c9ec489513c23 100644 --- a/airbyte-integrations/connectors/source-stock-ticker-api-tutorial/source.py +++ b/airbyte-integrations/connectors/source-stock-ticker-api-tutorial/source.py @@ -33,7 +33,7 @@ def read(config, catalog): # Assert required configuration was provided if "api_key" not in config or "stock_ticker" not in config: - log("Input config must contain the properties 'api_key' and 'stock_ticker'") + log_error("Input config must contain the properties 'api_key' and 'stock_ticker'") sys.exit(1) # Find the stock_prices stream if it is present in the input catalog @@ -43,22 +43,22 @@ def read(config, catalog): stock_prices_stream = configured_stream if stock_prices_stream is None: - log("No streams selected") + log_error("No streams selected") return # We only support full_refresh at the moment, so verify the user didn't ask for another sync mode if stock_prices_stream["sync_mode"] != "full_refresh": - log("This connector only supports full refresh syncs! (for now)") + log_error("This connector only supports full refresh syncs! 
(for now)") sys.exit(1) # If we've made it this far, all the configuration is good and we can pull the last 7 days of market data response = _call_api(ticker=config["stock_ticker"], token = config["api_key"]) if response.status_code != 200: # In a real scenario we'd handle this error better :) - log("Failure occurred when calling Polygon.io API") + log_error("Failure occurred when calling Polygon.io API") sys.exit(1) else: - # Stock prices are returned sorted by by date in ascending order + # Stock prices are returned sorted by date in ascending order # We want to output them one by one as AirbyteMessages results = response.json()["results"] for result in results: @@ -83,7 +83,7 @@ def _call_api(ticker, token): def check(config): # Assert required configuration was provided if "api_key" not in config or "stock_ticker" not in config: - log("Input config must contain the properties 'api_key' and 'stock_ticker'") + log_error("Input config must contain the properties 'api_key' and 'stock_ticker'") sys.exit(1) else: # Validate input configuration by attempting to get the daily closing prices of the input stock ticker @@ -107,6 +107,12 @@ def log(message): print(json.dumps(log_json)) +def log_error(error_message): + current_time_in_ms = int(datetime.datetime.now().timestamp()) * 1000 + log_json = {"type": "TRACE", "trace": {"type": "ERROR", "emitted_at": current_time_in_ms, "error": {"message": error_message}}} + print(json.dumps(log_json)) + + def discover(): catalog = { "streams": [{ diff --git a/airbyte-integrations/connectors/source-tempo/Dockerfile b/airbyte-integrations/connectors/source-tempo/Dockerfile index 473969d0c257f..3992057642845 100644 --- a/airbyte-integrations/connectors/source-tempo/Dockerfile +++ b/airbyte-integrations/connectors/source-tempo/Dockerfile @@ -34,5 +34,5 @@ COPY source_tempo ./source_tempo ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.2.6 +LABEL io.airbyte.version=0.3.0 LABEL io.airbyte.name=airbyte/source-tempo diff --git a/airbyte-integrations/connectors/source-tempo/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tempo/acceptance-test-config.yml index b2e97c303c7e0..bd08026cb2d0c 100644 --- a/airbyte-integrations/connectors/source-tempo/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-tempo/acceptance-test-config.yml @@ -1,19 +1,40 @@ # See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-tempo:dev -tests: +acceptance_tests: spec: - - spec_path: "source_tempo/spec.json" + tests: + - spec_path: "source_tempo/spec.json" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" discovery: - - config_path: "secrets/config.json" + tests: + - config_path: "secrets/config.json" + backward_compatibility_tests_config: + disable_for_version: "0.2.6" basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + expect_records: + path: 
"integration_tests/expected_records.txt" + - config_path: "secrets/accounts_only_config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: "worklogs" + bypass_reason: "token scope does not include this stream" + - name: "workload-schemes" + bypass_reason: "token scope does not include this stream" full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" diff --git a/airbyte-integrations/connectors/source-tempo/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-tempo/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..c43afad8508d3 --- /dev/null +++ b/airbyte-integrations/connectors/source-tempo/integration_tests/abnormal_state.json @@ -0,0 +1,9 @@ +[ + { + "type": "STREAM", + "stream": { + "stream_state": { "startDate": "2031-04-14" }, + "stream_descriptor": { "name": "worklogs" } + } + } +] diff --git a/airbyte-integrations/connectors/source-tempo/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-tempo/integration_tests/configured_catalog.json index 4ebb36abf7cd7..58be8a0abec1b 100644 --- a/airbyte-integrations/connectors/source-tempo/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-tempo/integration_tests/configured_catalog.json @@ -4,8 +4,10 @@ "stream": { "name": "accounts", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] }, + "primary_key": [["id"]], "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" }, @@ -13,8 +15,10 @@ "stream": { "name": "customers", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] }, + "primary_key": [["id"]], "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" }, @@ -22,8 +26,10 @@ "stream": { "name": "workload-schemes", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["id"]] }, + "primary_key": [["id"]], "sync_mode": "full_refresh", "destination_sync_mode": "overwrite" }, @@ -31,9 +37,14 @@ "stream": { "name": "worklogs", "json_schema": {}, - "supported_sync_modes": ["full_refresh"] + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["startDate"], + "source_defined_primary_key": [["tempoWorklogId"]] }, - "sync_mode": "full_refresh", + "primary_key": [["tempoWorklogId"]], + "cursor_field": ["startDate"], + "sync_mode": "incremental", "destination_sync_mode": "overwrite" } ] diff --git a/airbyte-integrations/connectors/source-tempo/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-tempo/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..f0896073c195b --- /dev/null +++ b/airbyte-integrations/connectors/source-tempo/integration_tests/expected_records.txt @@ -0,0 +1,283 @@ +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/4", "key": "ACCOUNT3", "id": 4, 
"name": "Account 3", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT3/links"}}, "emitted_at": 1667382519806} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/2", "key": "ACCOUNT1", "id": 2, "name": "Account 1", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT1/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/3", "key": "ACCOUNT2", "id": 3, "name": "Account 2", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT2/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/5", "key": "ACCOUNT4", "id": 5, "name": "Account 4", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=600f111765f20b0070a8ac03", "accountId": "600f111765f20b0070a8ac03"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT4/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/6", "key": "ACCOUNT5", "id": 6, "name": "Account 5", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=600f111765f20b0070a8ac03", "accountId": "600f111765f20b0070a8ac03"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT5/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/7", "key": "ACCOUNT6", "id": 7, "name": "Account 6", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT6/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/8", "key": "ACCOUNT7", "id": 8, "name": "Account 7", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=600f111765f20b0070a8ac03", "accountId": "600f111765f20b0070a8ac03"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT7/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/9", "key": "ACCOUNT8", "id": 9, "name": "Account 8", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=600f14121959540115a069d9", "accountId": "600f14121959540115a069d9"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT8/links"}}, "emitted_at": 1667382519817} +{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/10", "key": "ACCOUNT9", "id": 10, "name": "Account 9", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=600f14121959540115a069d9", "accountId": "600f14121959540115a069d9"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT9/links"}}, "emitted_at": 1667382519817} 
+{"stream": "accounts", "data": {"self": "https://api.tempo.io/4/accounts/11", "key": "ACCOUNT10", "id": 11, "name": "Account 10", "status": "OPEN", "global": false, "lead": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=600f111765f20b0070a8ac03", "accountId": "600f111765f20b0070a8ac03"}, "links": {"self": "https://api.tempo.io/4/accounts/ACCOUNT10/links"}}, "emitted_at": 1667382519817} +{"stream": "customers", "data": {"self": "https://api.tempo.io/4/customers/2", "key": "Customer1", "id": 2, "name": "Customer 1"}, "emitted_at": 1667382520445} +{"stream": "customers", "data": {"self": "https://api.tempo.io/4/customers/3", "key": "Customer2", "id": 3, "name": "Customer 2"}, "emitted_at": 1667382520455} +{"stream": "customers", "data": {"self": "https://api.tempo.io/4/customers/4", "key": "Customer3", "id": 4, "name": "Customer 3"}, "emitted_at": 1667382520455} +{"stream": "customers", "data": {"self": "https://api.tempo.io/4/customers/5", "key": "Customer4", "id": 5, "name": "Customer 4"}, "emitted_at": 1667382520455} +{"stream": "customers", "data": {"self": "https://api.tempo.io/4/customers/6", "key": "Customer5", "id": 6, "name": "Customer 5"}, "emitted_at": 1667382520455} +{"stream": "customers", "data": {"self": "https://api.tempo.io/4/customers/7", "key": "Customer6", "id": 7, "name": "Customer 6"}, "emitted_at": 1667382520455} +{"stream": "workload-schemes", "data": {"self": "https://api.tempo.io/4/workload-schemes/2", "id": 2, "name": "Tempo Default Workload Scheme", "description": "Required working hours per day, as specified in Jira Time Tracking configuration", "defaultScheme": true, "memberCount": 2, "days": [{"day": "MONDAY", "requiredSeconds": 28800}, {"day": "TUESDAY", "requiredSeconds": 28800}, {"day": "WEDNESDAY", "requiredSeconds": 28800}, {"day": "THURSDAY", "requiredSeconds": 28800}, {"day": "FRIDAY", "requiredSeconds": 28800}, {"day": "SATURDAY", "requiredSeconds": 0}, {"day": "SUNDAY", "requiredSeconds": 0}]}, "emitted_at": 1667382521280} +{"stream": "workload-schemes", "data": {"self": "https://api.tempo.io/4/workload-schemes/3", "id": 3, "name": "Test Workload", "description": "Test workload", "defaultScheme": false, "memberCount": 1, "days": [{"day": "MONDAY", "requiredSeconds": 28800}, {"day": "TUESDAY", "requiredSeconds": 21600}, {"day": "WEDNESDAY", "requiredSeconds": 14400}, {"day": "THURSDAY", "requiredSeconds": 10800}, {"day": "FRIDAY", "requiredSeconds": 18000}, {"day": "SATURDAY", "requiredSeconds": 28800}, {"day": "SUNDAY", "requiredSeconds": 28800}]}, "emitted_at": 1667382521281} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/2", "tempoWorklogId": 2, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 600, "billableSeconds": 600, "startDate": "2021-01-24", "startTime": "08:00:00", "description": "Test", "createdAt": "2021-01-25T19:08:43Z", "updatedAt": "2021-01-25T19:08:43Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/2/work-attribute-values", "values": []}}, "emitted_at": 1667382538355} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/4", "tempoWorklogId": 4, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 14400, "billableSeconds": 14400, "startDate": "2021-01-24", "startTime": "08:10:00", "description": 
"test monday", "createdAt": "2021-01-25T19:14:11Z", "updatedAt": "2021-01-25T19:14:11Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/4/work-attribute-values", "values": []}}, "emitted_at": 1667382538372} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/5", "tempoWorklogId": 5, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 7200, "billableSeconds": 7200, "startDate": "2021-01-25", "startTime": "08:00:00", "description": "New", "createdAt": "2021-01-25T19:14:25Z", "updatedAt": "2021-01-25T19:14:25Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/5/work-attribute-values", "values": []}}, "emitted_at": 1667382538374} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/3", "tempoWorklogId": 3, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10002", "id": 10002}, "timeSpentSeconds": 1800, "billableSeconds": 1800, "startDate": "2021-01-27", "startTime": "08:00:00", "description": "test", "createdAt": "2021-01-25T19:13:44Z", "updatedAt": "2021-01-25T19:13:44Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/3/work-attribute-values", "values": []}}, "emitted_at": 1667382538588} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/6", "tempoWorklogId": 6, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10622", "id": 10622}, "timeSpentSeconds": 2640, "billableSeconds": 2640, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:09Z", "updatedAt": "2021-04-15T18:38:09Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/6/work-attribute-values", "values": []}}, "emitted_at": 1667382541565} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/7", "tempoWorklogId": 7, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10077", "id": 10077}, "timeSpentSeconds": 2160, "billableSeconds": 2160, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:10Z", "updatedAt": "2021-04-15T18:38:10Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/7/work-attribute-values", "values": []}}, "emitted_at": 1667382541569} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/8", "tempoWorklogId": 8, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10077", "id": 10077}, "timeSpentSeconds": 2760, "billableSeconds": 2760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:38:11Z", "updatedAt": "2021-04-15T18:38:10Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/8/work-attribute-values", "values": []}}, "emitted_at": 1667382541573} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/9", "tempoWorklogId": 9, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10077", "id": 10077}, "timeSpentSeconds": 4260, "billableSeconds": 4260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:11Z", "updatedAt": "2021-04-15T18:38:11Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/9/work-attribute-values", "values": []}}, "emitted_at": 1667382541576} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/10", "tempoWorklogId": 10, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10077", "id": 10077}, "timeSpentSeconds": 2580, "billableSeconds": 2580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:12Z", "updatedAt": "2021-04-15T18:38:12Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/10/work-attribute-values", "values": []}}, "emitted_at": 1667382541579} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/11", "tempoWorklogId": 11, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10077", "id": 10077}, "timeSpentSeconds": 5820, "billableSeconds": 5820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:38:13Z", "updatedAt": "2021-04-15T18:38:12Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/11/work-attribute-values", "values": []}}, "emitted_at": 1667382541581} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/12", "tempoWorklogId": 12, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10073", "id": 10073}, "timeSpentSeconds": 8640, "billableSeconds": 8640, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:13Z", "updatedAt": "2021-04-15T18:38:13Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/12/work-attribute-values", "values": []}}, "emitted_at": 1667382541583} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/13", "tempoWorklogId": 13, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10070", "id": 10070}, "timeSpentSeconds": 6240, "billableSeconds": 6240, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:38:14Z", "updatedAt": "2021-04-15T18:38:14Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/13/work-attribute-values", "values": []}}, "emitted_at": 1667382541585} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/14", "tempoWorklogId": 14, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10070", "id": 10070}, "timeSpentSeconds": 7080, "billableSeconds": 7080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:14Z", "updatedAt": "2021-04-15T18:38:14Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/14/work-attribute-values", "values": []}}, "emitted_at": 1667382541587} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/15", "tempoWorklogId": 15, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10064", "id": 10064}, "timeSpentSeconds": 10860, "billableSeconds": 10860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:15Z", "updatedAt": "2021-04-15T18:38:15Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/15/work-attribute-values", "values": []}}, "emitted_at": 1667382541589} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/16", "tempoWorklogId": 16, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10064", "id": 10064}, "timeSpentSeconds": 11880, "billableSeconds": 11880, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:16Z", "updatedAt": "2021-04-15T18:38:15Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/16/work-attribute-values", "values": []}}, "emitted_at": 1667382541590} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/17", "tempoWorklogId": 17, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10064", "id": 10064}, "timeSpentSeconds": 9780, "billableSeconds": 9780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:16Z", "updatedAt": "2021-04-15T18:38:16Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/17/work-attribute-values", "values": []}}, "emitted_at": 1667382541592} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/18", "tempoWorklogId": 18, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10064", "id": 10064}, "timeSpentSeconds": 9060, "billableSeconds": 9060, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:38:17Z", "updatedAt": "2021-04-15T18:38:16Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/18/work-attribute-values", "values": []}}, "emitted_at": 1667382541593} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/19", "tempoWorklogId": 19, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10064", "id": 10064}, "timeSpentSeconds": 9480, "billableSeconds": 9480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:38:17Z", "updatedAt": "2021-04-15T18:38:17Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/19/work-attribute-values", "values": []}}, "emitted_at": 1667382541595} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/20", "tempoWorklogId": 20, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10059", "id": 10059}, "timeSpentSeconds": 11520, "billableSeconds": 11520, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:18Z", "updatedAt": "2021-04-15T18:38:18Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/20/work-attribute-values", "values": []}}, "emitted_at": 1667382541596} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/21", "tempoWorklogId": 21, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10059", "id": 10059}, "timeSpentSeconds": 9120, "billableSeconds": 9120, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:19Z", "updatedAt": "2021-04-15T18:38:18Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/21/work-attribute-values", "values": []}}, "emitted_at": 1667382541597} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/22", "tempoWorklogId": 22, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10059", "id": 10059}, "timeSpentSeconds": 5100, "billableSeconds": 5100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:19Z", "updatedAt": "2021-04-15T18:38:19Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/22/work-attribute-values", "values": []}}, "emitted_at": 1667382541599} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/23", "tempoWorklogId": 23, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10059", "id": 10059}, "timeSpentSeconds": 7980, "billableSeconds": 7980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:38:20Z", "updatedAt": "2021-04-15T18:38:20Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/23/work-attribute-values", "values": []}}, "emitted_at": 1667382541600} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/24", "tempoWorklogId": 24, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10059", "id": 10059}, "timeSpentSeconds": 5760, "billableSeconds": 5760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:38:21Z", "updatedAt": "2021-04-15T18:38:20Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/24/work-attribute-values", "values": []}}, "emitted_at": 1667382541601} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/25", "tempoWorklogId": 25, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10050", "id": 10050}, "timeSpentSeconds": 5880, "billableSeconds": 5880, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:21Z", "updatedAt": "2021-04-15T18:38:21Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/25/work-attribute-values", "values": []}}, "emitted_at": 1667382541602} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/26", "tempoWorklogId": 26, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10050", "id": 10050}, "timeSpentSeconds": 6240, "billableSeconds": 6240, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:22Z", "updatedAt": "2021-04-15T18:38:22Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/26/work-attribute-values", "values": []}}, "emitted_at": 1667382541604} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/27", "tempoWorklogId": 27, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10046", "id": 10046}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:23Z", "updatedAt": "2021-04-15T18:38:22Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/27/work-attribute-values", "values": []}}, "emitted_at": 1667382541605} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/28", "tempoWorklogId": 28, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10046", "id": 10046}, "timeSpentSeconds": 10140, "billableSeconds": 10140, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:38:23Z", "updatedAt": "2021-04-15T18:38:23Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/28/work-attribute-values", "values": []}}, "emitted_at": 1667382541606} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/29", "tempoWorklogId": 29, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10046", "id": 10046}, "timeSpentSeconds": 3240, "billableSeconds": 3240, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:24Z", "updatedAt": "2021-04-15T18:38:24Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/29/work-attribute-values", "values": []}}, "emitted_at": 1667382541607} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/30", "tempoWorklogId": 30, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10046", "id": 10046}, "timeSpentSeconds": 11760, "billableSeconds": 11760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:25Z", "updatedAt": "2021-04-15T18:38:24Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/30/work-attribute-values", "values": []}}, "emitted_at": 1667382541608} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/31", "tempoWorklogId": 31, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10046", "id": 10046}, "timeSpentSeconds": 9480, "billableSeconds": 9480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:38:25Z", "updatedAt": "2021-04-15T18:38:25Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/31/work-attribute-values", "values": []}}, "emitted_at": 1667382541609} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/32", "tempoWorklogId": 32, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10033", "id": 10033}, "timeSpentSeconds": 5100, "billableSeconds": 5100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:26Z", "updatedAt": "2021-04-15T18:38:26Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/32/work-attribute-values", "values": []}}, "emitted_at": 1667382541610} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/33", "tempoWorklogId": 33, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10033", "id": 10033}, "timeSpentSeconds": 5520, "billableSeconds": 5520, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:38:27Z", "updatedAt": "2021-04-15T18:38:26Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/33/work-attribute-values", "values": []}}, "emitted_at": 1667382541611} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/34", "tempoWorklogId": 34, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10033", "id": 10033}, "timeSpentSeconds": 4980, "billableSeconds": 4980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:27Z", "updatedAt": "2021-04-15T18:38:27Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/34/work-attribute-values", "values": []}}, "emitted_at": 1667382541612} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/35", "tempoWorklogId": 35, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10028", "id": 10028}, "timeSpentSeconds": 7080, "billableSeconds": 7080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:28Z", "updatedAt": "2021-04-15T18:38:27Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/35/work-attribute-values", "values": []}}, "emitted_at": 1667382541612} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/36", "tempoWorklogId": 36, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10027", "id": 10027}, "timeSpentSeconds": 11820, "billableSeconds": 11820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:28Z", "updatedAt": "2021-04-15T18:38:28Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/36/work-attribute-values", "values": []}}, "emitted_at": 1667382541613} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/37", "tempoWorklogId": 37, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10027", "id": 10027}, "timeSpentSeconds": 3060, "billableSeconds": 3060, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:29Z", "updatedAt": "2021-04-15T18:38:29Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/37/work-attribute-values", "values": []}}, "emitted_at": 1667382541614} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/38", "tempoWorklogId": 38, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10027", "id": 10027}, "timeSpentSeconds": 1140, "billableSeconds": 1140, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:38:30Z", "updatedAt": "2021-04-15T18:38:29Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/38/work-attribute-values", "values": []}}, "emitted_at": 1667382541615} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/39", "tempoWorklogId": 39, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10023", "id": 10023}, "timeSpentSeconds": 2940, "billableSeconds": 2940, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:30Z", "updatedAt": "2021-04-15T18:38:30Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/39/work-attribute-values", "values": []}}, "emitted_at": 1667382541617} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/40", "tempoWorklogId": 40, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10023", "id": 10023}, "timeSpentSeconds": 4620, "billableSeconds": 4620, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:31Z", "updatedAt": "2021-04-15T18:38:31Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/40/work-attribute-values", "values": []}}, "emitted_at": 1667382541618} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/41", "tempoWorklogId": 41, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10023", "id": 10023}, "timeSpentSeconds": 8940, "billableSeconds": 8940, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:32Z", "updatedAt": "2021-04-15T18:38:31Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/41/work-attribute-values", "values": []}}, "emitted_at": 1667382541619} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/42", "tempoWorklogId": 42, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10023", "id": 10023}, "timeSpentSeconds": 8640, "billableSeconds": 8640, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:32Z", "updatedAt": "2021-04-15T18:38:32Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/42/work-attribute-values", "values": []}}, "emitted_at": 1667382541620} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/43", "tempoWorklogId": 43, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10022", "id": 10022}, "timeSpentSeconds": 5640, "billableSeconds": 5640, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:38:33Z", "updatedAt": "2021-04-15T18:38:32Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/43/work-attribute-values", "values": []}}, "emitted_at": 1667382541621} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/44", "tempoWorklogId": 44, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10022", "id": 10022}, "timeSpentSeconds": 10500, "billableSeconds": 10500, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:33Z", "updatedAt": "2021-04-15T18:38:33Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/44/work-attribute-values", "values": []}}, "emitted_at": 1667382541621} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/45", "tempoWorklogId": 45, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10022", "id": 10022}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:34Z", "updatedAt": "2021-04-15T18:38:34Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/45/work-attribute-values", "values": []}}, "emitted_at": 1667382541622} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/46", "tempoWorklogId": 46, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10022", "id": 10022}, "timeSpentSeconds": 900, "billableSeconds": 900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:35Z", "updatedAt": "2021-04-15T18:38:34Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/46/work-attribute-values", "values": []}}, "emitted_at": 1667382541623} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/47", "tempoWorklogId": 47, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10022", "id": 10022}, "timeSpentSeconds": 4440, "billableSeconds": 4440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:38:35Z", "updatedAt": "2021-04-15T18:38:35Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/47/work-attribute-values", "values": []}}, "emitted_at": 1667382541624} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/48", "tempoWorklogId": 48, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10015", "id": 10015}, "timeSpentSeconds": 2340, "billableSeconds": 2340, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:38:36Z", "updatedAt": "2021-04-15T18:38:36Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/48/work-attribute-values", "values": []}}, "emitted_at": 1667382541625} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/49", "tempoWorklogId": 49, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10015", "id": 10015}, "timeSpentSeconds": 1500, "billableSeconds": 1500, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:37Z", "updatedAt": "2021-04-15T18:38:36Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/49/work-attribute-values", "values": []}}, "emitted_at": 1667382541626} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/50", "tempoWorklogId": 50, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10015", "id": 10015}, "timeSpentSeconds": 7380, "billableSeconds": 7380, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:37Z", "updatedAt": "2021-04-15T18:38:37Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/50/work-attribute-values", "values": []}}, "emitted_at": 1667382541627} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/51", "tempoWorklogId": 51, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10015", "id": 10015}, "timeSpentSeconds": 10560, "billableSeconds": 10560, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:38Z", "updatedAt": "2021-04-15T18:38:37Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/51/work-attribute-values", "values": []}}, "emitted_at": 1667382541628} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/52", "tempoWorklogId": 52, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10014", "id": 10014}, "timeSpentSeconds": 10080, "billableSeconds": 10080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:38Z", "updatedAt": "2021-04-15T18:38:38Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/52/work-attribute-values", "values": []}}, "emitted_at": 1667382541629} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/53", "tempoWorklogId": 53, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10014", "id": 10014}, "timeSpentSeconds": 1860, "billableSeconds": 1860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:38:39Z", "updatedAt": "2021-04-15T18:38:39Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/53/work-attribute-values", "values": []}}, "emitted_at": 1667382541630} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/54", "tempoWorklogId": 54, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10014", "id": 10014}, "timeSpentSeconds": 5580, "billableSeconds": 5580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:40Z", "updatedAt": "2021-04-15T18:38:39Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/54/work-attribute-values", "values": []}}, "emitted_at": 1667382541631} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/55", "tempoWorklogId": 55, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10014", "id": 10014}, "timeSpentSeconds": 7920, "billableSeconds": 7920, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:40Z", "updatedAt": "2021-04-15T18:38:40Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/55/work-attribute-values", "values": []}}, "emitted_at": 1667382541632} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/56", "tempoWorklogId": 56, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10014", "id": 10014}, "timeSpentSeconds": 8160, "billableSeconds": 8160, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:38:41Z", "updatedAt": "2021-04-15T18:38:40Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/56/work-attribute-values", "values": []}}, "emitted_at": 1667382541924} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/57", "tempoWorklogId": 57, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10011", "id": 10011}, "timeSpentSeconds": 2160, "billableSeconds": 2160, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:41Z", "updatedAt": "2021-04-15T18:38:41Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/57/work-attribute-values", "values": []}}, "emitted_at": 1667382541925} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/58", "tempoWorklogId": 58, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10011", "id": 10011}, "timeSpentSeconds": 7200, "billableSeconds": 7200, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:38:42Z", "updatedAt": "2021-04-15T18:38:42Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/58/work-attribute-values", "values": []}}, "emitted_at": 1667382541927} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/59", "tempoWorklogId": 59, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10011", "id": 10011}, "timeSpentSeconds": 3900, "billableSeconds": 3900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:43Z", "updatedAt": "2021-04-15T18:38:43Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/59/work-attribute-values", "values": []}}, "emitted_at": 1667382541928} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/60", "tempoWorklogId": 60, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10011", "id": 10011}, "timeSpentSeconds": 11880, "billableSeconds": 11880, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:44Z", "updatedAt": "2021-04-15T18:38:43Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/60/work-attribute-values", "values": []}}, "emitted_at": 1667382541930} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/61", "tempoWorklogId": 61, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10079", "id": 10079}, "timeSpentSeconds": 7380, "billableSeconds": 7380, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:44Z", "updatedAt": "2021-04-15T18:38:44Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/61/work-attribute-values", "values": []}}, "emitted_at": 1667382541931} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/62", "tempoWorklogId": 62, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10079", "id": 10079}, "timeSpentSeconds": 780, "billableSeconds": 780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:45Z", "updatedAt": "2021-04-15T18:38:44Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/62/work-attribute-values", "values": []}}, "emitted_at": 1667382541933} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/63", "tempoWorklogId": 63, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10076", "id": 10076}, "timeSpentSeconds": 9480, "billableSeconds": 9480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:38:45Z", "updatedAt": "2021-04-15T18:38:45Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/63/work-attribute-values", "values": []}}, "emitted_at": 1667382541934} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/64", "tempoWorklogId": 64, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10076", "id": 10076}, "timeSpentSeconds": 6480, "billableSeconds": 6480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:46Z", "updatedAt": "2021-04-15T18:38:46Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/64/work-attribute-values", "values": []}}, "emitted_at": 1667382541936} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/65", "tempoWorklogId": 65, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10074", "id": 10074}, "timeSpentSeconds": 7680, "billableSeconds": 7680, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:47Z", "updatedAt": "2021-04-15T18:38:46Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/65/work-attribute-values", "values": []}}, "emitted_at": 1667382541937} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/66", "tempoWorklogId": 66, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10074", "id": 10074}, "timeSpentSeconds": 6540, "billableSeconds": 6540, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:47Z", "updatedAt": "2021-04-15T18:38:47Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/66/work-attribute-values", "values": []}}, "emitted_at": 1667382541939} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/67", "tempoWorklogId": 67, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10074", "id": 10074}, "timeSpentSeconds": 5100, "billableSeconds": 5100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:48Z", "updatedAt": "2021-04-15T18:38:48Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/67/work-attribute-values", "values": []}}, "emitted_at": 1667382541941} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/68", "tempoWorklogId": 68, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10074", "id": 10074}, "timeSpentSeconds": 8100, "billableSeconds": 8100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:38:49Z", "updatedAt": "2021-04-15T18:38:48Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/68/work-attribute-values", "values": []}}, "emitted_at": 1667382541942} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/69", "tempoWorklogId": 69, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10072", "id": 10072}, "timeSpentSeconds": 5820, "billableSeconds": 5820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:50Z", "updatedAt": "2021-04-15T18:38:49Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/69/work-attribute-values", "values": []}}, "emitted_at": 1667382541943} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/70", "tempoWorklogId": 70, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10072", "id": 10072}, "timeSpentSeconds": 3360, "billableSeconds": 3360, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:50Z", "updatedAt": "2021-04-15T18:38:50Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/70/work-attribute-values", "values": []}}, "emitted_at": 1667382541945} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/71", "tempoWorklogId": 71, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10072", "id": 10072}, "timeSpentSeconds": 6540, "billableSeconds": 6540, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:51Z", "updatedAt": "2021-04-15T18:38:50Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/71/work-attribute-values", "values": []}}, "emitted_at": 1667382541946} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/72", "tempoWorklogId": 72, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10068", "id": 10068}, "timeSpentSeconds": 8100, "billableSeconds": 8100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:52Z", "updatedAt": "2021-04-15T18:38:51Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/72/work-attribute-values", "values": []}}, "emitted_at": 1667382541947} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/73", "tempoWorklogId": 73, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10068", "id": 10068}, "timeSpentSeconds": 4140, "billableSeconds": 4140, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:38:52Z", "updatedAt": "2021-04-15T18:38:52Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/73/work-attribute-values", "values": []}}, "emitted_at": 1667382541948} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/74", "tempoWorklogId": 74, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10066", "id": 10066}, "timeSpentSeconds": 3240, "billableSeconds": 3240, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:53Z", "updatedAt": "2021-04-15T18:38:52Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/74/work-attribute-values", "values": []}}, "emitted_at": 1667382541949} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/75", "tempoWorklogId": 75, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10066", "id": 10066}, "timeSpentSeconds": 6000, "billableSeconds": 6000, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:53Z", "updatedAt": "2021-04-15T18:38:53Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/75/work-attribute-values", "values": []}}, "emitted_at": 1667382541950} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/76", "tempoWorklogId": 76, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10066", "id": 10066}, "timeSpentSeconds": 6240, "billableSeconds": 6240, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:38:54Z", "updatedAt": "2021-04-15T18:38:54Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/76/work-attribute-values", "values": []}}, "emitted_at": 1667382541951} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/77", "tempoWorklogId": 77, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10066", "id": 10066}, "timeSpentSeconds": 10740, "billableSeconds": 10740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:55Z", "updatedAt": "2021-04-15T18:38:54Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/77/work-attribute-values", "values": []}}, "emitted_at": 1667382541952} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/78", "tempoWorklogId": 78, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10060", "id": 10060}, "timeSpentSeconds": 7860, "billableSeconds": 7860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:38:55Z", "updatedAt": "2021-04-15T18:38:55Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/78/work-attribute-values", "values": []}}, "emitted_at": 1667382541953} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/79", "tempoWorklogId": 79, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10060", "id": 10060}, "timeSpentSeconds": 4440, "billableSeconds": 4440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:56Z", "updatedAt": "2021-04-15T18:38:56Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/79/work-attribute-values", "values": []}}, "emitted_at": 1667382541954} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/80", "tempoWorklogId": 80, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10058", "id": 10058}, "timeSpentSeconds": 4380, "billableSeconds": 4380, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:57Z", "updatedAt": "2021-04-15T18:38:56Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/80/work-attribute-values", "values": []}}, "emitted_at": 1667382541955} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/81", "tempoWorklogId": 81, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10056", "id": 10056}, "timeSpentSeconds": 5100, "billableSeconds": 5100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:38:57Z", "updatedAt": "2021-04-15T18:38:57Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/81/work-attribute-values", "values": []}}, "emitted_at": 1667382541956} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/82", "tempoWorklogId": 82, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10056", "id": 10056}, "timeSpentSeconds": 6960, "billableSeconds": 6960, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:38:58Z", "updatedAt": "2021-04-15T18:38:57Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/82/work-attribute-values", "values": []}}, "emitted_at": 1667382541956} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/83", "tempoWorklogId": 83, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10056", "id": 10056}, "timeSpentSeconds": 4860, "billableSeconds": 4860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:38:59Z", "updatedAt": "2021-04-15T18:38:58Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/83/work-attribute-values", "values": []}}, "emitted_at": 1667382541957} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/84", "tempoWorklogId": 84, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10056", "id": 10056}, "timeSpentSeconds": 960, "billableSeconds": 960, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:38:59Z", "updatedAt": "2021-04-15T18:38:59Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/84/work-attribute-values", "values": []}}, "emitted_at": 1667382541959} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/85", "tempoWorklogId": 85, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10049", "id": 10049}, "timeSpentSeconds": 2640, "billableSeconds": 2640, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:00Z", "updatedAt": "2021-04-15T18:38:59Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/85/work-attribute-values", "values": []}}, "emitted_at": 1667382541959} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/86", "tempoWorklogId": 86, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10049", "id": 10049}, "timeSpentSeconds": 1980, "billableSeconds": 1980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:00Z", "updatedAt": "2021-04-15T18:39:00Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/86/work-attribute-values", "values": []}}, "emitted_at": 1667382541960} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/87", "tempoWorklogId": 87, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10049", "id": 10049}, "timeSpentSeconds": 11940, "billableSeconds": 11940, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:01Z", "updatedAt": "2021-04-15T18:39:01Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/87/work-attribute-values", "values": []}}, "emitted_at": 1667382541961} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/88", "tempoWorklogId": 88, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10049", "id": 10049}, "timeSpentSeconds": 3000, "billableSeconds": 3000, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:39:02Z", "updatedAt": "2021-04-15T18:39:01Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/88/work-attribute-values", "values": []}}, "emitted_at": 1667382541962} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/89", "tempoWorklogId": 89, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10049", "id": 10049}, "timeSpentSeconds": 11700, "billableSeconds": 11700, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:02Z", "updatedAt": "2021-04-15T18:39:02Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/89/work-attribute-values", "values": []}}, "emitted_at": 1667382541963} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/90", "tempoWorklogId": 90, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10048", "id": 10048}, "timeSpentSeconds": 1980, "billableSeconds": 1980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:03Z", "updatedAt": "2021-04-15T18:39:03Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/90/work-attribute-values", "values": []}}, "emitted_at": 1667382541964} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/91", "tempoWorklogId": 91, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10048", "id": 10048}, "timeSpentSeconds": 2580, "billableSeconds": 2580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:04Z", "updatedAt": "2021-04-15T18:39:03Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/91/work-attribute-values", "values": []}}, "emitted_at": 1667382541965} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/92", "tempoWorklogId": 92, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10048", "id": 10048}, "timeSpentSeconds": 4920, "billableSeconds": 4920, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:04Z", "updatedAt": "2021-04-15T18:39:04Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/92/work-attribute-values", "values": []}}, "emitted_at": 1667382541966} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/93", "tempoWorklogId": 93, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10047", "id": 10047}, "timeSpentSeconds": 4740, "billableSeconds": 4740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:05Z", "updatedAt": "2021-04-15T18:39:05Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/93/work-attribute-values", "values": []}}, "emitted_at": 1667382541967} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/94", "tempoWorklogId": 94, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10047", "id": 10047}, "timeSpentSeconds": 9720, "billableSeconds": 9720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:06Z", "updatedAt": "2021-04-15T18:39:05Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/94/work-attribute-values", "values": []}}, "emitted_at": 1667382541968} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/95", "tempoWorklogId": 95, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10047", "id": 10047}, "timeSpentSeconds": 5100, "billableSeconds": 5100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:06Z", "updatedAt": "2021-04-15T18:39:06Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/95/work-attribute-values", "values": []}}, "emitted_at": 1667382541969} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/96", "tempoWorklogId": 96, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10047", "id": 10047}, "timeSpentSeconds": 960, "billableSeconds": 960, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:07Z", "updatedAt": "2021-04-15T18:39:07Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/96/work-attribute-values", "values": []}}, "emitted_at": 1667382541970} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/97", "tempoWorklogId": 97, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10047", "id": 10047}, "timeSpentSeconds": 5280, "billableSeconds": 5280, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:08Z", "updatedAt": "2021-04-15T18:39:08Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/97/work-attribute-values", "values": []}}, "emitted_at": 1667382541971} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/98", "tempoWorklogId": 98, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10045", "id": 10045}, "timeSpentSeconds": 5160, "billableSeconds": 5160, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:09Z", "updatedAt": "2021-04-15T18:39:08Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/98/work-attribute-values", "values": []}}, "emitted_at": 1667382541972} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/99", "tempoWorklogId": 99, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10045", "id": 10045}, "timeSpentSeconds": 1020, "billableSeconds": 1020, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:09Z", "updatedAt": "2021-04-15T18:39:09Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/99/work-attribute-values", "values": []}}, "emitted_at": 1667382541972} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/100", "tempoWorklogId": 100, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10041", "id": 10041}, "timeSpentSeconds": 11280, "billableSeconds": 11280, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:10Z", "updatedAt": "2021-04-15T18:39:10Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/100/work-attribute-values", "values": []}}, "emitted_at": 1667382541973} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/101", "tempoWorklogId": 101, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10041", "id": 10041}, "timeSpentSeconds": 5760, "billableSeconds": 5760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:11Z", "updatedAt": "2021-04-15T18:39:10Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/101/work-attribute-values", "values": []}}, "emitted_at": 1667382541974} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/102", "tempoWorklogId": 102, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10041", "id": 10041}, "timeSpentSeconds": 3720, "billableSeconds": 3720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:11Z", "updatedAt": "2021-04-15T18:39:11Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/102/work-attribute-values", "values": []}}, "emitted_at": 1667382541975} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/103", "tempoWorklogId": 103, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10040", "id": 10040}, "timeSpentSeconds": 9420, "billableSeconds": 9420, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:12Z", "updatedAt": "2021-04-15T18:39:11Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/103/work-attribute-values", "values": []}}, "emitted_at": 1667382541976} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/104", "tempoWorklogId": 104, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10039", "id": 10039}, "timeSpentSeconds": 5340, "billableSeconds": 5340, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:13Z", "updatedAt": "2021-04-15T18:39:12Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/104/work-attribute-values", "values": []}}, "emitted_at": 1667382541977} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/105", "tempoWorklogId": 105, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10039", "id": 10039}, "timeSpentSeconds": 6540, "billableSeconds": 6540, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:13Z", "updatedAt": "2021-04-15T18:39:13Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/105/work-attribute-values", "values": []}}, "emitted_at": 1667382541978} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/106", "tempoWorklogId": 106, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10036", "id": 10036}, "timeSpentSeconds": 5700, "billableSeconds": 5700, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:14Z", "updatedAt": "2021-04-15T18:39:14Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/106/work-attribute-values", "values": []}}, "emitted_at": 1667382542236} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/107", "tempoWorklogId": 107, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10036", "id": 10036}, "timeSpentSeconds": 4860, "billableSeconds": 4860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:15Z", "updatedAt": "2021-04-15T18:39:14Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/107/work-attribute-values", "values": []}}, "emitted_at": 1667382542237} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/108", "tempoWorklogId": 108, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10036", "id": 10036}, "timeSpentSeconds": 5340, "billableSeconds": 5340, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:39:15Z", "updatedAt": "2021-04-15T18:39:15Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/108/work-attribute-values", "values": []}}, "emitted_at": 1667382542238} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/109", "tempoWorklogId": 109, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10036", "id": 10036}, "timeSpentSeconds": 1800, "billableSeconds": 1800, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:16Z", "updatedAt": "2021-04-15T18:39:15Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/109/work-attribute-values", "values": []}}, "emitted_at": 1667382542239} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/110", "tempoWorklogId": 110, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10036", "id": 10036}, "timeSpentSeconds": 10440, "billableSeconds": 10440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:16Z", "updatedAt": "2021-04-15T18:39:16Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/110/work-attribute-values", "values": []}}, "emitted_at": 1667382542240} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/111", "tempoWorklogId": 111, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10032", "id": 10032}, "timeSpentSeconds": 3180, "billableSeconds": 3180, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:17Z", "updatedAt": "2021-04-15T18:39:17Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/111/work-attribute-values", "values": []}}, "emitted_at": 1667382542241} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/112", "tempoWorklogId": 112, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10032", "id": 10032}, "timeSpentSeconds": 3120, "billableSeconds": 3120, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:18Z", "updatedAt": "2021-04-15T18:39:17Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/112/work-attribute-values", "values": []}}, "emitted_at": 1667382542242} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/113", "tempoWorklogId": 113, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10032", "id": 10032}, "timeSpentSeconds": 660, "billableSeconds": 660, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:39:18Z", "updatedAt": "2021-04-15T18:39:18Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/113/work-attribute-values", "values": []}}, "emitted_at": 1667382542243} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/114", "tempoWorklogId": 114, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10030", "id": 10030}, "timeSpentSeconds": 8460, "billableSeconds": 8460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:19Z", "updatedAt": "2021-04-15T18:39:19Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/114/work-attribute-values", "values": []}}, "emitted_at": 1667382542243} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/115", "tempoWorklogId": 115, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10030", "id": 10030}, "timeSpentSeconds": 4260, "billableSeconds": 4260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:20Z", "updatedAt": "2021-04-15T18:39:19Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/115/work-attribute-values", "values": []}}, "emitted_at": 1667382542244} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/116", "tempoWorklogId": 116, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10030", "id": 10030}, "timeSpentSeconds": 1800, "billableSeconds": 1800, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:20Z", "updatedAt": "2021-04-15T18:39:20Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/116/work-attribute-values", "values": []}}, "emitted_at": 1667382542245} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/117", "tempoWorklogId": 117, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10030", "id": 10030}, "timeSpentSeconds": 9480, "billableSeconds": 9480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:21Z", "updatedAt": "2021-04-15T18:39:21Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/117/work-attribute-values", "values": []}}, "emitted_at": 1667382542246} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/118", "tempoWorklogId": 118, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10025", "id": 10025}, "timeSpentSeconds": 10380, "billableSeconds": 10380, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:22Z", "updatedAt": "2021-04-15T18:39:21Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/118/work-attribute-values", "values": []}}, "emitted_at": 1667382542247} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/119", "tempoWorklogId": 119, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10025", "id": 10025}, "timeSpentSeconds": 8100, "billableSeconds": 8100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:22Z", "updatedAt": "2021-04-15T18:39:22Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/119/work-attribute-values", "values": []}}, "emitted_at": 1667382542248} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/120", "tempoWorklogId": 120, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10025", "id": 10025}, "timeSpentSeconds": 1080, "billableSeconds": 1080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:23Z", "updatedAt": "2021-04-15T18:39:23Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/120/work-attribute-values", "values": []}}, "emitted_at": 1667382542249} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/121", "tempoWorklogId": 121, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10025", "id": 10025}, "timeSpentSeconds": 2460, "billableSeconds": 2460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:24Z", "updatedAt": "2021-04-15T18:39:23Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/121/work-attribute-values", "values": []}}, "emitted_at": 1667382542250} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/122", "tempoWorklogId": 122, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10025", "id": 10025}, "timeSpentSeconds": 5460, "billableSeconds": 5460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:24Z", "updatedAt": "2021-04-15T18:39:24Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/122/work-attribute-values", "values": []}}, "emitted_at": 1667382542251} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/123", "tempoWorklogId": 123, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10020", "id": 10020}, "timeSpentSeconds": 1860, "billableSeconds": 1860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:25Z", "updatedAt": "2021-04-15T18:39:25Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/123/work-attribute-values", "values": []}}, "emitted_at": 1667382542252} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/124", "tempoWorklogId": 124, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10020", "id": 10020}, "timeSpentSeconds": 11220, "billableSeconds": 11220, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:26Z", "updatedAt": "2021-04-15T18:39:25Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/124/work-attribute-values", "values": []}}, "emitted_at": 1667382542253} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/125", "tempoWorklogId": 125, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10018", "id": 10018}, "timeSpentSeconds": 7440, "billableSeconds": 7440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:26Z", "updatedAt": "2021-04-15T18:39:26Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/125/work-attribute-values", "values": []}}, "emitted_at": 1667382542254} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/126", "tempoWorklogId": 126, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10018", "id": 10018}, "timeSpentSeconds": 10920, "billableSeconds": 10920, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:27Z", "updatedAt": "2021-04-15T18:39:26Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/126/work-attribute-values", "values": []}}, "emitted_at": 1667382542255} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/127", "tempoWorklogId": 127, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10018", "id": 10018}, "timeSpentSeconds": 6360, "billableSeconds": 6360, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:27Z", "updatedAt": "2021-04-15T18:39:27Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/127/work-attribute-values", "values": []}}, "emitted_at": 1667382542255} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/128", "tempoWorklogId": 128, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10018", "id": 10018}, "timeSpentSeconds": 7620, "billableSeconds": 7620, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:39:28Z", "updatedAt": "2021-04-15T18:39:28Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/128/work-attribute-values", "values": []}}, "emitted_at": 1667382542256} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/129", "tempoWorklogId": 129, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10018", "id": 10018}, "timeSpentSeconds": 7980, "billableSeconds": 7980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:29Z", "updatedAt": "2021-04-15T18:39:28Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/129/work-attribute-values", "values": []}}, "emitted_at": 1667382542257} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/130", "tempoWorklogId": 130, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10016", "id": 10016}, "timeSpentSeconds": 3420, "billableSeconds": 3420, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:29Z", "updatedAt": "2021-04-15T18:39:29Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/130/work-attribute-values", "values": []}}, "emitted_at": 1667382542258} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/131", "tempoWorklogId": 131, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10016", "id": 10016}, "timeSpentSeconds": 9540, "billableSeconds": 9540, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:31Z", "updatedAt": "2021-04-15T18:39:31Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/131/work-attribute-values", "values": []}}, "emitted_at": 1667382542259} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/132", "tempoWorklogId": 132, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10016", "id": 10016}, "timeSpentSeconds": 10320, "billableSeconds": 10320, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:32Z", "updatedAt": "2021-04-15T18:39:31Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/132/work-attribute-values", "values": []}}, "emitted_at": 1667382542260} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/133", "tempoWorklogId": 133, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10016", "id": 10016}, "timeSpentSeconds": 5460, "billableSeconds": 5460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:39:32Z", "updatedAt": "2021-04-15T18:39:32Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/133/work-attribute-values", "values": []}}, "emitted_at": 1667382542261} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/134", "tempoWorklogId": 134, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10016", "id": 10016}, "timeSpentSeconds": 6360, "billableSeconds": 6360, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:33Z", "updatedAt": "2021-04-15T18:39:33Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/134/work-attribute-values", "values": []}}, "emitted_at": 1667382542262} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/135", "tempoWorklogId": 135, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10010", "id": 10010}, "timeSpentSeconds": 7800, "billableSeconds": 7800, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:34Z", "updatedAt": "2021-04-15T18:39:33Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/135/work-attribute-values", "values": []}}, "emitted_at": 1667382542263} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/136", "tempoWorklogId": 136, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10010", "id": 10010}, "timeSpentSeconds": 3420, "billableSeconds": 3420, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:34Z", "updatedAt": "2021-04-15T18:39:34Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/136/work-attribute-values", "values": []}}, "emitted_at": 1667382542264} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/137", "tempoWorklogId": 137, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10010", "id": 10010}, "timeSpentSeconds": 5940, "billableSeconds": 5940, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:35Z", "updatedAt": "2021-04-15T18:39:35Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/137/work-attribute-values", "values": []}}, "emitted_at": 1667382542265} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/138", "tempoWorklogId": 138, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10010", "id": 10010}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:39:35Z", "updatedAt": "2021-04-15T18:39:35Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/138/work-attribute-values", "values": []}}, "emitted_at": 1667382542266} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/139", "tempoWorklogId": 139, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10006", "id": 10006}, "timeSpentSeconds": 2460, "billableSeconds": 2460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:36Z", "updatedAt": "2021-04-15T18:39:36Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/139/work-attribute-values", "values": []}}, "emitted_at": 1667382542268} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/140", "tempoWorklogId": 140, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10006", "id": 10006}, "timeSpentSeconds": 7500, "billableSeconds": 7500, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:37Z", "updatedAt": "2021-04-15T18:39:36Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/140/work-attribute-values", "values": []}}, "emitted_at": 1667382542269} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/141", "tempoWorklogId": 141, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10005", "id": 10005}, "timeSpentSeconds": 6360, "billableSeconds": 6360, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:37Z", "updatedAt": "2021-04-15T18:39:37Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/141/work-attribute-values", "values": []}}, "emitted_at": 1667382542271} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/142", "tempoWorklogId": 142, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10005", "id": 10005}, "timeSpentSeconds": 10680, "billableSeconds": 10680, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:38Z", "updatedAt": "2021-04-15T18:39:38Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/142/work-attribute-values", "values": []}}, "emitted_at": 1667382542273} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/143", "tempoWorklogId": 143, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10005", "id": 10005}, "timeSpentSeconds": 6480, "billableSeconds": 6480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:39:39Z", "updatedAt": "2021-04-15T18:39:38Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/143/work-attribute-values", "values": []}}, "emitted_at": 1667382542275} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/144", "tempoWorklogId": 144, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10004", "id": 10004}, "timeSpentSeconds": 10320, "billableSeconds": 10320, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:39Z", "updatedAt": "2021-04-15T18:39:39Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/144/work-attribute-values", "values": []}}, "emitted_at": 1667382542276} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/145", "tempoWorklogId": 145, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10004", "id": 10004}, "timeSpentSeconds": 5760, "billableSeconds": 5760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:40Z", "updatedAt": "2021-04-15T18:39:40Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/145/work-attribute-values", "values": []}}, "emitted_at": 1667382542277} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/146", "tempoWorklogId": 146, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10004", "id": 10004}, "timeSpentSeconds": 11580, "billableSeconds": 11580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:41Z", "updatedAt": "2021-04-15T18:39:40Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/146/work-attribute-values", "values": []}}, "emitted_at": 1667382542278} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/147", "tempoWorklogId": 147, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10003", "id": 10003}, "timeSpentSeconds": 4020, "billableSeconds": 4020, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:41Z", "updatedAt": "2021-04-15T18:39:41Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/147/work-attribute-values", "values": []}}, "emitted_at": 1667382542279} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/148", "tempoWorklogId": 148, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10003", "id": 10003}, "timeSpentSeconds": 2760, "billableSeconds": 2760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:39:42Z", "updatedAt": "2021-04-15T18:39:41Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/148/work-attribute-values", "values": []}}, "emitted_at": 1667382542280} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/149", "tempoWorklogId": 149, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10003", "id": 10003}, "timeSpentSeconds": 8940, "billableSeconds": 8940, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:42Z", "updatedAt": "2021-04-15T18:39:42Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/149/work-attribute-values", "values": []}}, "emitted_at": 1667382542281} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/150", "tempoWorklogId": 150, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10002", "id": 10002}, "timeSpentSeconds": 2100, "billableSeconds": 2100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:43Z", "updatedAt": "2021-04-15T18:39:43Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/150/work-attribute-values", "values": []}}, "emitted_at": 1667382542282} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/151", "tempoWorklogId": 151, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10002", "id": 10002}, "timeSpentSeconds": 10260, "billableSeconds": 10260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:44Z", "updatedAt": "2021-04-15T18:39:43Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/151/work-attribute-values", "values": []}}, "emitted_at": 1667382542283} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/152", "tempoWorklogId": 152, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10002", "id": 10002}, "timeSpentSeconds": 8340, "billableSeconds": 8340, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:44Z", "updatedAt": "2021-04-15T18:39:44Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/152/work-attribute-values", "values": []}}, "emitted_at": 1667382542284} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/153", "tempoWorklogId": 153, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10002", "id": 10002}, "timeSpentSeconds": 10920, "billableSeconds": 10920, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:39:45Z", "updatedAt": "2021-04-15T18:39:45Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/153/work-attribute-values", "values": []}}, "emitted_at": 1667382542284} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/154", "tempoWorklogId": 154, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10002", "id": 10002}, "timeSpentSeconds": 4860, "billableSeconds": 4860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:46Z", "updatedAt": "2021-04-15T18:39:45Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/154/work-attribute-values", "values": []}}, "emitted_at": 1667382542285} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/155", "tempoWorklogId": 155, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10080", "id": 10080}, "timeSpentSeconds": 8460, "billableSeconds": 8460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:46Z", "updatedAt": "2021-04-15T18:39:46Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/155/work-attribute-values", "values": []}}, "emitted_at": 1667382542286} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/156", "tempoWorklogId": 156, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10080", "id": 10080}, "timeSpentSeconds": 2220, "billableSeconds": 2220, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:47Z", "updatedAt": "2021-04-15T18:39:47Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/156/work-attribute-values", "values": []}}, "emitted_at": 1667382542693} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/157", "tempoWorklogId": 157, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10080", "id": 10080}, "timeSpentSeconds": 10200, "billableSeconds": 10200, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:48Z", "updatedAt": "2021-04-15T18:39:47Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/157/work-attribute-values", "values": []}}, "emitted_at": 1667382542697} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/158", "tempoWorklogId": 158, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10075", "id": 10075}, "timeSpentSeconds": 5280, "billableSeconds": 5280, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:48Z", "updatedAt": "2021-04-15T18:39:48Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/158/work-attribute-values", "values": []}}, "emitted_at": 1667382542701} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/159", "tempoWorklogId": 159, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10075", "id": 10075}, "timeSpentSeconds": 6900, "billableSeconds": 6900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:49Z", "updatedAt": "2021-04-15T18:39:49Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/159/work-attribute-values", "values": []}}, "emitted_at": 1667382542705} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/160", "tempoWorklogId": 160, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10075", "id": 10075}, "timeSpentSeconds": 9720, "billableSeconds": 9720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:49Z", "updatedAt": "2021-04-15T18:39:49Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/160/work-attribute-values", "values": []}}, "emitted_at": 1667382542708} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/161", "tempoWorklogId": 161, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10075", "id": 10075}, "timeSpentSeconds": 4980, "billableSeconds": 4980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:50Z", "updatedAt": "2021-04-15T18:39:50Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/161/work-attribute-values", "values": []}}, "emitted_at": 1667382542711} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/162", "tempoWorklogId": 162, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10069", "id": 10069}, "timeSpentSeconds": 4080, "billableSeconds": 4080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:51Z", "updatedAt": "2021-04-15T18:39:50Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/162/work-attribute-values", "values": []}}, "emitted_at": 1667382542713} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/163", "tempoWorklogId": 163, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10069", "id": 10069}, "timeSpentSeconds": 720, "billableSeconds": 720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:39:51Z", "updatedAt": "2021-04-15T18:39:51Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/163/work-attribute-values", "values": []}}, "emitted_at": 1667382542715} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/164", "tempoWorklogId": 164, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10069", "id": 10069}, "timeSpentSeconds": 9900, "billableSeconds": 9900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:39:52Z", "updatedAt": "2021-04-15T18:39:52Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/164/work-attribute-values", "values": []}}, "emitted_at": 1667382542717} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/165", "tempoWorklogId": 165, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10069", "id": 10069}, "timeSpentSeconds": 3600, "billableSeconds": 3600, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:53Z", "updatedAt": "2021-04-15T18:39:52Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/165/work-attribute-values", "values": []}}, "emitted_at": 1667382542719} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/166", "tempoWorklogId": 166, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10065", "id": 10065}, "timeSpentSeconds": 10080, "billableSeconds": 10080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:53Z", "updatedAt": "2021-04-15T18:39:53Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/166/work-attribute-values", "values": []}}, "emitted_at": 1667382542721} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/167", "tempoWorklogId": 167, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10065", "id": 10065}, "timeSpentSeconds": 5580, "billableSeconds": 5580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:39:54Z", "updatedAt": "2021-04-15T18:39:53Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/167/work-attribute-values", "values": []}}, "emitted_at": 1667382542723} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/168", "tempoWorklogId": 168, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10065", "id": 10065}, "timeSpentSeconds": 2760, "billableSeconds": 2760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:39:54Z", "updatedAt": "2021-04-15T18:39:54Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/168/work-attribute-values", "values": []}}, "emitted_at": 1667382542724} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/169", "tempoWorklogId": 169, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10065", "id": 10065}, "timeSpentSeconds": 5100, "billableSeconds": 5100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:39:55Z", "updatedAt": "2021-04-15T18:39:55Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/169/work-attribute-values", "values": []}}, "emitted_at": 1667382542726} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/170", "tempoWorklogId": 170, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10065", "id": 10065}, "timeSpentSeconds": 4680, "billableSeconds": 4680, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:39:56Z", "updatedAt": "2021-04-15T18:39:55Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/170/work-attribute-values", "values": []}}, "emitted_at": 1667382542727} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/171", "tempoWorklogId": 171, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10063", "id": 10063}, "timeSpentSeconds": 6480, "billableSeconds": 6480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:56Z", "updatedAt": "2021-04-15T18:39:56Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/171/work-attribute-values", "values": []}}, "emitted_at": 1667382542728} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/172", "tempoWorklogId": 172, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10062", "id": 10062}, "timeSpentSeconds": 3720, "billableSeconds": 3720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:39:59Z", "updatedAt": "2021-04-15T18:39:58Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/172/work-attribute-values", "values": []}}, "emitted_at": 1667382542730} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/173", "tempoWorklogId": 173, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10061", "id": 10061}, "timeSpentSeconds": 11220, "billableSeconds": 11220, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:39:59Z", "updatedAt": "2021-04-15T18:39:59Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/173/work-attribute-values", "values": []}}, "emitted_at": 1667382542731} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/174", "tempoWorklogId": 174, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10061", "id": 10061}, "timeSpentSeconds": 4800, "billableSeconds": 4800, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:00Z", "updatedAt": "2021-04-15T18:40:00Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/174/work-attribute-values", "values": []}}, "emitted_at": 1667382542732} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/175", "tempoWorklogId": 175, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10061", "id": 10061}, "timeSpentSeconds": 1140, "billableSeconds": 1140, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:01Z", "updatedAt": "2021-04-15T18:40:00Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/175/work-attribute-values", "values": []}}, "emitted_at": 1667382542733} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/176", "tempoWorklogId": 176, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10061", "id": 10061}, "timeSpentSeconds": 8820, "billableSeconds": 8820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:01Z", "updatedAt": "2021-04-15T18:40:01Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/176/work-attribute-values", "values": []}}, "emitted_at": 1667382542734} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/177", "tempoWorklogId": 177, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10061", "id": 10061}, "timeSpentSeconds": 1800, "billableSeconds": 1800, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:40:02Z", "updatedAt": "2021-04-15T18:40:01Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/177/work-attribute-values", "values": []}}, "emitted_at": 1667382542735} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/178", "tempoWorklogId": 178, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10055", "id": 10055}, "timeSpentSeconds": 4380, "billableSeconds": 4380, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:03Z", "updatedAt": "2021-04-15T18:40:02Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/178/work-attribute-values", "values": []}}, "emitted_at": 1667382542737} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/179", "tempoWorklogId": 179, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10055", "id": 10055}, "timeSpentSeconds": 2100, "billableSeconds": 2100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:03Z", "updatedAt": "2021-04-15T18:40:03Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/179/work-attribute-values", "values": []}}, "emitted_at": 1667382542738} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/180", "tempoWorklogId": 180, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10051", "id": 10051}, "timeSpentSeconds": 11460, "billableSeconds": 11460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:04Z", "updatedAt": "2021-04-15T18:40:04Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/180/work-attribute-values", "values": []}}, "emitted_at": 1667382542739} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/181", "tempoWorklogId": 181, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10051", "id": 10051}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:05Z", "updatedAt": "2021-04-15T18:40:04Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/181/work-attribute-values", "values": []}}, "emitted_at": 1667382542740} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/182", "tempoWorklogId": 182, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10051", "id": 10051}, "timeSpentSeconds": 9120, "billableSeconds": 9120, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:05Z", "updatedAt": "2021-04-15T18:40:05Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/182/work-attribute-values", "values": []}}, "emitted_at": 1667382542741} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/183", "tempoWorklogId": 183, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10051", "id": 10051}, "timeSpentSeconds": 8940, "billableSeconds": 8940, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:40:06Z", "updatedAt": "2021-04-15T18:40:05Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/183/work-attribute-values", "values": []}}, "emitted_at": 1667382542742} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/184", "tempoWorklogId": 184, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10043", "id": 10043}, "timeSpentSeconds": 9660, "billableSeconds": 9660, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:06Z", "updatedAt": "2021-04-15T18:40:06Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/184/work-attribute-values", "values": []}}, "emitted_at": 1667382542743} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/185", "tempoWorklogId": 185, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10042", "id": 10042}, "timeSpentSeconds": 8760, "billableSeconds": 8760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:07Z", "updatedAt": "2021-04-15T18:40:07Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/185/work-attribute-values", "values": []}}, "emitted_at": 1667382542744} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/186", "tempoWorklogId": 186, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10037", "id": 10037}, "timeSpentSeconds": 4260, "billableSeconds": 4260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:08Z", "updatedAt": "2021-04-15T18:40:07Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/186/work-attribute-values", "values": []}}, "emitted_at": 1667382542744} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/187", "tempoWorklogId": 187, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10037", "id": 10037}, "timeSpentSeconds": 4800, "billableSeconds": 4800, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:08Z", "updatedAt": "2021-04-15T18:40:08Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/187/work-attribute-values", "values": []}}, "emitted_at": 1667382542745} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/188", "tempoWorklogId": 188, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10029", "id": 10029}, "timeSpentSeconds": 5160, "billableSeconds": 5160, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:09Z", "updatedAt": "2021-04-15T18:40:09Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/188/work-attribute-values", "values": []}}, "emitted_at": 1667382542746} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/189", "tempoWorklogId": 189, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10029", "id": 10029}, "timeSpentSeconds": 5520, "billableSeconds": 5520, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:10Z", "updatedAt": "2021-04-15T18:40:09Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/189/work-attribute-values", "values": []}}, "emitted_at": 1667382542747} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/190", "tempoWorklogId": 190, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10029", "id": 10029}, "timeSpentSeconds": 4080, "billableSeconds": 4080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:10Z", "updatedAt": "2021-04-15T18:40:10Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/190/work-attribute-values", "values": []}}, "emitted_at": 1667382542748} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/191", "tempoWorklogId": 191, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10029", "id": 10029}, "timeSpentSeconds": 2580, "billableSeconds": 2580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:11Z", "updatedAt": "2021-04-15T18:40:11Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/191/work-attribute-values", "values": []}}, "emitted_at": 1667382542749} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/192", "tempoWorklogId": 192, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10029", "id": 10029}, "timeSpentSeconds": 3120, "billableSeconds": 3120, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:40:12Z", "updatedAt": "2021-04-15T18:40:11Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/192/work-attribute-values", "values": []}}, "emitted_at": 1667382542750} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/193", "tempoWorklogId": 193, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10024", "id": 10024}, "timeSpentSeconds": 9720, "billableSeconds": 9720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:12Z", "updatedAt": "2021-04-15T18:40:12Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/193/work-attribute-values", "values": []}}, "emitted_at": 1667382542751} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/194", "tempoWorklogId": 194, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10024", "id": 10024}, "timeSpentSeconds": 5280, "billableSeconds": 5280, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:13Z", "updatedAt": "2021-04-15T18:40:12Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/194/work-attribute-values", "values": []}}, "emitted_at": 1667382542752} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/195", "tempoWorklogId": 195, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10024", "id": 10024}, "timeSpentSeconds": 7080, "billableSeconds": 7080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:13Z", "updatedAt": "2021-04-15T18:40:13Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/195/work-attribute-values", "values": []}}, "emitted_at": 1667382542753} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/196", "tempoWorklogId": 196, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10021", "id": 10021}, "timeSpentSeconds": 6180, "billableSeconds": 6180, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:14Z", "updatedAt": "2021-04-15T18:40:14Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/196/work-attribute-values", "values": []}}, "emitted_at": 1667382542754} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/197", "tempoWorklogId": 197, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10021", "id": 10021}, "timeSpentSeconds": 4860, "billableSeconds": 4860, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:15Z", "updatedAt": "2021-04-15T18:40:14Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/197/work-attribute-values", "values": []}}, "emitted_at": 1667382542754} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/198", "tempoWorklogId": 198, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10019", "id": 10019}, "timeSpentSeconds": 11340, "billableSeconds": 11340, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:15Z", "updatedAt": "2021-04-15T18:40:15Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/198/work-attribute-values", "values": []}}, "emitted_at": 1667382542755} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/199", "tempoWorklogId": 199, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10017", "id": 10017}, "timeSpentSeconds": 3060, "billableSeconds": 3060, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:16Z", "updatedAt": "2021-04-15T18:40:16Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/199/work-attribute-values", "values": []}}, "emitted_at": 1667382542756} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/200", "tempoWorklogId": 200, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10017", "id": 10017}, "timeSpentSeconds": 4440, "billableSeconds": 4440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:17Z", "updatedAt": "2021-04-15T18:40:16Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/200/work-attribute-values", "values": []}}, "emitted_at": 1667382542757} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/201", "tempoWorklogId": 201, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10013", "id": 10013}, "timeSpentSeconds": 780, "billableSeconds": 780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:17Z", "updatedAt": "2021-04-15T18:40:17Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/201/work-attribute-values", "values": []}}, "emitted_at": 1667382542758} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/202", "tempoWorklogId": 202, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10012", "id": 10012}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:18Z", "updatedAt": "2021-04-15T18:40:17Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/202/work-attribute-values", "values": []}}, "emitted_at": 1667382542759} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/203", "tempoWorklogId": 203, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10012", "id": 10012}, "timeSpentSeconds": 900, "billableSeconds": 900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:40:18Z", "updatedAt": "2021-04-15T18:40:18Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/203/work-attribute-values", "values": []}}, "emitted_at": 1667382542760} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/204", "tempoWorklogId": 204, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10009", "id": 10009}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:19Z", "updatedAt": "2021-04-15T18:40:19Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/204/work-attribute-values", "values": []}}, "emitted_at": 1667382542760} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/205", "tempoWorklogId": 205, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10009", "id": 10009}, "timeSpentSeconds": 3300, "billableSeconds": 3300, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:20Z", "updatedAt": "2021-04-15T18:40:19Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/205/work-attribute-values", "values": []}}, "emitted_at": 1667382542761} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/206", "tempoWorklogId": 206, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10008", "id": 10008}, "timeSpentSeconds": 7440, "billableSeconds": 7440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:20Z", "updatedAt": "2021-04-15T18:40:20Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/206/work-attribute-values", "values": []}}, "emitted_at": 1667382543035} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/207", "tempoWorklogId": 207, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10008", "id": 10008}, "timeSpentSeconds": 10740, "billableSeconds": 10740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:21Z", "updatedAt": "2021-04-15T18:40:21Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/207/work-attribute-values", "values": []}}, "emitted_at": 1667382543036} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/208", "tempoWorklogId": 208, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10008", "id": 10008}, "timeSpentSeconds": 10680, "billableSeconds": 10680, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:40:22Z", "updatedAt": "2021-04-15T18:40:21Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/208/work-attribute-values", "values": []}}, "emitted_at": 1667382543036} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/209", "tempoWorklogId": 209, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10008", "id": 10008}, "timeSpentSeconds": 11820, "billableSeconds": 11820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:22Z", "updatedAt": "2021-04-15T18:40:22Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/209/work-attribute-values", "values": []}}, "emitted_at": 1667382543037} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/210", "tempoWorklogId": 210, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10008", "id": 10008}, "timeSpentSeconds": 9780, "billableSeconds": 9780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:40:23Z", "updatedAt": "2021-04-15T18:40:22Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/210/work-attribute-values", "values": []}}, "emitted_at": 1667382543038} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/211", "tempoWorklogId": 211, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10007", "id": 10007}, "timeSpentSeconds": 7980, "billableSeconds": 7980, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:23Z", "updatedAt": "2021-04-15T18:40:23Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/211/work-attribute-values", "values": []}}, "emitted_at": 1667382543039} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/212", "tempoWorklogId": 212, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10007", "id": 10007}, "timeSpentSeconds": 1200, "billableSeconds": 1200, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:24Z", "updatedAt": "2021-04-15T18:40:24Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/212/work-attribute-values", "values": []}}, "emitted_at": 1667382543040} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/213", "tempoWorklogId": 213, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10007", "id": 10007}, "timeSpentSeconds": 10260, "billableSeconds": 10260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:40:25Z", "updatedAt": "2021-04-15T18:40:24Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/213/work-attribute-values", "values": []}}, "emitted_at": 1667382543041} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/214", "tempoWorklogId": 214, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10007", "id": 10007}, "timeSpentSeconds": 1500, "billableSeconds": 1500, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:25Z", "updatedAt": "2021-04-15T18:40:25Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/214/work-attribute-values", "values": []}}, "emitted_at": 1667382543042} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/215", "tempoWorklogId": 215, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10007", "id": 10007}, "timeSpentSeconds": 11700, "billableSeconds": 11700, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:40:26Z", "updatedAt": "2021-04-15T18:40:25Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/215/work-attribute-values", "values": []}}, "emitted_at": 1667382543043} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/216", "tempoWorklogId": 216, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10001", "id": 10001}, "timeSpentSeconds": 8580, "billableSeconds": 8580, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:27Z", "updatedAt": "2021-04-15T18:40:26Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/216/work-attribute-values", "values": []}}, "emitted_at": 1667382543044} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/217", "tempoWorklogId": 217, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 9900, "billableSeconds": 9900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:27Z", "updatedAt": "2021-04-15T18:40:27Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/217/work-attribute-values", "values": []}}, "emitted_at": 1667382543045} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/218", "tempoWorklogId": 218, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 8100, "billableSeconds": 8100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:40:28Z", "updatedAt": "2021-04-15T18:40:28Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/218/work-attribute-values", "values": []}}, "emitted_at": 1667382543046} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/219", "tempoWorklogId": 219, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 7740, "billableSeconds": 7740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:29Z", "updatedAt": "2021-04-15T18:40:29Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/219/work-attribute-values", "values": []}}, "emitted_at": 1667382543047} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/220", "tempoWorklogId": 220, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10000", "id": 10000}, "timeSpentSeconds": 5460, "billableSeconds": 5460, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:29Z", "updatedAt": "2021-04-15T18:40:29Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/220/work-attribute-values", "values": []}}, "emitted_at": 1667382543047} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/221", "tempoWorklogId": 221, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10081", "id": 10081}, "timeSpentSeconds": 8100, "billableSeconds": 8100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:30Z", "updatedAt": "2021-04-15T18:40:30Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/221/work-attribute-values", "values": []}}, "emitted_at": 1667382543048} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/222", "tempoWorklogId": 222, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10081", "id": 10081}, "timeSpentSeconds": 3300, "billableSeconds": 3300, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:31Z", "updatedAt": "2021-04-15T18:40:30Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/222/work-attribute-values", "values": []}}, "emitted_at": 1667382543049} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/223", "tempoWorklogId": 223, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10081", "id": 10081}, "timeSpentSeconds": 1500, "billableSeconds": 1500, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:40:31Z", "updatedAt": "2021-04-15T18:40:31Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/223/work-attribute-values", "values": []}}, "emitted_at": 1667382543050} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/224", "tempoWorklogId": 224, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10081", "id": 10081}, "timeSpentSeconds": 5880, "billableSeconds": 5880, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:32Z", "updatedAt": "2021-04-15T18:40:32Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/224/work-attribute-values", "values": []}}, "emitted_at": 1667382543051} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/225", "tempoWorklogId": 225, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10078", "id": 10078}, "timeSpentSeconds": 11220, "billableSeconds": 11220, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:33Z", "updatedAt": "2021-04-15T18:40:32Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/225/work-attribute-values", "values": []}}, "emitted_at": 1667382543052} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/226", "tempoWorklogId": 226, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10078", "id": 10078}, "timeSpentSeconds": 1920, "billableSeconds": 1920, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:33Z", "updatedAt": "2021-04-15T18:40:33Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/226/work-attribute-values", "values": []}}, "emitted_at": 1667382543053} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/227", "tempoWorklogId": 227, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10078", "id": 10078}, "timeSpentSeconds": 6900, "billableSeconds": 6900, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:34Z", "updatedAt": "2021-04-15T18:40:33Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/227/work-attribute-values", "values": []}}, "emitted_at": 1667382543054} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/228", "tempoWorklogId": 228, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10071", "id": 10071}, "timeSpentSeconds": 6120, "billableSeconds": 6120, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:34Z", "updatedAt": "2021-04-15T18:40:34Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/228/work-attribute-values", "values": []}}, "emitted_at": 1667382543055} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/229", "tempoWorklogId": 229, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10071", "id": 10071}, "timeSpentSeconds": 4440, "billableSeconds": 4440, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:35Z", "updatedAt": "2021-04-15T18:40:35Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/229/work-attribute-values", "values": []}}, "emitted_at": 1667382543056} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/230", "tempoWorklogId": 230, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10067", "id": 10067}, "timeSpentSeconds": 720, "billableSeconds": 720, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:36Z", "updatedAt": "2021-04-15T18:40:35Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/230/work-attribute-values", "values": []}}, "emitted_at": 1667382543057} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/231", "tempoWorklogId": 231, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10067", "id": 10067}, "timeSpentSeconds": 2400, "billableSeconds": 2400, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:36Z", "updatedAt": "2021-04-15T18:40:36Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/231/work-attribute-values", "values": []}}, "emitted_at": 1667382543058} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/232", "tempoWorklogId": 232, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10067", "id": 10067}, "timeSpentSeconds": 3300, "billableSeconds": 3300, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:37Z", "updatedAt": "2021-04-15T18:40:36Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/232/work-attribute-values", "values": []}}, "emitted_at": 1667382543059} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/233", "tempoWorklogId": 233, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10067", "id": 10067}, "timeSpentSeconds": 1020, "billableSeconds": 1020, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:40:37Z", "updatedAt": "2021-04-15T18:40:37Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/233/work-attribute-values", "values": []}}, "emitted_at": 1667382543059} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/234", "tempoWorklogId": 234, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10067", "id": 10067}, "timeSpentSeconds": 10740, "billableSeconds": 10740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:40:38Z", "updatedAt": "2021-04-15T18:40:38Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/234/work-attribute-values", "values": []}}, "emitted_at": 1667382543060} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/235", "tempoWorklogId": 235, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10057", "id": 10057}, "timeSpentSeconds": 3360, "billableSeconds": 3360, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:39Z", "updatedAt": "2021-04-15T18:40:38Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/235/work-attribute-values", "values": []}}, "emitted_at": 1667382543061} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/236", "tempoWorklogId": 236, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10057", "id": 10057}, "timeSpentSeconds": 2520, "billableSeconds": 2520, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:39Z", "updatedAt": "2021-04-15T18:40:39Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/236/work-attribute-values", "values": []}}, "emitted_at": 1667382543062} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/237", "tempoWorklogId": 237, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10057", "id": 10057}, "timeSpentSeconds": 2220, "billableSeconds": 2220, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:40Z", "updatedAt": "2021-04-15T18:40:39Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/237/work-attribute-values", "values": []}}, "emitted_at": 1667382543063} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/238", "tempoWorklogId": 238, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10057", "id": 10057}, "timeSpentSeconds": 6480, "billableSeconds": 6480, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
3", "createdAt": "2021-04-15T18:40:41Z", "updatedAt": "2021-04-15T18:40:40Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/238/work-attribute-values", "values": []}}, "emitted_at": 1667382543064} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/239", "tempoWorklogId": 239, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10054", "id": 10054}, "timeSpentSeconds": 11880, "billableSeconds": 11880, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:41Z", "updatedAt": "2021-04-15T18:40:41Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/239/work-attribute-values", "values": []}}, "emitted_at": 1667382543065} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/240", "tempoWorklogId": 240, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10054", "id": 10054}, "timeSpentSeconds": 7740, "billableSeconds": 7740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:42Z", "updatedAt": "2021-04-15T18:40:41Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/240/work-attribute-values", "values": []}}, "emitted_at": 1667382543066} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/241", "tempoWorklogId": 241, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10054", "id": 10054}, "timeSpentSeconds": 1740, "billableSeconds": 1740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:42Z", "updatedAt": "2021-04-15T18:40:42Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/241/work-attribute-values", "values": []}}, "emitted_at": 1667382543067} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/242", "tempoWorklogId": 242, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10054", "id": 10054}, "timeSpentSeconds": 1920, "billableSeconds": 1920, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:43Z", "updatedAt": "2021-04-15T18:40:42Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/242/work-attribute-values", "values": []}}, "emitted_at": 1667382543068} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/243", "tempoWorklogId": 243, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10053", "id": 10053}, "timeSpentSeconds": 11220, "billableSeconds": 11220, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:43Z", "updatedAt": "2021-04-15T18:40:43Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/243/work-attribute-values", "values": []}}, "emitted_at": 1667382543068} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/244", "tempoWorklogId": 244, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10053", "id": 10053}, "timeSpentSeconds": 1380, "billableSeconds": 1380, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:44Z", "updatedAt": "2021-04-15T18:40:44Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/244/work-attribute-values", "values": []}}, "emitted_at": 1667382543069} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/245", "tempoWorklogId": 245, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10053", "id": 10053}, "timeSpentSeconds": 7020, "billableSeconds": 7020, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:45Z", "updatedAt": "2021-04-15T18:40:44Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/245/work-attribute-values", "values": []}}, "emitted_at": 1667382543070} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/246", "tempoWorklogId": 246, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10053", "id": 10053}, "timeSpentSeconds": 2820, "billableSeconds": 2820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:45Z", "updatedAt": "2021-04-15T18:40:45Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/246/work-attribute-values", "values": []}}, "emitted_at": 1667382543071} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/247", "tempoWorklogId": 247, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10053", "id": 10053}, "timeSpentSeconds": 1260, "billableSeconds": 1260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 4", "createdAt": "2021-04-15T18:40:46Z", "updatedAt": "2021-04-15T18:40:46Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/247/work-attribute-values", "values": []}}, "emitted_at": 1667382543072} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/248", "tempoWorklogId": 248, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10052", "id": 10052}, "timeSpentSeconds": 3240, "billableSeconds": 3240, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:47Z", "updatedAt": "2021-04-15T18:40:46Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/248/work-attribute-values", "values": []}}, "emitted_at": 1667382543073} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/249", "tempoWorklogId": 249, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10052", "id": 10052}, "timeSpentSeconds": 7740, "billableSeconds": 7740, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:47Z", "updatedAt": "2021-04-15T18:40:47Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/249/work-attribute-values", "values": []}}, "emitted_at": 1667382543074} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/250", "tempoWorklogId": 250, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10052", "id": 10052}, "timeSpentSeconds": 3780, "billableSeconds": 3780, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:48Z", "updatedAt": "2021-04-15T18:40:47Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/250/work-attribute-values", "values": []}}, "emitted_at": 1667382543075} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/251", "tempoWorklogId": 251, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10044", "id": 10044}, "timeSpentSeconds": 10140, "billableSeconds": 10140, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:48Z", "updatedAt": "2021-04-15T18:40:48Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/251/work-attribute-values", "values": []}}, "emitted_at": 1667382543076} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/252", "tempoWorklogId": 252, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10044", "id": 10044}, "timeSpentSeconds": 8820, "billableSeconds": 8820, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:49Z", "updatedAt": "2021-04-15T18:40:49Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/252/work-attribute-values", "values": []}}, "emitted_at": 1667382543077} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/253", "tempoWorklogId": 253, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10044", "id": 10044}, "timeSpentSeconds": 3600, "billableSeconds": 3600, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
2", "createdAt": "2021-04-15T18:40:50Z", "updatedAt": "2021-04-15T18:40:49Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/253/work-attribute-values", "values": []}}, "emitted_at": 1667382543078} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/254", "tempoWorklogId": 254, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10044", "id": 10044}, "timeSpentSeconds": 8760, "billableSeconds": 8760, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:50Z", "updatedAt": "2021-04-15T18:40:50Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/254/work-attribute-values", "values": []}}, "emitted_at": 1667382543079} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/255", "tempoWorklogId": 255, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10038", "id": 10038}, "timeSpentSeconds": 7080, "billableSeconds": 7080, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:51Z", "updatedAt": "2021-04-15T18:40:51Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/255/work-attribute-values", "values": []}}, "emitted_at": 1667382543079} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/256", "tempoWorklogId": 256, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10038", "id": 10038}, "timeSpentSeconds": 8700, "billableSeconds": 8700, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:51Z", "updatedAt": "2021-04-15T18:40:51Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/256/work-attribute-values", "values": []}}, "emitted_at": 1667382543312} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/257", "tempoWorklogId": 257, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10035", "id": 10035}, "timeSpentSeconds": 8340, "billableSeconds": 8340, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:52Z", "updatedAt": "2021-04-15T18:40:52Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/257/work-attribute-values", "values": []}}, "emitted_at": 1667382543313} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/258", "tempoWorklogId": 258, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10035", "id": 10035}, "timeSpentSeconds": 6300, "billableSeconds": 6300, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
1", "createdAt": "2021-04-15T18:40:53Z", "updatedAt": "2021-04-15T18:40:52Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/258/work-attribute-values", "values": []}}, "emitted_at": 1667382543314} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/259", "tempoWorklogId": 259, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10035", "id": 10035}, "timeSpentSeconds": 9420, "billableSeconds": 9420, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:53Z", "updatedAt": "2021-04-15T18:40:53Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/259/work-attribute-values", "values": []}}, "emitted_at": 1667382543315} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/260", "tempoWorklogId": 260, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10034", "id": 10034}, "timeSpentSeconds": 11100, "billableSeconds": 11100, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:54Z", "updatedAt": "2021-04-15T18:40:54Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/260/work-attribute-values", "values": []}}, "emitted_at": 1667382543316} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/261", "tempoWorklogId": 261, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10031", "id": 10031}, "timeSpentSeconds": 3300, "billableSeconds": 3300, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 0", "createdAt": "2021-04-15T18:40:55Z", "updatedAt": "2021-04-15T18:40:54Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/261/work-attribute-values", "values": []}}, "emitted_at": 1667382543317} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/262", "tempoWorklogId": 262, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10031", "id": 10031}, "timeSpentSeconds": 5520, "billableSeconds": 5520, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:55Z", "updatedAt": "2021-04-15T18:40:55Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/262/work-attribute-values", "values": []}}, "emitted_at": 1667382543318} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/263", "tempoWorklogId": 263, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10026", "id": 10026}, "timeSpentSeconds": 9540, "billableSeconds": 9540, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 
0", "createdAt": "2021-04-15T18:40:56Z", "updatedAt": "2021-04-15T18:40:56Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/263/work-attribute-values", "values": []}}, "emitted_at": 1667382543319} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/264", "tempoWorklogId": 264, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10026", "id": 10026}, "timeSpentSeconds": 4260, "billableSeconds": 4260, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 1", "createdAt": "2021-04-15T18:40:57Z", "updatedAt": "2021-04-15T18:40:56Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/264/work-attribute-values", "values": []}}, "emitted_at": 1667382543320} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/265", "tempoWorklogId": 265, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10026", "id": 10026}, "timeSpentSeconds": 4500, "billableSeconds": 4500, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 2", "createdAt": "2021-04-15T18:40:57Z", "updatedAt": "2021-04-15T18:40:57Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/265/work-attribute-values", "values": []}}, "emitted_at": 1667382543320} +{"stream": "worklogs", "data": {"self": "https://api.tempo.io/4/worklogs/266", "tempoWorklogId": 266, "issue": {"self": "https://airbyteio.atlassian.net/rest/api/2/issue/10026", "id": 10026}, "timeSpentSeconds": 5280, "billableSeconds": 5280, "startDate": "2021-04-14", "startTime": "18:48:52", "description": "I did some work here. 3", "createdAt": "2021-04-15T18:40:58Z", "updatedAt": "2021-04-15T18:40:57Z", "author": {"self": "https://airbyteio.atlassian.net/rest/api/2/user?accountId=5fc9e78d2730d800760becc4", "accountId": "5fc9e78d2730d800760becc4"}, "attributes": {"self": "https://api.tempo.io/4/worklogs/266/work-attribute-values", "values": []}}, "emitted_at": 1667382543321} diff --git a/airbyte-integrations/connectors/source-tempo/setup.py b/airbyte-integrations/connectors/source-tempo/setup.py index 1258ccc7e9331..979bef06a5a8b 100644 --- a/airbyte-integrations/connectors/source-tempo/setup.py +++ b/airbyte-integrations/connectors/source-tempo/setup.py @@ -6,8 +6,7 @@ from setuptools import find_packages, setup MAIN_REQUIREMENTS = [ - "airbyte-cdk~=0.1", - "requests==2.25.1", + "airbyte-cdk", ] TEST_REQUIREMENTS = [ diff --git a/airbyte-integrations/connectors/source-tempo/source_tempo/client.py b/airbyte-integrations/connectors/source-tempo/source_tempo/client.py deleted file mode 100644 index 1b4f1c1fc01b3..0000000000000 --- a/airbyte-integrations/connectors/source-tempo/source_tempo/client.py +++ /dev/null @@ -1,65 +0,0 @@ -# -# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
-# - - -from functools import partial -from json import JSONDecodeError -from typing import Mapping, Tuple - -import requests -from airbyte_cdk.sources.deprecated.client import BaseClient -from requests.exceptions import ConnectionError - - -class Client(BaseClient): - """ - Tempo API Reference: https://tempo-io.github.io/tempo-api-docs/ - """ - - API_VERSION = 3 - DEFAULT_ITEMS_PER_PAGE = 100 - - PARAMS = {"limit": DEFAULT_ITEMS_PER_PAGE, "offset": 0} - ENTITIES_MAP = { - "accounts": {"url": "/accounts", "func": lambda v: v["results"], "paginated": False, "params": PARAMS}, - "customers": {"url": "/customers", "func": lambda v: v["results"], "paginated": False, "params": PARAMS}, - "worklogs": {"url": "/worklogs", "func": lambda v: v["results"], "paginated": True, "params": PARAMS}, - "workload-schemes": {"url": "/workload-schemes", "func": lambda v: v["results"], "paginated": True, "params": PARAMS}, - } - - def __init__(self, api_token): - self.headers = {"Authorization": "Bearer " + api_token} - self.base_api_url = f"https://api.tempo.io/core/{self.API_VERSION}" - super().__init__() - - def lists(self, name, url, params, func, **kwargs): - while True: - response = requests.get(f"{self.base_api_url}{url}?limit={params['limit']}&offset={params['offset']}", headers=self.headers) - data = func(response.json()) - yield from data - if not self.ENTITIES_MAP[name]["paginated"] or len(data) < self.DEFAULT_ITEMS_PER_PAGE: - break - params["offset"] += self.DEFAULT_ITEMS_PER_PAGE - - def _enumerate_methods(self) -> Mapping[str, callable]: - return {entity: partial(self.lists, name=entity, **value) for entity, value in self.ENTITIES_MAP.items()} - - def health_check(self) -> Tuple[bool, str]: - alive = True - error_msg = None - # must be implemented later - - try: - next(self.lists(name="workload-schemes", **self.ENTITIES_MAP["workload-schemes"])) - - except ConnectionError as error: - alive, error_msg = False, str(error) - # If the input domain is incorrect or doesn't exist, then the response would be empty, resulting in a JSONDecodeError - except JSONDecodeError: - alive, error_msg = ( - False, - "Unable to connect to the Tempo API with the provided credentials. 
Please make sure the input credentials and environment are correct.", - ) - - return alive, error_msg diff --git a/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/accounts.json b/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/accounts.json index 8a9b88d9b10c8..895424951fb9f 100644 --- a/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/accounts.json +++ b/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/accounts.json @@ -21,19 +21,16 @@ "type": "boolean" }, "monthlyBudget": { - "type": "integer" + "type": ["null", "integer"] }, "lead": { "type": "object", "properties": { "self": { - "type": "string" + "type": ["null", "string"] }, "accountId": { "type": "string" - }, - "displayName": { - "type": "string" } } }, @@ -46,9 +43,6 @@ "accountId": { "type": "string" }, - "displayName": { - "type": "string" - }, "type": { "type": "string" } @@ -64,18 +58,23 @@ "type": "string" }, "id": { - "type": "string" + "type": "integer" }, "name": { "type": "string" }, "type": { - "type": "string" + "type": "object", + "properties": { + "name": { + "type": "string" + } + } } } }, "customer": { - "type": "object", + "type": ["null", "object"], "properties": { "self": { "type": "string" @@ -84,7 +83,7 @@ "type": "string" }, "id": { - "type": "string" + "type": "integer" }, "name": { "type": "string" diff --git a/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/workload-schemes.json b/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/workload-schemes.json index f3e0998fb52ef..e9de873eef588 100644 --- a/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/workload-schemes.json +++ b/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/workload-schemes.json @@ -37,12 +37,15 @@ "type": "array", "description": "The days of the tempo Workload Scheme.", "readOnly": true, - "properties": { - "day": { - "type": "string" - }, - "requiredSeconds": { - "type": "string" + "items": { + "type": "object", + "properties": { + "day": { + "type": "string" + }, + "requiredSeconds": { + "type": "integer" + } } } } diff --git a/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/worklogs.json b/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/worklogs.json index 67909045461f0..d057029b56176 100644 --- a/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/worklogs.json +++ b/airbyte-integrations/connectors/source-tempo/source_tempo/schemas/worklogs.json @@ -13,14 +13,17 @@ "description": "The ID of the tempo worklog.", "readOnly": true }, - "jiraWorklogId": { - "type": "integer", - "description": "The ID of the jira worklog.", - "readOnly": true - }, "issue": { "type": "object", "description": "Details of the issue", + "properties": { + "id": { + "type": "integer" + }, + "self": { + "type": "string" + } + }, "readOnly": true }, "timeSpentSeconds": { @@ -34,12 +37,12 @@ "readOnly": true }, "startTime": { - "type": "string", + "type": ["null", "string"], "description": "Start time of the worklog", "readOnly": true }, "description": { - "type": "string", + "type": ["null", "string"], "description": "Description of the worklog", "readOnly": true }, @@ -56,10 +59,37 @@ "author": { "type": "object", "description": "Author of the worklog", + "properties": { + "accountId": { + "type": "string" + }, + "self": { + "type": ["null", "string"] + } + }, "readOnly": true }, "attributes": { "type": "object", + "properties": { + "self": { + "type": "string" + }, + "values": { + 
"type": ["null", "array"], + "items": { + "type": "object", + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": ["null", "string"] + } + } + } + } + }, "description": "Additional attribute of the worklog", "readOnly": true }, diff --git a/airbyte-integrations/connectors/source-tempo/source_tempo/source.py b/airbyte-integrations/connectors/source-tempo/source_tempo/source.py index e6535976f8088..3b744d6ad807c 100644 --- a/airbyte-integrations/connectors/source-tempo/source_tempo/source.py +++ b/airbyte-integrations/connectors/source-tempo/source_tempo/source.py @@ -2,11 +2,16 @@ # Copyright (c) 2022 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -from airbyte_cdk.sources.deprecated.base_source import BaseSource +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. +WARNING: Do not modify this file. +""" -from .client import Client - -class SourceTempo(BaseSource): - client_class = Client +# Declarative Source +class SourceTempo(YamlDeclarativeSource): + def __init__(self): + super().__init__(path_to_yaml="tempo.yaml") diff --git a/airbyte-integrations/connectors/source-tempo/source_tempo/tempo.yaml b/airbyte-integrations/connectors/source-tempo/source_tempo/tempo.yaml new file mode 100644 index 0000000000000..a1384d3b322d9 --- /dev/null +++ b/airbyte-integrations/connectors/source-tempo/source_tempo/tempo.yaml @@ -0,0 +1,101 @@ +version: "0.1.0" + +definitions: + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_pointer: ["results"] + requester: + type: HttpRequester + name: "{{ options['name'] }}" + url_base: "https://api.tempo.io/4/" + http_method: "GET" + error_handler: + type: CompositeErrorHandler + # ignore 403 error but retry default retriable http errors (429, 500 - 600) + error_handlers: + - type: DefaultErrorHandler + response_filters: + - http_codes: [403] + action: IGNORE + - type: DefaultErrorHandler + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_token'] }}" + retriever: + type: SimpleRetriever + name: "{{ options['name'] }}" + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response['metadata']['next'] }}" + stop_condition: "{{ 'next' not in response['metadata'] }}" + page_size: 50 + page_size_option: + field_name: "limit" + inject_into: "request_parameter" + page_token_option: + inject_into: "path" + url_base: "*ref(definitions.requester.url_base)" + base_stream: + primary_key: "id" + retriever: + $ref: "*ref(definitions.retriever)" + requester: + $ref: "*ref(definitions.requester)" + accounts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "accounts" + path: "accounts" + customers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "customers" + path: "customers" + worklogs_stream: + $ref: "*ref(definitions.base_stream)" + stream_cursor_field: "startDate" + retriever: + $ref: "*ref(definitions.retriever)" + requester: "*ref(definitions.requester)" + stream_slicer: + cursor_field: "startDate" + datetime_format: "%Y-%m-%d" + start_datetime: + datetime: "2020-01-01" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ today_utc() }}" + datetime_format: "%Y-%m-%d" + step: "1w" + end_time_option: + field_name: "to" + inject_into: "request_parameter" + start_time_option: + field_name: 
"from" + inject_into: "request_parameter" + type: DatetimeStreamSlicer + $options: + name: "worklogs" + path: "worklogs" + primary_key: "tempoWorklogId" + workload_schemes_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "workload-schemes" + path: "workload-schemes" + +streams: + - "*ref(definitions.accounts_stream)" + - "*ref(definitions.customers_stream)" + - "*ref(definitions.worklogs_stream)" + - "*ref(definitions.workload_schemes_stream)" + +check: + type: CheckStream + stream_names: ["workload-schemes"] diff --git a/airbyte-integrations/connectors/source-the-guardian-api/.dockerignore b/airbyte-integrations/connectors/source-the-guardian-api/.dockerignore new file mode 100644 index 0000000000000..b63aa473d142b --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_the_guardian_api +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-the-guardian-api/Dockerfile b/airbyte-integrations/connectors/source-the-guardian-api/Dockerfile new file mode 100644 index 0000000000000..97385698dc009 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_the_guardian_api ./source_the_guardian_api + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-the-guardian-api diff --git a/airbyte-integrations/connectors/source-the-guardian-api/README.md b/airbyte-integrations/connectors/source-the-guardian-api/README.md new file mode 100644 index 0000000000000..4bc60225dafd1 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/README.md @@ -0,0 +1,79 @@ +# The Guardian Api Source + +This is the repository for the The Guardian Api configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/the-guardian-api). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-the-guardian-api:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/the-guardian-api) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_the_guardian_api/spec.yaml` file. 
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source the-guardian-api test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector docker image
+
+#### Build
+First, make sure you build the latest Docker image:
+```
+docker build . -t airbyte/source-the-guardian-api:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-the-guardian-api:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-the-guardian-api:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-the-guardian-api:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-the-guardian-api:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-the-guardian-api:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+
+#### Acceptance Tests
+Customize the `acceptance-test-config.yml` file to configure the tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector needs to create or destroy resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
+
+To run your integration tests with Docker, run `./acceptance-test-docker.sh` from the connector directory.
+
+### Using gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-the-guardian-api:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-the-guardian-api:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups. Dependencies that are:
+* required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million-dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
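For local development, a minimal `secrets/config.json` can be generated with a short script. This is only an illustrative sketch: the field names follow `source_the_guardian_api/spec.yaml` and `integration_tests/sample_config.json` added in this PR, and the `api_key` value is a placeholder you must replace with your own key.

```python
# Sketch: write a minimal secrets/config.json for local runs of source-the-guardian-api.
# Field names mirror spec.yaml; only api_key and start_date are required.
import json
import os

config = {
    "api_key": "<YOUR_GUARDIAN_API_KEY>",  # placeholder; see https://open-platform.theguardian.com/access
    "start_date": "2022-10-25",            # YYYY-MM-DD; results older than this are not synced
    "query": "water OR rain",              # optional full-text query
}

os.makedirs("secrets", exist_ok=True)
with open("secrets/config.json", "w") as f:
    json.dump(config, f, indent=2)
```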
diff --git a/airbyte-integrations/connectors/source-the-guardian-api/__init__.py b/airbyte-integrations/connectors/source-the-guardian-api/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml b/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml new file mode 100644 index 0000000000000..0648031a84aa2 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-the-guardian-api:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_the_guardian_api/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md b/airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md new file mode 100644 index 0000000000000..816bdb38ab47c --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/bootstrap.md @@ -0,0 +1,46 @@ +# The Guardian API + +## Overview + +[The Guardian Open Platform](https://open-platform.theguardian.com/) is a public web service for accessing all the content the Guardian creates, categorised by tags and section. To get started, You need a key to successfully authenticate against the API. The Guardian API Connector is implemented with the [Airbyte Low-Code CDK](https://docs.airbyte.com/connector-development/config-based/low-code-cdk-overview). 
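As a point of reference, the request this low-code connector issues can be reproduced directly against the API. The sketch below is illustrative only; the endpoint, parameter names, and the `response.results` envelope are taken from `the_guardian_api.yaml` and `custom_page_strategy.py` later in this PR, and the API key is a placeholder.

```python
# Sketch: fetch one page of content from The Guardian Open Platform,
# mirroring the url_base, request parameters, and record selector used by the connector.
import requests

params = {
    "api-key": "<YOUR_GUARDIAN_API_KEY>",  # placeholder key
    "q": "environment AND political",
    "order-by": "oldest",
    "page": 1,
    "from-date": "2022-10-25",
}
resp = requests.get("https://content.guardianapis.com/search", params=params)
resp.raise_for_status()

payload = resp.json()["response"]  # pagination fields: currentPage, pages
for record in payload["results"]:  # records extracted via field_pointer ["response", "results"]
    print(record["id"], record["webPublicationDate"])
```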
+ +## Output Format + +#### Each content item has the following structure:- + +```yaml +{ + "id": "string", + "type": "string" + "sectionId": "string" + "sectionName": "string" + "webPublicationDate": "string" + "webTitle": "string" + "webUrl": "string" + "apiUrl": "string" + "isHosted": "boolean" + "pillarId": "string" + "pillarName": "string" +} +``` + +**Description:-** + +**webPublicationDate**: The combined date and time of publication +**webUrl**: The URL of the html content +**apiUrl**: The URL of the raw content + +## Core Streams + +Connector supports the `content` stream that returns all pieces of content in the API. + +## Rate Limiting + +The key that you are assigned is rate-limited and as such any applications that depend on making large numbers of requests on a polling basis are likely to exceed their daily quota and thus be prevented from making further requests until the next period begins. + +## Authentication and Permissions + +To access the API, you will need to sign up for an API key, which should be sent with every request. Visit [this](https://open-platform.theguardian.com/access) link to get an API key. +The easiest way to see what data is included is to explore the data. You can build complex queries quickly and browse the results. Visit [this](https://open-platform.theguardian.com/explore) link to explore the data. + +See [this](https://docs.airbyte.io/integrations/sources/the-guardian-api) link for the connector docs. diff --git a/airbyte-integrations/connectors/source-the-guardian-api/build.gradle b/airbyte-integrations/connectors/source-the-guardian-api/build.gradle new file mode 100644 index 0000000000000..7148bffa332d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_the_guardian_api' +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/__init__.py b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..39645fa011f22 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "content": { + "webPublicationDate": "2123-10-31T10:10:10Z" + } +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..23bee6c786302 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "content", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..8cab379396176 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/invalid_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "", + "query": "water OR rain", + "start_date": "2022-10-25" +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/sample_config.json new file mode 100644 index 0000000000000..eb40df2dabcab --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/sample_config.json @@ -0,0 +1,5 @@ +{ + "api_key": "", + "query": "water OR rain OR thunder", + "start_date": "2022-10-25" +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/sample_state.json new file mode 100644 index 0000000000000..aba11a25ab694 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "content": { + "webPublicationDate": "2022-10-25T10:10:10Z" + } +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/main.py b/airbyte-integrations/connectors/source-the-guardian-api/main.py new file mode 100644 index 0000000000000..dfcf2b6a88c5e --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_the_guardian_api import SourceTheGuardianApi + +if __name__ == "__main__": + source = SourceTheGuardianApi() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-the-guardian-api/requirements.txt b/airbyte-integrations/connectors/source-the-guardian-api/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
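Besides the docker commands in the README, the connector can also be exercised in-process through the same entrypoint that `main.py` wires up. The snippet below is a sketch and assumes `secrets/config.json` already exists (see the README section above).

```python
# Sketch: run the standard Airbyte `check` command in-process,
# reusing the launch() entrypoint imported by main.py.
from airbyte_cdk.entrypoint import launch
from source_the_guardian_api import SourceTheGuardianApi

if __name__ == "__main__":
    # Equivalent to: python main.py check --config secrets/config.json
    launch(SourceTheGuardianApi(), ["check", "--config", "secrets/config.json"])
```

The same pattern works for `spec`, `discover`, and `read`; `read` additionally needs `--catalog integration_tests/configured_catalog.json`.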
diff --git a/airbyte-integrations/connectors/source-the-guardian-api/setup.py b/airbyte-integrations/connectors/source-the-guardian-api/setup.py new file mode 100644 index 0000000000000..3f15ab9cf2522 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_the_guardian_api", + description="Source implementation for The Guardian Api.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py new file mode 100644 index 0000000000000..68d4ea3908649 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceTheGuardianApi + +__all__ = ["SourceTheGuardianApi"] diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/custom_page_strategy.py b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/custom_page_strategy.py new file mode 100644 index 0000000000000..e8c02d9529be7 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/custom_page_strategy.py @@ -0,0 +1,32 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from dataclasses import dataclass +from typing import Any, List, Mapping, Optional + +import requests +from airbyte_cdk.sources.declarative.requesters.paginators.strategies.page_increment import PageIncrement + + +@dataclass +class CustomPageIncrement(PageIncrement): + """ + Starts page from 1 instead of the default value that is 0. Stops Pagination when currentPage is equal to totalPages. 
+ """ + + def next_page_token(self, response: requests.Response, last_records: List[Mapping[str, Any]]) -> Optional[Any]: + res = response.json().get("response") + currPage = res.get("currentPage") + totalPages = res.get("pages") + if currPage < totalPages: + self._page += 1 + return self._page + else: + return None + + def __post_init__(self, options: Mapping[str, Any]): + self._page = 1 + + def reset(self): + self._page = 1 diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json new file mode 100644 index 0000000000000..3751793358c4f --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/schemas/content.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "sectionId": { + "type": "string" + }, + "sectionName": { + "type": "string" + }, + "webPublicationDate": { + "type": "string" + }, + "webTitle": { + "type": "string" + }, + "webUrl": { + "type": "string" + }, + "apiUrl": { + "type": "string" + }, + "isHosted": { + "type": "boolean" + }, + "pillarId": { + "type": "string" + }, + "pillarName": { + "type": "string" + } + }, + "required": [ + "id", + "type", + "sectionId", + "sectionName", + "webPublicationDate", + "webTitle", + "webUrl", + "apiUrl", + "isHosted", + "pillarId", + "pillarName" + ] +} diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py new file mode 100644 index 0000000000000..19ae06a52dae8 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceTheGuardianApi(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "the_guardian_api.yaml"}) diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml new file mode 100644 index 0000000000000..b9e0e1f3a7671 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/spec.yaml @@ -0,0 +1,54 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/the-guardian-api +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: The Guardian Api Spec + type: object + required: + - api_key + - start_date + additionalProperties: true + properties: + api_key: + title: API Key + type: string + description: Your API Key. See here. The key is case sensitive. + airbyte_secret: true + start_date: + title: Start Date + type: string + description: Use this to set the minimum date (YYYY-MM-DD) of the results. Results older than the start_date will not be shown. 
+ pattern: ^([1-9][0-9]{3})\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])$ + examples: + - YYYY-MM-DD + query: + title: Query + type: string + description: (Optional) The query (q) parameter filters the results to only those that include that search term. The q parameter supports AND, OR and NOT operators. + examples: + - environment AND NOT water + - environment AND political + - amusement park + - political + tag: + title: Tag + type: string + description: (Optional) A tag is a piece of data that is used by The Guardian to categorise content. Use this parameter to filter results by showing only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation. + examples: + - environment/recycling + - environment/plasticbags + - environment/energyefficiency + section: + title: Section + type: string + description: (Optional) Use this to filter the results by a particular section. See here for a list of all sections, and here for the sections endpoint documentation. + examples: + - media + - technology + - housing-network + end_date: + title: End Date + type: string + description: (Optional) Use this to set the maximum date (YYYY-MM-DD) of the results. Results newer than the end_date will not be shown. Default is set to the current date (today) for incremental syncs. + pattern: ^([1-9][0-9]{3})\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])$ + examples: + - YYYY-MM-DD diff --git a/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/the_guardian_api.yaml b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/the_guardian_api.yaml new file mode 100644 index 0000000000000..83a87e1d49cd0 --- /dev/null +++ b/airbyte-integrations/connectors/source-the-guardian-api/source_the_guardian_api/the_guardian_api.yaml @@ -0,0 +1,74 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: + - response + - results + requester: + url_base: "https://content.guardianapis.com" + http_method: "GET" + request_options_provider: + request_parameters: + api-key: "{{ config['api_key'] }}" + q: "{{ config['query'] }}" + tag: "{{ config['tag'] }}" + section: "{{ config['section'] }}" + order-by: "oldest" + # from-date: "{{ config['start_date'] }}" + # to-date: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" + stream_slicer: + type: "DatetimeStreamSlicer" + start_datetime: + datetime: "{{ config['start_date'] }}" + datetime_format: "%Y-%m-%d" + end_datetime: + datetime: "{{ config['end_date'] or now_utc().strftime('%Y-%m-%d') }}" + datetime_format: "%Y-%m-%d" + step: "7d" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_field: "{{ options['stream_cursor_field'] }}" + start_time_option: + field_name: "from-date" + inject_into: "request_parameter" + end_time_option: + field_name: "to-date" + inject_into: "request_parameter" + + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + class_name: "source_the_guardian_api.custom_page_strategy.CustomPageIncrement" + page_size: 10 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "body_data" + field_name: "page_size" + requester: + $ref: "*ref(definitions.requester)" + stream_slicer: + $ref: "*ref(definitions.stream_slicer)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + content_stream: + $ref: "*ref(definitions.base_stream)" + $options: 
+ name: "content" + primary_key: "id" + path: "/search" + stream_cursor_field: "webPublicationDate" + +streams: + - "*ref(definitions.content_stream)" + +check: + stream_names: + - "content" diff --git a/airbyte-integrations/connectors/source-tidb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tidb/acceptance-test-config.yml new file mode 100644 index 0000000000000..193e2c9541998 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/acceptance-test-config.yml @@ -0,0 +1,7 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-tidb:dev +tests: + spec: + - spec_path: "src/test-integration/resources/expected_spec.json" + config_path: "src/test-integration/resources/dummy_config.json" diff --git a/airbyte-integrations/connectors/source-tidb/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-tidb/acceptance-test-docker.sh new file mode 100644 index 0000000000000..ba0ab2874b989 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2):dev + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-tidb/build.gradle b/airbyte-integrations/connectors/source-tidb/build.gradle old mode 100755 new mode 100644 index 7676d78d77afd..6f1001121a8d1 --- a/airbyte-integrations/connectors/source-tidb/build.gradle +++ b/airbyte-integrations/connectors/source-tidb/build.gradle @@ -2,6 +2,7 @@ plugins { id 'application' id 'airbyte-docker' id 'airbyte-integration-test-java' + id 'airbyte-source-acceptance-test' } application { @@ -33,4 +34,4 @@ dependencies { implementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) integrationTestJavaImplementation files(project(':airbyte-integrations:bases:base-java').airbyteDocker.outputs) -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-tidb/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-tidb/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/dummy_config.json b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/dummy_config.json new file mode 100644 index 0000000000000..560e553333780 --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/dummy_config.json @@ -0,0 +1,6 @@ +{ + "host": "default", + "port": 5555, + "database": "default", + "username": "default" +} diff --git a/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/expected_spec.json b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/expected_spec.json new file mode 100644 index 0000000000000..395e18282687d --- /dev/null +++ b/airbyte-integrations/connectors/source-tidb/src/test-integration/resources/expected_spec.json @@ -0,0 +1,176 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/tidb", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "TiDB Source Spec", + "type": "object", + "required": ["host", "port", "database", "username"], + "properties": { + "host": { + "description": "Hostname of the database.", + "title": "Host", + "type": "string", + "order": 0 + }, + "port": { + "description": "Port of the database.", + "title": "Port", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 4000, + "examples": ["4000"], + "order": 1 + }, + "database": { + "description": "Name of the database.", + "title": "Database", + "type": "string", + "order": 2 + }, + "username": { + "description": "Username to use to access the database.", + "title": "Username", + "type": "string", + "order": 3 + }, + "password": { + "description": "Password associated with the username.", + "title": "Password", + "type": "string", + "airbyte_secret": true, + "order": 4 + }, + "jdbc_url_params": { + "description": "Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)", + "title": "JDBC URL Params", + "type": "string", + "order": 5 + }, + "ssl": { + "title": "SSL Connection", + "description": "Encrypt data using SSL.", + "type": "boolean", + "default": false, + "order": 6 + }, + "tunnel_method": { + "type": "object", + "title": "SSH Tunnel Method", + "description": "Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.", + "oneOf": [ + { + "title": "No Tunnel", + "required": ["tunnel_method"], + "properties": { + "tunnel_method": { + "description": "No ssh tunnel needed to connect to database", + "type": "string", + "const": "NO_TUNNEL", + "order": 0 + } + } + }, + { + "title": "SSH Key Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "ssh_key" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and ssh key", + "type": "string", + "const": "SSH_KEY_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host.", + "type": "string", + "order": 3 + }, + "ssh_key": { + "title": "SSH Private Key", + "description": "OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )", + "type": "string", + "airbyte_secret": true, + "multiline": true, + "order": 4 + } + } + }, + { + "title": "Password Authentication", + "required": [ + "tunnel_method", + "tunnel_host", + "tunnel_port", + "tunnel_user", + "tunnel_user_password" + ], + "properties": { + "tunnel_method": { + "description": "Connect through a jump server tunnel host using username and password authentication", + "type": "string", + "const": "SSH_PASSWORD_AUTH", + "order": 0 + }, + "tunnel_host": { + "title": "SSH Tunnel Jump Server Host", + "description": "Hostname of the jump server host that allows inbound ssh tunnel.", + "type": "string", + "order": 1 + }, + "tunnel_port": { + "title": "SSH Connection Port", + "description": "Port on the proxy/jump server that accepts inbound ssh connections.", + "type": "integer", + "minimum": 0, + "maximum": 65536, + "default": 22, + "examples": ["22"], + "order": 2 + }, + "tunnel_user": { + "title": "SSH Login Username", + "description": "OS-level username for logging into the jump server host", + "type": "string", + "order": 3 + }, + "tunnel_user_password": { + "title": "Password", + "description": "OS-level password for logging into the jump server host", + "type": "string", + "airbyte_secret": true, + "order": 4 + } + } + } + ] + } + } + }, + "supportsNormalization": false, + "supportsDBT": false, + "supported_destination_sync_modes": [] +} diff --git a/airbyte-integrations/connectors/source-tmdb/.dockerignore b/airbyte-integrations/connectors/source-tmdb/.dockerignore new file mode 100644 index 0000000000000..a8550643f9a2d --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_tmdb 
+!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-tmdb/Dockerfile b/airbyte-integrations/connectors/source-tmdb/Dockerfile new file mode 100644 index 0000000000000..38dccf4f61f2a --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_tmdb ./source_tmdb + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-tmdb diff --git a/airbyte-integrations/connectors/source-tmdb/README.md b/airbyte-integrations/connectors/source-tmdb/README.md new file mode 100644 index 0000000000000..a13681405f9a3 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/README.md @@ -0,0 +1,104 @@ +# Tmdb Source + +This is the repository for the Tmdb configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/tmdb). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt + +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-tmdb:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/tmdb) +to generate the necessary credentials. 
Then create a file `secrets/config.json` conforming to the `source_tmdb/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source tmdb test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-tmdb:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-tmdb:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-tmdb:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-tmdb:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-tmdb:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-tmdb:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-tmdb:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-tmdb:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
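In addition to the Docker commands above, the connector can be exercised in-process from the virtualenv, which is often quicker when debugging. The sketch below simply mirrors the `main.py` added later in this diff and assumes the `secrets/config.json` described earlier; it is a local convenience, not part of the connector.

```
# debug.py (hypothetical helper next to main.py): run the source without Docker.
import sys

from airbyte_cdk.entrypoint import launch
from source_tmdb import SourceTmdb

if __name__ == "__main__":
    # Examples:
    #   python debug.py spec
    #   python debug.py check --config secrets/config.json
    #   python debug.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
    launch(SourceTmdb(), sys.argv[1:])
```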
diff --git a/airbyte-integrations/connectors/source-tmdb/__init__.py b/airbyte-integrations/connectors/source-tmdb/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml new file mode 100644 index 0000000000000..f1d7a04303672 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-tmdb:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_tmdb/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-tmdb/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-tmdb/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-tmdb/bootstrap.md b/airbyte-integrations/connectors/source-tmdb/bootstrap.md new file mode 100644 index 0000000000000..c88370a864353 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/bootstrap.md @@ -0,0 +1,35 @@ +# TMDb + +The connector uses the v3 API documented here: https://developers.themoviedb.org/3/getting-started/introduction. It is +straightforward HTTP REST API with API Authentication. + +## API key + +Api key is mandate for this connector to work. It could be generated using a free account at TMDb. 
Visit: https://www.themoviedb.org/settings/api + +## Implementation details + +## Setup guide + +### Step 1: Set up TMDb connection + +- Have an API key ready by generating a personal API key (example: 12345) +- A movie ID or query can be configured in config.json (not mandatory; the default `movie_id` is 550 and the default query is "marvel") +- See sample_config.json for more details + +## Step 2: Generate schema for the endpoint + +### Custom schemas are generated and tested with different IDs + +## Step 3: Configure the spec, secrets, and connector YAML files with reference to the Airbyte documentation. + +## In a nutshell: + +1. Navigate to the Airbyte Open Source dashboard. +2. Set the name for your source. +3. Enter your `api_key`. +4. Enter the `movie_id`, `query`, and `language` params (if needed). +5. Click **Set up source**. + + * Only GET methods are used; all streams are straightforward. + diff --git a/airbyte-integrations/connectors/source-tmdb/build.gradle b/airbyte-integrations/connectors/source-tmdb/build.gradle new file mode 100644 index 0000000000000..c92a8722601e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_tmdb' +} diff --git a/airbyte-integrations/connectors/source-tmdb/integration_tests/__init__.py b/airbyte-integrations/connectors/source-tmdb/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-tmdb/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-tmdb/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-tmdb/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed.
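(To make the bootstrap notes above concrete, here is a standalone request against the v3 API using the defaults mentioned there. The `https://api.themoviedb.org/3` base URL and the response's `title` field come from the TMDb docs rather than this diff, so treat the snippet as an illustration of the auth model, not as connector code.)

```
# Quick sanity check of a TMDb api_key outside the connector (illustrative only).
import requests

API_KEY = "<your api_key>"  # generated at https://www.themoviedb.org/settings/api
MOVIE_ID = 550              # the connector's default movie_id

resp = requests.get(
    f"https://api.themoviedb.org/3/movie/{MOVIE_ID}",
    params={"api_key": API_KEY, "language": "en-US"},
    timeout=30,
)
resp.raise_for_status()
print(resp.json().get("title"))  # a 401 here usually means the key is wrong or inactive
```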
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-tmdb/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-tmdb/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..6258861321b93 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/configured_catalog.json @@ -0,0 +1,292 @@ +{ + "streams": [ + { + "stream": { + "name": "certification_movie", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "certification_tv", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "changes_movie", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "changes_tv", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "changes_person", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_details", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_alternative_titles", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_credits", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_external_ids", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_images", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_keywords", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_latest", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_lists", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_now_playing", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_popular", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_recommendations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + 
"name": "movies_releases_dates", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_reviews", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_similar_movies", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_top_rated", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_translations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_upcoming", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_videos", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "movies_watch_providers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "trending", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_companies", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_collections", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_keywords", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_movies", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_multi", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_people", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "search_tv_shows", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-tmdb/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-tmdb/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..8ffef2b9a48a0 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "", + "movie_id": "", + "query": "", + "language": "" +} diff --git 
a/airbyte-integrations/connectors/source-tmdb/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-tmdb/integration_tests/sample_config.json new file mode 100644 index 0000000000000..059ab7f6fbe17 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "", + "movie_id": "550", + "query": "Marvel", + "language": "" +} diff --git a/airbyte-integrations/connectors/source-tmdb/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-tmdb/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/main.py b/airbyte-integrations/connectors/source-tmdb/main.py new file mode 100644 index 0000000000000..a6b8fe5dd5e4b --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_tmdb import SourceTmdb + +if __name__ == "__main__": + source = SourceTmdb() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-tmdb/requirements.txt b/airbyte-integrations/connectors/source-tmdb/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-tmdb/setup.py b/airbyte-integrations/connectors/source-tmdb/setup.py new file mode 100644 index 0000000000000..87b62e457a442 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_tmdb", + description="Source implementation for Tmdb.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/__init__.py b/airbyte-integrations/connectors/source-tmdb/source_tmdb/__init__.py new file mode 100644 index 0000000000000..9ce4b7b1c09e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
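The per-stream JSON schemas that follow (`certification_movie`, `certification_tv`, `changes_*`, `movies_*`) are large hand-written drafts, so it can help to validate a saved API payload against them while editing. A minimal sketch using the `jsonschema` package, which is an assumption here since it is not declared in `setup.py` above; the sample record reuses the example values from `changes_movie.json` below.

```
# Validate a sample payload against one of the connector's stream schemas (dev-only helper).
import json

from jsonschema import Draft7Validator  # assumed extra dev dependency

with open("source_tmdb/schemas/changes_movie.json") as f:
    schema = json.load(f)

sample_record = {
    "results": [{"id": 412683, "adult": False}],
    "page": 1,
    "total_pages": 10,
    "total_results": 952,
}

Draft7Validator.check_schema(schema)  # raises if the schema itself is malformed
for error in Draft7Validator(schema).iter_errors(sample_record):
    print(error.message)              # no output means the record conforms
```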
+# + + +from .source import SourceTmdb + +__all__ = ["SourceTmdb"] diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/certification_movie.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/certification_movie.json new file mode 100644 index 0000000000000..cee425a104603 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/certification_movie.json @@ -0,0 +1,607 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667208363.json", + "title": "Root", + "type": "object", + "required": ["certifications"], + "properties": { + "certifications": { + "$id": "#root/certifications", + "title": "Certifications", + "type": "object", + "required": [ + "US", + "CA", + "AU", + "DE", + "FR", + "NZ", + "IN", + "GB", + "NL", + "BR", + "FI", + "BG", + "ES", + "PH", + "PT" + ], + "properties": { + "US": { + "$id": "#root/certifications/US", + "title": "Us", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/US/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/US/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["G"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/US/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "All ages admitted. There is no content that would be objectionable to most parents. This is one of only two ratings dating back to 1968 that still exists today." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/US/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "CA": { + "$id": "#root/certifications/CA", + "title": "Ca", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/CA/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/CA/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["18A"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/CA/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Persons under 18 years of age must be accompanied by an adult. In the Maritimes & Manitoba, children under the age of 14 are prohibited from viewing the film." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/CA/items/order", + "title": "Order", + "type": "integer", + "examples": [4], + "default": 0 + } + } + } + }, + "AU": { + "$id": "#root/certifications/AU", + "title": "Au", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/AU/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/AU/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["E"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/AU/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Exempt from classification. Films that are exempt from classification must not contain contentious material (i.e. 
material that would ordinarily be rated M or higher)." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/AU/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "DE": { + "$id": "#root/certifications/DE", + "title": "De", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/DE/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/DE/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["0"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/DE/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["No age restriction."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/DE/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "FR": { + "$id": "#root/certifications/FR", + "title": "Fr", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/FR/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/FR/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["U"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/FR/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["(Tous publics) valid for all audiences."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/FR/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "NZ": { + "$id": "#root/certifications/NZ", + "title": "Nz", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/NZ/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/NZ/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["M"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/NZ/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Suitable for (but not restricted to) mature audiences 16 years and up." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/NZ/items/order", + "title": "Order", + "type": "integer", + "examples": [3], + "default": 0 + } + } + } + }, + "IN": { + "$id": "#root/certifications/IN", + "title": "In", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/IN/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/IN/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["U"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/IN/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Unrestricted Public Exhibition throughout India, suitable for all age groups. Films under this category should not upset children over 4. Such films may contain educational, social or family-oriented themes. 
Films under this category may also contain fantasy violence and/or mild bad language." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/IN/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "GB": { + "$id": "#root/certifications/GB", + "title": "Gb", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/GB/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/GB/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["15"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/GB/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Only those over 15 years are admitted. Nobody younger than 15 can rent or buy a 15-rated VHS, DVD, Blu-ray Disc, UMD or game, or watch a film in the cinema with this rating. Films under this category can contain adult themes, hard drugs, frequent strong language and limited use of very strong language, strong violence and strong sex references, and nudity without graphic detail. Sexual activity may be portrayed but without any strong detail. Sexual violence may be shown if discreet and justified by context." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/GB/items/order", + "title": "Order", + "type": "integer", + "examples": [5], + "default": 0 + } + } + } + }, + "NL": { + "$id": "#root/certifications/NL", + "title": "Nl", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/NL/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/NL/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["AL"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/NL/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["All ages."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/NL/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "BR": { + "$id": "#root/certifications/BR", + "title": "Br", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/BR/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/BR/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["L"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/BR/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "General Audiences. Do not expose children to potentially harmful content." 
+ ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/BR/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "FI": { + "$id": "#root/certifications/FI", + "title": "Fi", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/FI/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/FI/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["S"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/FI/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["For all ages."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/FI/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "BG": { + "$id": "#root/certifications/BG", + "title": "Bg", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/BG/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/BG/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["A"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/BG/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["Recommended for children."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/BG/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "ES": { + "$id": "#root/certifications/ES", + "title": "Es", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/ES/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/ES/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["APTA"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/ES/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["General admission."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/ES/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "PH": { + "$id": "#root/certifications/PH", + "title": "Ph", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/PH/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/PH/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["G"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/PH/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "General Audiences. Viewers of all ages are admitted." 
+ ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/PH/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "PT": { + "$id": "#root/certifications/PT", + "title": "Pt", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/PT/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/PT/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["Públicos"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/PT/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "For all the public (especially designed for children under 3 years of age)." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/PT/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/certification_tv.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/certification_tv.json new file mode 100644 index 0000000000000..747281523ccd7 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/certification_tv.json @@ -0,0 +1,398 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667208548.json", + "title": "Root", + "type": "object", + "required": ["certifications"], + "properties": { + "certifications": { + "$id": "#root/certifications", + "title": "Certifications", + "type": "object", + "required": ["RU", "US", "CA", "AU", "FR", "DE", "TH", "KR", "GB", "BR"], + "properties": { + "RU": { + "$id": "#root/certifications/RU", + "title": "Ru", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/RU/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/RU/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["18+"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/RU/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["Restricted to People 18 or Older."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/RU/items/order", + "title": "Order", + "type": "integer", + "examples": [5], + "default": 0 + } + } + } + }, + "US": { + "$id": "#root/certifications/US", + "title": "Us", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/US/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/US/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["NR"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/US/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["No rating information."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/US/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "CA": { + "$id": "#root/certifications/CA", + "title": "Ca", + "type": "array", + "default": 
[], + "items": { + "$id": "#root/certifications/CA/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/CA/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["Exempt"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/CA/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Shows which are exempt from ratings (such as news and sports programming) will not display an on-screen rating at all." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/CA/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "AU": { + "$id": "#root/certifications/AU", + "title": "Au", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/AU/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/AU/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["P"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/AU/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Programming is intended for younger children 2–11; commercial stations must show at least 30 minutes of P-rated content each weekday and weekends at all times. No advertisements may be shown during P-rated programs." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/AU/items/order", + "title": "Order", + "type": "integer", + "examples": [1], + "default": 0 + } + } + } + }, + "FR": { + "$id": "#root/certifications/FR", + "title": "Fr", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/FR/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/FR/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["NR"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/FR/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["No rating information."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/FR/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "DE": { + "$id": "#root/certifications/DE", + "title": "De", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/DE/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/DE/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["0"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/DE/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["Can be aired at any time."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/DE/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "TH": { + "$id": "#root/certifications/TH", + "title": "Th", + "type": "array", + "default": [], + "items": { + "$id": 
"#root/certifications/TH/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/TH/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["ส"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/TH/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "Sor - Educational movies which the public should be encouraged to see." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/TH/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "KR": { + "$id": "#root/certifications/KR", + "title": "Kr", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/KR/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/KR/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["Exempt"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/KR/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "This rating is only for knowledge based game shows; lifestyle shows; documentary shows; news; current topic discussion shows; education/culture shows; sports that excludes MMA or other violent sports; and other programs that Korea Communications Standards Commission recognizes." + ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/KR/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "GB": { + "$id": "#root/certifications/GB", + "title": "Gb", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/GB/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/GB/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["U"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/GB/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": [ + "The U symbol stands for Universal. A U film should be suitable for audiences aged four years and over." 
+ ], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/GB/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + }, + "BR": { + "$id": "#root/certifications/BR", + "title": "Br", + "type": "array", + "default": [], + "items": { + "$id": "#root/certifications/BR/items", + "title": "Items", + "type": "object", + "required": ["certification", "meaning", "order"], + "properties": { + "certification": { + "$id": "#root/certifications/BR/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "examples": ["L"], + "pattern": "^.*$" + }, + "meaning": { + "$id": "#root/certifications/BR/items/meaning", + "title": "Meaning", + "type": "string", + "default": "", + "examples": ["Content is suitable for all audiences."], + "pattern": "^.*$" + }, + "order": { + "$id": "#root/certifications/BR/items/order", + "title": "Order", + "type": "integer", + "examples": [0], + "default": 0 + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_movie.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_movie.json new file mode 100644 index 0000000000000..304c2ef408935 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_movie.json @@ -0,0 +1,59 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667208732.json", + "title": "Root", + "type": "object", + "required": ["results", "page", "total_pages", "total_results"], + "properties": { + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "required": ["id", "adult"], + "properties": { + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "examples": [412683], + "default": 0 + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": ["null", "boolean"], + "examples": [false], + "default": true + } + } + } + }, + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "examples": [1], + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "examples": [10], + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "examples": [952], + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_person.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_person.json new file mode 100644 index 0000000000000..502820ab85e22 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_person.json @@ -0,0 +1,59 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667210538.json", + "title": "Root", + "type": "object", + "required": ["results", "page", "total_pages", "total_results"], + "properties": { + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "required": ["id", "adult"], + "properties": { + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "examples": [1670120], + "default": 0 + }, + "adult": { + "$id": 
"#root/results/items/adult", + "title": "Adult", + "type": ["null", "boolean"], + "examples": [false], + "default": true + } + } + } + }, + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "examples": [1], + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "examples": [7], + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "examples": [620], + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_tv.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_tv.json new file mode 100644 index 0000000000000..c69374a964af8 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/changes_tv.json @@ -0,0 +1,59 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667208773.json", + "title": "Root", + "type": "object", + "required": ["results", "page", "total_pages", "total_results"], + "properties": { + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "required": ["id", "adult"], + "properties": { + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "examples": [67563], + "default": 0 + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": ["null", "boolean"], + "examples": [false], + "default": true + } + } + } + }, + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "examples": [1], + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "examples": [2], + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "examples": [122], + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_alternative_titles.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_alternative_titles.json new file mode 100644 index 0000000000000..fd8dcc1f142df --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_alternative_titles.json @@ -0,0 +1,49 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666884919.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "titles": { + "$id": "#root/titles", + "title": "Titles", + "type": ["array", "null", "object"], + "default": [], + "items": { + "$id": "#root/titles/items", + "title": "Items", + "type": ["array", "null", "object"], + "properties": { + "iso_3166_1": { + "$id": "#root/titles/items/iso_3166_1", + "title": "Iso_3166_1", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/titles/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "type": { + "$id": "#root/titles/items/type", + "title": "Type", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_changes.json 
b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_changes.json new file mode 100644 index 0000000000000..be71c6b7dfa08 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_changes.json @@ -0,0 +1,85 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1668092844.json", + "title": "Root", + "type": "object", + "properties": { + "key": { + "$id": "#root/changes/items/key", + "title": "Key", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "items": { + "$id": "#root/changes/items/items", + "title": "Items", + "type": "array", + "default": [], + "items": { + "$id": "#root/changes/items/items/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/changes/items/items/items/id", + "title": "Id", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "action": { + "$id": "#root/changes/items/items/items/action", + "title": "Action", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "time": { + "$id": "#root/changes/items/items/items/time", + "title": "Time", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "iso_639_1": { + "$id": "#root/changes/items/items/items/iso_639_1", + "title": "Iso_639_1", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "iso_3166_1": { + "$id": "#root/changes/items/items/items/iso_3166_1", + "title": "Iso_3166_1", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "value": { + "$id": "#root/changes/items/items/items/value", + "title": "Value", + "type": "object", + "properties": { + "poster": { + "$id": "#root/changes/items/items/items/value/poster", + "title": "Poster", + "type": "object", + "properties": { + "file_path": { + "$id": "#root/changes/items/items/items/value/poster/file_path", + "title": "File_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_credits.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_credits.json new file mode 100644 index 0000000000000..610312ca8df1d --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_credits.json @@ -0,0 +1,192 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666852792.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "cast": { + "$id": "#root/cast", + "title": "Cast", + "type": "array", + "default": [], + "items": { + "$id": "#root/cast/items", + "title": "Items", + "type": "object", + "properties": { + "adult": { + "$id": "#root/cast/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "gender": { + "$id": "#root/cast/items/gender", + "title": "Gender", + "type": "integer", + "default": 0 + }, + "id": { + "$id": "#root/cast/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "known_for_department": { + "$id": "#root/cast/items/known_for_department", + "title": "Known_for_department", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/cast/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_name": { + 
"$id": "#root/cast/items/original_name", + "title": "Original_name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/cast/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "profile_path": { + "$id": "#root/cast/items/profile_path", + "title": "Profile_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "cast_id": { + "$id": "#root/cast/items/cast_id", + "title": "Cast_id", + "type": "integer", + "default": 0 + }, + "character": { + "$id": "#root/cast/items/character", + "title": "Character", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "credit_id": { + "$id": "#root/cast/items/credit_id", + "title": "Credit_id", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "order": { + "$id": "#root/cast/items/order", + "title": "Order", + "type": "integer", + "default": 0 + } + } + } + }, + "crew": { + "$id": "#root/crew", + "title": "Crew", + "type": "array", + "default": [], + "items": { + "$id": "#root/crew/items", + "title": "Items", + "type": "object", + "properties": { + "adult": { + "$id": "#root/crew/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "gender": { + "$id": "#root/crew/items/gender", + "title": "Gender", + "type": "integer", + "default": 0 + }, + "id": { + "$id": "#root/crew/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "known_for_department": { + "$id": "#root/crew/items/known_for_department", + "title": "Known_for_department", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/crew/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_name": { + "$id": "#root/crew/items/original_name", + "title": "Original_name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/crew/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "profile_path": { + "$id": "#root/crew/items/profile_path", + "title": "Profile_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "credit_id": { + "$id": "#root/crew/items/credit_id", + "title": "Credit_id", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "department": { + "$id": "#root/crew/items/department", + "title": "Department", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "job": { + "$id": "#root/crew/items/job", + "title": "Job", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_details.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_details.json new file mode 100644 index 0000000000000..952d7122cf75c --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_details.json @@ -0,0 +1,265 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666852195.json", + "title": "Root", + "type": "object", + "properties": { + "adult": { + "$id": "#root/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "backdrop_path": { + "$id": "#root/backdrop_path", + "title": "Backdrop_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "belongs_to_collection": { + "$id": "#root/belongs_to_collection", + "title": "Belongs_to_collection", + "type": ["null", 
"object"], + "default": null + }, + "budget": { + "$id": "#root/budget", + "title": "Budget", + "type": "integer", + "default": 0 + }, + "genres": { + "$id": "#root/genres", + "title": "Genres", + "type": ["null", "array"], + "default": [], + "items": { + "$id": "#root/genres/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/genres/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/genres/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "homepage": { + "$id": "#root/homepage", + "title": "Homepage", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "imdb_id": { + "$id": "#root/imdb_id", + "title": "Imdb_id", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "overview": { + "$id": "#root/overview", + "title": "Overview", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "poster_path": { + "$id": "#root/poster_path", + "title": "Poster_path", + "type": ["string", "null"], + "default": null + }, + "production_companies": { + "$id": "#root/production_companies", + "title": "Production_companies", + "type": ["null", "array"], + "default": [], + "items": { + "$id": "#root/production_companies/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/production_companies/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/production_companies/items/logo_path", + "title": "Logo_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/production_companies/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "origin_country": { + "$id": "#root/production_companies/items/origin_country", + "title": "Origin_country", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "production_countries": { + "$id": "#root/production_countries", + "title": "Production_countries", + "type": ["null", "array"], + "default": [], + "items": { + "$id": "#root/production_countries/items", + "title": "Items", + "type": "object", + "properties": { + "iso_3166_1": { + "$id": "#root/production_countries/items/iso_3166_1", + "title": "Iso_3166_1", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/production_countries/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "release_date": { + "$id": "#root/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "revenue": { + "$id": "#root/revenue", + "title": "Revenue", + "type": "integer", + "default": 0 + }, + "runtime": { + "$id": "#root/runtime", + "title": "Runtime", + "type": ["null", "integer"], + "default": 0 + }, + "spoken_languages": { + "$id": "#root/spoken_languages", + "title": "Spoken_languages", + "type": ["null", "array"], + 
"default": [], + "items": { + "$id": "#root/spoken_languages/items", + "title": "Items", + "type": "object", + "properties": { + "iso_639_1": { + "$id": "#root/spoken_languages/items/iso_639_1", + "title": "Iso_639_1", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/spoken_languages/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "status": { + "$id": "#root/status", + "title": "Status", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "tagline": { + "$id": "#root/tagline", + "title": "Tagline", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "video": { + "$id": "#root/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": "#root/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_external_ids.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_external_ids.json new file mode 100644 index 0000000000000..fd4484d863d40 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_external_ids.json @@ -0,0 +1,43 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666852857.json", + "title": "Root", + "type": "object", + "properties": { + "imdb_id": { + "$id": "#root/imdb_id", + "title": "Imdb_id", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "facebook_id": { + "$id": "#root/facebook_id", + "title": "Facebook_id", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "instagram_id": { + "$id": "#root/instagram_id", + "title": "Instagram_id", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "twitter_id": { + "$id": "#root/twitter_id", + "title": "Twitter_id", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_images.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_images.json new file mode 100644 index 0000000000000..326d7af979b3f --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_images.json @@ -0,0 +1,128 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666852902.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "backdrops": { + "$id": "#root/backdrops", + "title": "Backdrops", + "type": "array", + "default": [], + "items": { + "$id": "#root/backdrops/items", + "title": "Items", + "type": "object", + "properties": { + "aspect_ratio": { + "$id": "#root/backdrops/items/aspect_ratio", + "title": "Aspect_ratio", + "type": "number", + "default": 0.0 + }, + "file_path": { + "$id": "#root/backdrops/items/file_path", + "title": "File_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "height": { + "$id": "#root/backdrops/items/height", + 
"title": "Height", + "type": "integer", + "default": 0 + }, + "iso_639_1": { + "$id": "#root/backdrops/items/iso_639_1", + "title": "Iso_639_1", + "type": ["string", "null"], + "default": null + }, + "vote_average": { + "$id": "#root/backdrops/items/vote_average", + "title": "Vote_average", + "type": ["number", "integer", "string"], + "default": 0 + }, + "vote_count": { + "$id": "#root/backdrops/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "width": { + "$id": "#root/backdrops/items/width", + "title": "Width", + "type": "integer", + "default": 0 + } + } + } + }, + "posters": { + "$id": "#root/posters", + "title": "Posters", + "type": "array", + "default": [], + "items": { + "$id": "#root/posters/items", + "title": "Items", + "type": "object", + "properties": { + "aspect_ratio": { + "$id": "#root/posters/items/aspect_ratio", + "title": "Aspect_ratio", + "type": "number", + "default": 0.0 + }, + "file_path": { + "$id": "#root/posters/items/file_path", + "title": "File_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "height": { + "$id": "#root/posters/items/height", + "title": "Height", + "type": "integer", + "default": 0 + }, + "iso_639_1": { + "$id": "#root/posters/items/iso_639_1", + "title": "Iso_639_1", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "vote_average": { + "$id": "#root/posters/items/vote_average", + "title": "Vote_average", + "type": ["number", "integer", "string"], + "default": 0 + }, + "vote_count": { + "$id": "#root/posters/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "width": { + "$id": "#root/posters/items/width", + "title": "Width", + "type": "integer", + "default": 0 + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_keywords.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_keywords.json new file mode 100644 index 0000000000000..83e8980715d1f --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_keywords.json @@ -0,0 +1,41 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666852923.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "keywords": { + "$id": "#root/keywords", + "title": "Keywords", + "type": "array", + "default": [], + "items": { + "$id": "#root/keywords/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/keywords/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/keywords/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_latest.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_latest.json new file mode 100644 index 0000000000000..bf2a00a01644d --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_latest.json @@ -0,0 +1,189 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853268.json", + "title": "Root", + "type": "object", + "properties": { + "adult": { + "$id": "#root/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "backdrop_path": { + "$id": 
"#root/backdrop_path", + "title": "Backdrop_path", + "type": ["string", "null"], + "default": null + }, + "belongs_to_collection": { + "$id": "#root/belongs_to_collection", + "title": "Belongs_to_collection", + "type": ["string", "null"], + "default": null + }, + "budget": { + "$id": "#root/budget", + "title": "Budget", + "type": "integer", + "default": 0 + }, + "genres": { + "$id": "#root/genres", + "title": "Genres", + "type": "array", + "default": [], + "items": { + "$id": "#root/genres/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/genres/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/genres/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "homepage": { + "$id": "#root/homepage", + "title": "Homepage", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "imdb_id": { + "$id": "#root/imdb_id", + "title": "Imdb_id", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "overview": { + "$id": "#root/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/popularity", + "title": "Popularity", + "type": "integer", + "default": 0 + }, + "poster_path": { + "$id": "#root/poster_path", + "title": "Poster_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "production_companies": { + "$id": "#root/production_companies", + "title": "Production_companies", + "type": "array", + "default": [] + }, + "production_countries": { + "$id": "#root/production_countries", + "title": "Production_countries", + "type": "array", + "default": [] + }, + "release_date": { + "$id": "#root/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "revenue": { + "$id": "#root/revenue", + "title": "Revenue", + "type": "integer", + "default": 0 + }, + "runtime": { + "$id": "#root/runtime", + "title": "Runtime", + "type": "integer", + "default": 0 + }, + "spoken_languages": { + "$id": "#root/spoken_languages", + "title": "Spoken_languages", + "type": "array", + "default": [] + }, + "status": { + "$id": "#root/status", + "title": "Status", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "tagline": { + "$id": "#root/tagline", + "title": "Tagline", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "video": { + "$id": "#root/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/vote_average", + "title": "Vote_average", + "type": "integer", + "default": 0 + }, + "vote_count": { + "$id": "#root/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_lists.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_lists.json new file mode 100644 index 0000000000000..89da7dd053f98 --- 
/dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_lists.json @@ -0,0 +1,208 @@ +{ + "id": 550, + "page": 1, + "results": [ + { + "description": "Pick one and then one or two alternates", + "favorite_count": 0, + "id": 7213, + "item_count": 1358, + "iso_639_1": "en", + "list_type": "movie", + "name": "Movie Night", + "poster_path": "/p60VSQL7usdxztIGokJPpHmKWdU.jpg" + }, + { + "description": "Your favorite and classic movies of the 1990s", + "favorite_count": 0, + "id": 6968, + "item_count": 482, + "iso_639_1": "en", + "list_type": "movie", + "name": "1990", + "poster_path": "/5R68Xn4EG0g5qlaQxpE89zGrtes.jpg" + }, + { + "description": "All the movies I own on vhs/dvd/bluray or digital.", + "favorite_count": 0, + "id": 9660, + "item_count": 3299, + "iso_639_1": "en", + "list_type": "movie", + "name": "My Film Collection", + "poster_path": "/3TsnEEkBRfQ1b7Gi2FTsuxHFUZq.jpg" + }, + { + "description": "", + "favorite_count": 0, + "id": 8085, + "item_count": 23, + "iso_639_1": "en", + "list_type": "movie", + "name": "Twists and Shouts", + "poster_path": "/drzZQfdyoAq7pXq4EPI2aZKyGD5.jpg" + }, + { + "description": "", + "favorite_count": 0, + "id": 12031, + "item_count": 475, + "iso_639_1": "fr", + "list_type": "movie", + "name": "Films", + "poster_path": null + }, + { + "description": "List maintained by Movie Collection, the iOS app.", + "favorite_count": 0, + "id": 1718, + "item_count": 610, + "iso_639_1": "en", + "list_type": "movie", + "name": "Movie Collection", + "poster_path": null + }, + { + "description": "", + "favorite_count": 0, + "id": 11052, + "item_count": 478, + "iso_639_1": "en", + "list_type": "movie", + "name": "Movies", + "poster_path": null + }, + { + "description": "Weekly Box Office Nº1 from 1982", + "favorite_count": 0, + "id": 5576, + "item_count": 647, + "iso_639_1": "es", + "list_type": "movie", + "name": "Weekly Box Office Nº1 from 1982", + "poster_path": "/yTpKlgaNw3fEBndyDYPg3hRloR5.jpg" + }, + { + "description": "", + "favorite_count": 0, + "id": 4023, + "item_count": 1072, + "iso_639_1": "it", + "list_type": "movie", + "name": "Dvd collection", + "poster_path": "/sHGO5Ct2Vo0KPVnSEmSf3Zo5MIl.jpg" + }, + { + "description": "Películas para recomendar", + "favorite_count": 0, + "id": 10354, + "item_count": 33, + "iso_639_1": "en", + "list_type": "movie", + "name": "Peliculones", + "poster_path": null + }, + { + "description": "", + "favorite_count": 0, + "id": 10141, + "item_count": 304, + "iso_639_1": "en", + "list_type": "movie", + "name": "R3ST", + "poster_path": null + }, + { + "description": "", + "favorite_count": 0, + "id": 5829, + "item_count": 396, + "iso_639_1": "de", + "list_type": "movie", + "name": "Criss Filme", + "poster_path": "/gnNU653Lm0cCpdYDaOgjTREf6JI.jpg" + }, + { + "description": "movies that I've watched", + "favorite_count": 0, + "id": 6357, + "item_count": 1690, + "iso_639_1": "en", + "list_type": "movie", + "name": "watched", + "poster_path": "/7o7fCNaruJGrTjWpmmK2jj8rE7Q.jpg" + }, + { + "description": "", + "favorite_count": 0, + "id": 9267, + "item_count": 121, + "iso_639_1": "en", + "list_type": "movie", + "name": "Dangerous Hill Classics", + "poster_path": "/f7KYM1uO8f2duhoSY3Bps55mIe.jpg" + }, + { + "description": "List maintained by Movie Collection, the iOS app.", + "favorite_count": 0, + "id": 3136, + "item_count": 185, + "iso_639_1": "en", + "list_type": "movie", + "name": "Movie Collection", + "poster_path": "/koLwUfgmlT2kr0twamPJ8l6eQIp.jpg" + }, + { + "description": "", + "favorite_count": 0, 
+ "id": 4557, + "item_count": 591, + "iso_639_1": "hu", + "list_type": "movie", + "name": "Watched", + "poster_path": "/43pgav2uIG0ZNGdcaZX1v073ITv.jpg" + }, + { + "description": "", + "favorite_count": 0, + "id": 12467, + "item_count": 13, + "iso_639_1": "en", + "list_type": "movie", + "name": "Bradd", + "poster_path": null + }, + { + "description": "", + "favorite_count": 0, + "id": 4280, + "item_count": 390, + "iso_639_1": "it", + "list_type": "movie", + "name": "MY COLLECTION", + "poster_path": "/noLXB6fiDAl9uC6O2W26kCDXIFl.jpg" + }, + { + "description": "Movies that I have watched.", + "favorite_count": 0, + "id": 10482, + "item_count": 567, + "iso_639_1": "en", + "list_type": "movie", + "name": "Watched", + "poster_path": null + }, + { + "description": "DVDs I have", + "favorite_count": 0, + "id": 11773, + "item_count": 1158, + "iso_639_1": "en", + "list_type": "movie", + "name": "DVD Collection", + "poster_path": null + } + ], + "total_pages": 18, + "total_results": 358 +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_now_playing.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_now_playing.json new file mode 100644 index 0000000000000..87054cdca26ed --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_now_playing.json @@ -0,0 +1,158 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853486.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": 
"#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + } + } + } + }, + "dates": { + "$id": "#root/dates", + "title": "Dates", + "type": "object", + "properties": { + "maximum": { + "$id": "#root/dates/maximum", + "title": "Maximum", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "minimum": { + "$id": "#root/dates/minimum", + "title": "Minimum", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_popular.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_popular.json new file mode 100644 index 0000000000000..67ae3473ae8cc --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_popular.json @@ -0,0 +1,137 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853522.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": 
"Vote_count", + "type": "integer", + "default": 0 + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + } + } + } + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_recommendations.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_recommendations.json new file mode 100644 index 0000000000000..ef3808248ed2d --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_recommendations.json @@ -0,0 +1,135 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666852992.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": ["string", "null"], + "default": null + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": ["string", "null"], + "default": null + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": ["number", "integer", "string"], + "default": 0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": 
"Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_releases_dates.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_releases_dates.json new file mode 100644 index 0000000000000..6e24834e3d7f2 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_releases_dates.json @@ -0,0 +1,75 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853037.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "iso_3166_1": { + "$id": "#root/results/items/iso_3166_1", + "title": "Iso_3166_1", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_dates": { + "$id": "#root/results/items/release_dates", + "title": "Release_dates", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/release_dates/items", + "title": "Items", + "type": "object", + "properties": { + "certification": { + "$id": "#root/results/items/release_dates/items/certification", + "title": "Certification", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "iso_639_1": { + "$id": "#root/results/items/release_dates/items/iso_639_1", + "title": "Iso_639_1", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_dates/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "type": { + "$id": "#root/results/items/release_dates/items/type", + "title": "Type", + "type": "integer", + "default": 0 + } + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_reviews.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_reviews.json new file mode 100644 index 0000000000000..6ec310e343a07 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_reviews.json @@ -0,0 +1,122 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853293.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "author": { + "$id": "#root/results/items/author", + "title": "Author", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "author_details": { + "$id": "#root/results/items/author_details", + "title": "Author_details", + "type": "object", + "properties": { + "name": { + "$id": "#root/results/items/author_details/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "username": { + "$id": "#root/results/items/author_details/username", + 
"title": "Username", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "avatar_path": { + "$id": "#root/results/items/author_details/avatar_path", + "title": "Avatar_path", + "type": ["null", "string", "integer"], + "default": "", + "pattern": "^.*$" + }, + "rating": { + "$id": "#root/results/items/author_details/rating", + "title": "Rating", + "type": ["null", "string", "integer"], + "default": null + } + } + }, + "content": { + "$id": "#root/results/items/content", + "title": "Content", + "type": "string", + "default": "", + "pattern": "^.*" + }, + "created_at": { + "$id": "#root/results/items/created_at", + "title": "Created_at", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "updated_at": { + "$id": "#root/results/items/updated_at", + "title": "Updated_at", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "url": { + "$id": "#root/results/items/url", + "title": "Url", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_similar_movies.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_similar_movies.json new file mode 100644 index 0000000000000..5af5feb86db23 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_similar_movies.json @@ -0,0 +1,135 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853122.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": ["string", "null"], + "default": null + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": 
"Poster_path", + "type": ["string", "null"], + "default": null + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": ["number", "integer", "string"], + "default": 0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_top_rated.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_top_rated.json new file mode 100644 index 0000000000000..5c415fc648e19 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_top_rated.json @@ -0,0 +1,327 @@ +{ + "page": 1, + "results": [ + { + "poster_path": "/9O7gLzmreU0nGkIB6K3BsJbzvNv.jpg", + "adult": false, + "overview": "Framed in the 1940s for the double murder of his wife and her lover, upstanding banker Andy Dufresne begins a new life at the Shawshank prison, where he puts his accounting skills to work for an amoral warden. During his long stretch in prison, Dufresne comes to be admired by the other inmates -- including an older prisoner named Red -- for his integrity and unquenchable sense of hope.", + "release_date": "1994-09-10", + "genre_ids": [18, 80], + "id": 278, + "original_title": "The Shawshank Redemption", + "original_language": "en", + "title": "The Shawshank Redemption", + "backdrop_path": "/xBKGJQsAIeweesB79KC89FpBrVr.jpg", + "popularity": 6.741296, + "vote_count": 5238, + "video": false, + "vote_average": 8.32 + }, + { + "poster_path": "/lIv1QinFqz4dlp5U4lQ6HaiskOZ.jpg", + "adult": false, + "overview": "Under the direction of a ruthless instructor, a talented young drummer begins to pursue perfection at any cost, even his humanity.", + "release_date": "2014-10-10", + "genre_ids": [18, 10402], + "id": 244786, + "original_title": "Whiplash", + "original_language": "en", + "title": "Whiplash", + "backdrop_path": "/6bbZ6XyvgfjhQwbplnUh1LSj1ky.jpg", + "popularity": 10.776056, + "vote_count": 2059, + "video": false, + "vote_average": 8.29 + }, + { + "poster_path": "/d4KNaTrltq6bpkFS01pYtyXa09m.jpg", + "adult": false, + "overview": "The story spans the years from 1945 to 1955 and chronicles the fictional Italian-American Corleone crime family. 
When organized crime family patriarch Vito Corleone barely survives an attempt on his life, his youngest son, Michael, steps in to take care of the would-be killers, launching a campaign of bloody revenge.", + "release_date": "1972-03-15", + "genre_ids": [18, 80], + "id": 238, + "original_title": "The Godfather", + "original_language": "en", + "title": "The Godfather", + "backdrop_path": "/6xKCYgH16UuwEGAyroLU6p8HLIn.jpg", + "popularity": 4.554654, + "vote_count": 3570, + "video": false, + "vote_average": 8.26 + }, + { + "poster_path": "/ynXoOxmDHNQ4UAy0oU6avW71HVW.jpg", + "adult": false, + "overview": "Spirited Away is an Oscar winning Japanese animated film about a ten year old girl who wanders away from her parents along a path that leads to a world ruled by strange and unusual monster-like animals. Her parents have been changed into pigs along with others inside a bathhouse full of these creatures. Will she ever see the world how it once was?", + "release_date": "2001-07-20", + "genre_ids": [14, 12, 16, 10751], + "id": 129, + "original_title": "千と千尋の神隠し", + "original_language": "ja", + "title": "Spirited Away", + "backdrop_path": "/djgM2d3e42p9GFQObg6lwK2SVw2.jpg", + "popularity": 6.886678, + "vote_count": 2000, + "video": false, + "vote_average": 8.15 + }, + { + "poster_path": "/nBNZadXqJSdt05SHLqgT0HuC5Gm.jpg", + "adult": false, + "overview": "Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.", + "release_date": "2014-11-05", + "genre_ids": [12, 18, 878], + "id": 157336, + "original_title": "Interstellar", + "original_language": "en", + "title": "Interstellar", + "backdrop_path": "/xu9zaAevzQ5nnrsXN6JcahLnG4i.jpg", + "popularity": 12.481061, + "vote_count": 5600, + "video": false, + "vote_average": 8.12 + }, + { + "poster_path": "/tHbMIIF51rguMNSastqoQwR0sBs.jpg", + "adult": false, + "overview": "The continuing saga of the Corleone crime family tells the story of a young Vito Corleone growing up in Sicily and in 1910s New York; and follows Michael Corleone in the 1950s as he attempts to expand the family business into Las Vegas, Hollywood and Cuba", + "release_date": "1974-12-20", + "genre_ids": [18, 80], + "id": 240, + "original_title": "The Godfather: Part II", + "original_language": "en", + "title": "The Godfather: Part II", + "backdrop_path": "/gLbBRyS7MBrmVUNce91Hmx9vzqI.jpg", + "popularity": 4.003715, + "vote_count": 1894, + "video": false, + "vote_average": 8.1 + }, + { + "poster_path": "/4mFsNQwbD0F237Tx7gAPotd0nbJ.jpg", + "adult": false, + "overview": "A true story of two men who should never have met - a quadriplegic aristocrat who was injured in a paragliding accident and a young man from the projects.", + "release_date": "2011-11-02", + "genre_ids": [18, 35], + "id": 77338, + "original_title": "Intouchables", + "original_language": "fr", + "title": "The Intouchables", + "backdrop_path": "/ihWaJZCUIon2dXcosjQG2JHJAPN.jpg", + "popularity": 3.698279, + "vote_count": 2740, + "video": false, + "vote_average": 8.1 + }, + { + "poster_path": "/bwVhmPpydv8P7mWfrmL3XVw0MV5.jpg", + "adult": false, + "overview": "In the latter part of World War II, a boy and his sister, orphaned when their mother is killed in the firebombing of Tokyo, are left to survive on their own in what remains of civilian life in Japan. 
The plot follows this boy and his sister as they do their best to survive in the Japanese countryside, battling hunger, prejudice, and pride in their own quiet, personal battle.", + "release_date": "1988-04-16", + "genre_ids": [16, 18, 10751, 10752], + "id": 12477, + "original_title": "火垂るの墓", + "original_language": "ja", + "title": "Grave of the Fireflies", + "backdrop_path": "/fCUIuG7y4YKC3hofZ8wsj7zhCpR.jpg", + "popularity": 1.001401, + "vote_count": 430, + "video": false, + "vote_average": 8.07 + }, + { + "poster_path": "/yPisjyLweCl1tbgwgtzBCNCBle.jpg", + "adult": false, + "overview": "Told from the perspective of businessman Oskar Schindler who saved over a thousand Jewish lives from the Nazis while they worked as slaves in his factory. Schindler’s List is based on a true story, illustrated in black and white and controversially filmed in many original locations.", + "release_date": "1993-11-29", + "genre_ids": [18, 36, 10752], + "id": 424, + "original_title": "Schindler's List", + "original_language": "en", + "title": "Schindler's List", + "backdrop_path": "/rIpSszng8P0DL0TimSzZbpfnvh1.jpg", + "popularity": 5.372319, + "vote_count": 2308, + "video": false, + "vote_average": 8.07 + }, + { + "poster_path": "/eqFckcHuFCT1FrzLOAvXBb4jHwq.jpg", + "adult": false, + "overview": "Jack is a young boy of 5 years old who has lived all his life in one room. He believes everything within it are the only real things in the world. But what will happen when his Ma suddenly tells him that there are other things outside of Room?", + "release_date": "2015-10-16", + "genre_ids": [18, 53], + "id": 264644, + "original_title": "Room", + "original_language": "en", + "title": "Room", + "backdrop_path": "/tBhp8MGaiL3BXpPCSl5xY397sGH.jpg", + "popularity": 5.593128, + "vote_count": 1179, + "video": false, + "vote_average": 8.06 + }, + { + "poster_path": "/f7DImXDebOs148U4uPjI61iDvaK.jpg", + "adult": false, + "overview": "A touching story of an Italian book seller of Jewish ancestry who lives in his own little fairy tale. His creative and happy life would come to an abrupt halt when his entire family is deported to a concentration camp during World War II. While locked up he tries to convince his son that the whole thing is just a game.", + "release_date": "1997-12-20", + "genre_ids": [35, 18], + "id": 637, + "original_title": "La vita è bella", + "original_language": "it", + "title": "Life Is Beautiful", + "backdrop_path": "/bORe0eI72D874TMawOOFvqWS6Xe.jpg", + "popularity": 5.385594, + "vote_count": 1593, + "video": false, + "vote_average": 8.06 + }, + { + "poster_path": "/s0C78plmx3dFcO3WMnoXCz56FiN.jpg", + "adult": false, + "overview": "A boy growing up in Dublin during the 1980s escapes his strained family life by starting a band to impress the mysterious girl he likes.", + "release_date": "2016-04-15", + "genre_ids": [10749, 18, 10402], + "id": 369557, + "original_title": "Sing Street", + "original_language": "en", + "title": "Sing Street", + "backdrop_path": "/9j4UaRypr19wz0BOofwvkPRm1Se.jpg", + "popularity": 3.343073, + "vote_count": 61, + "video": false, + "vote_average": 8.06 + }, + { + "poster_path": "/1hRoyzDtpgMU7Dz4JF22RANzQO7.jpg", + "adult": false, + "overview": "Batman raises the stakes in his war on crime. With the help of Lt. Jim Gordon and District Attorney Harvey Dent, Batman sets out to dismantle the remaining criminal organizations that plague the streets. 
The partnership proves to be effective, but they soon find themselves prey to a reign of chaos unleashed by a rising criminal mastermind known to the terrified citizens of Gotham as the Joker.", + "release_date": "2008-07-16", + "genre_ids": [18, 28, 80, 53], + "id": 155, + "original_title": "The Dark Knight", + "original_language": "en", + "title": "The Dark Knight", + "backdrop_path": "/nnMC0BM6XbjIIrT4miYmMtPGcQV.jpg", + "popularity": 8.090715, + "vote_count": 7744, + "video": false, + "vote_average": 8.06 + }, + { + "poster_path": "/811DjJTon9gD6hZ8nCjSitaIXFQ.jpg", + "adult": false, + "overview": "A ticking-time-bomb insomniac and a slippery soap salesman channel primal male aggression into a shocking new form of therapy. Their concept catches on, with underground \"fight clubs\" forming in every town, until an eccentric gets in the way and ignites an out-of-control spiral toward oblivion.", + "release_date": "1999-10-14", + "genre_ids": [18], + "id": 550, + "original_title": "Fight Club", + "original_language": "en", + "title": "Fight Club", + "backdrop_path": "/8uO0gUM8aNqYLs1OsTBQiXu0fEv.jpg", + "popularity": 6.590102, + "vote_count": 5221, + "video": false, + "vote_average": 8.05 + }, + { + "poster_path": "/dM2w364MScsjFf8pfMbaWUcWrR.jpg", + "adult": false, + "overview": "A burger-loving hit man, his philosophical partner, a drug-addled gangster's moll and a washed-up boxer converge in this sprawling, comedic crime caper. Their adventures unfurl in three stories that ingeniously trip back and forth in time.", + "release_date": "1994-10-14", + "genre_ids": [53, 80], + "id": 680, + "original_title": "Pulp Fiction", + "original_language": "en", + "title": "Pulp Fiction", + "backdrop_path": "/mte63qJaVnoxkkXbHkdFujBnBgd.jpg", + "popularity": 7.760216, + "vote_count": 4722, + "video": false, + "vote_average": 8.04 + }, + { + "poster_path": "/gzlJkVfWV5VEG5xK25cvFGJgkDz.jpg", + "adult": false, + "overview": "Ashitaka, a prince of the disappearing Ainu tribe, is cursed by a demonized boar god and must journey to the west to find a cure. Along the way, he encounters San, a young human woman fighting to protect the forest, and Lady Eboshi, who is trying to destroy it. Ashitaka must find a way to bring balance to this conflict.", + "release_date": "1997-07-12", + "genre_ids": [12, 14, 16], + "id": 128, + "original_title": "もののけ姫", + "original_language": "ja", + "title": "Princess Mononoke", + "backdrop_path": "/dB2rATwfCbsPGfRLIoluBnKdVHb.jpg", + "popularity": 4.672361, + "vote_count": 954, + "video": false, + "vote_average": 8.04 + }, + { + "poster_path": "/3TpMBcAYH4cxCw5WoRacWodMTCG.jpg", + "adult": false, + "overview": "An urban office worker finds that paper airplanes are instrumental in meeting a girl in ways he never expected.", + "release_date": "2012-11-02", + "genre_ids": [16, 10751, 10749], + "id": 140420, + "original_title": "Paperman", + "original_language": "en", + "title": "Paperman", + "backdrop_path": "/cqn1ynw78Wan37jzs1Ckm7va97G.jpg", + "popularity": 2.907096, + "vote_count": 452, + "video": false, + "vote_average": 8.03 + }, + { + "poster_path": "/pwpGfTImTGifEGgLb3s6LRPd4I6.jpg", + "adult": false, + "overview": "Henry Hill is a small time gangster, who takes part in a robbery with Jimmy Conway and Tommy De Vito, two other gangsters who have set their sights a bit higher. His two partners kill off everyone else involved in the robbery, and slowly start to climb up through the hierarchy of the Mob. 
Henry, however, is badly affected by his partners success, but will he stoop low enough to bring about the downfall of Jimmy and Tommy?", + "release_date": "1990-09-12", + "genre_ids": [18, 80], + "id": 769, + "original_title": "Goodfellas", + "original_language": "en", + "title": "Goodfellas", + "backdrop_path": "/xDEOxA01480uLTWuvQCw61VmDBt.jpg", + "popularity": 3.783589, + "vote_count": 1528, + "video": false, + "vote_average": 8.02 + }, + { + "poster_path": "/z4ROnCrL77ZMzT0MsNXY5j25wS2.jpg", + "adult": false, + "overview": "A man with a low IQ has accomplished great things in his life and been present during significant historic events - in each case, far exceeding what anyone imagined he could do. Yet, despite all the things he has attained, his one true love eludes him. 'Forrest Gump' is the story of a man who rose above his challenges, and who proved that determination, courage, and love are more important than ability.", + "release_date": "1994-07-06", + "genre_ids": [35, 18, 10749], + "id": 13, + "original_title": "Forrest Gump", + "original_language": "en", + "title": "Forrest Gump", + "backdrop_path": "/ctOEhQiFIHWkiaYp7b0ibSTe5IL.jpg", + "popularity": 6.224491, + "vote_count": 4279, + "video": false, + "vote_average": 8.02 + }, + { + "poster_path": "/5hqbJSmtAimbaP3XcYshCixuUtk.jpg", + "adult": false, + "overview": "A veteran samurai, who has fallen on hard times, answers a village's request for protection from bandits. He gathers 6 other samurai to help him, and they teach the townspeople how to defend themselves, and they supply the samurai with three small meals a day. The film culminates in a giant battle when 40 bandits attack the village.", + "release_date": "1954-04-26", + "genre_ids": [28, 18], + "id": 346, + "original_title": "七人の侍", + "original_language": "ja", + "title": "Seven Samurai", + "backdrop_path": "/61vLiK96sbXeHpQiMxI4CuqBA3z.jpg", + "popularity": 2.93856, + "vote_count": 436, + "video": false, + "vote_average": 8.02 + } + ], + "total_results": 5206, + "total_pages": 261 +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_translations.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_translations.json new file mode 100644 index 0000000000000..dc6364ad6b63c --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_translations.json @@ -0,0 +1,456 @@ +{ + "id": 550, + "translations": [ + { + "iso_3166_1": "UA", + "iso_639_1": "uk", + "name": "Український", + "english_name": "Ukrainian", + "data": { + "title": "Бійцівський клуб", + "overview": "Джек, страждає хронічним безсонням і відчайдушно намагається вирватися з нудного життя. Саме тоді він зустрічає Тайлера Дердена, торговця із збоченою філософією. Тайлер впевнений, що самовдосконалення - доля слабких, а саморуйнування - єдине, заради чого варто жити. І ось вже Джек і Тайлер луплять один одному пики на стоянці перед баром, що приносить їм неабияке задоволення. Залучаючи інших чоловіків до простих радощів фізичної жорстокості, Джек і Тайлер засновують таємний Бійцівський Клуб, який має величезний успіх. Але Джека чекає шокуюче відкриття, здатне змінити все...", + "homepage": "" + } + }, + { + "iso_3166_1": "DE", + "iso_639_1": "de", + "name": "Deutsch", + "english_name": "German", + "data": { + "title": "", + "overview": "Ein Yuppie findet beim charismatischen Tyler Durden Unterschlupf, nachdem seine Wohnung in die Luft gejagt wird. 
Ein Gerangel zwischen den beiden entwickelt sich zu einer Schlägerei, die mit der Erkenntnis endet, dass man sich nach einer ordentlichen Portion Prügel einfach besser fühlt. Der \"Fight Club\" ist geboren. Immer mehr Männer versammeln sich, um sich zu schlagen - und gestärkt in den Alltag zu gehen. Wie ein Virus greift das Konzept um sich, doch für Tyler ist der Kampfverein nur die erste Stufe, um die USA in die Knie zu zwingen.", + "homepage": "" + } + }, + { + "iso_3166_1": "PT", + "iso_639_1": "pt", + "name": "Português", + "english_name": "Portuguese", + "data": { + "title": "Clube de Combate", + "overview": "Jack (Edward Norton) é um executivo que trabalha como investigador de seguros, tem uma boa vida financeira, mas sofre com problemas de insônia. Para tentar se curar, ele começa a freqüentar terapias em grupo, mas sua vida vira de cabeça para baixo quando ele conhece Tyler (Brad Pitt). Com ele, forma um clube da luta, onde pessoas são amigas, mas se esmurram violentamente em algumas noites. Tudo ganha propósitos maiores quando as coisas começam a ficar loucas e surreais.", + "homepage": "http://www.welcometofc.com/" + } + }, + { + "iso_3166_1": "NL", + "iso_639_1": "nl", + "name": "Nederlands", + "english_name": "Dutch", + "data": { + "title": "", + "overview": "Een verzekeringsinspecteur die aan chronische slapeloosheid lijdt probeert wanhopig uit zijn oersaaie bestaan te ontsnappen. Bij toeval ontmoet hij Tyler Durden, een charismatische zeepverkoper met een bizarre levensfilosofie. Tyler gelooft namelijk dat \"zelfverbetering\" enkel voor de zwakken is, het is \"zelfvernieling\" die het leven meer waarde geeft. Samen met Tyler organiseert hij de ultieme vorm van ontspanning: zogeheten \"Fight Clubs\", waar gewone jonge mannen het beest in zichzelf loslaten in blotevuistgevechten. Het bevrijdende effect op de deelnemende mannen doet een vreemd soort kameraadschap ontstaan, dat al snel gevaarlijke vormen aanneemt.", + "homepage": "" + } + }, + { + "iso_3166_1": "RU", + "iso_639_1": "ru", + "name": "Pусский", + "english_name": "Russian", + "data": { + "title": "Бойцовский клуб", + "overview": "Терзаемый хронической бессоницей и отчаянно пытающийся вырваться из мучительно скучной жизни клерк встречает некоего Тайлера Дардена, харизматического торговца мылом с извращенной философией. Тайлер уверен, что самосовершенствование — удел слабых, а саморазрушение — единственное, ради чего стоит жить.", + "homepage": "" + } + }, + { + "iso_3166_1": "IT", + "iso_639_1": "it", + "name": "Italiano", + "english_name": "Italian", + "data": { + "title": "Fight Club", + "overview": "Un uomo di trent'anni è insofferente su tutto e la notte non riesce più a dormire. In cerca di qualche luogo dove scaricare la propria ansia si mette a frequentare quei corsi dove gruppi di malati gravi si riuniscono e confessano agli altri le rispettive situazioni. Mentre si lascia andare alla commozione e al pianto di fronte a quello che vede, l'uomo fa la conoscenza prima di Marla Singer poi di Tyler Durden. Lei è una ragazza a sua volta alla deriva, incapace di scelte o decisioni; lui è un tipo deciso e vigoroso con un'idea precisa in testa. Tyler fa saltare per aria l'appartamento dell'uomo e i due vanno a vivere insieme in una casa fatiscente. 
Deciso a coinvolgerlo nel suo progetto, Tyler lo fa entrare in un 'Fight Club', uno stanzone sotterraneo dove ci si riunisce per picchiarsi e in questo modo sentirsi di nuovo vivi...", + "homepage": "" + } + }, + { + "iso_3166_1": "TR", + "iso_639_1": "tr", + "name": "Türkçe", + "english_name": "Turkish", + "data": { + "title": "Dövüş Kulübü", + "overview": "Dövüş kulübünün ilk kuralı, dövüş kulübü hakkında konuşmamaktır. Dövüş kulübünün ikinci kuralı da, kulüp hakkında konuşmamaktır... Filmin baş kişisi, sıradan hayatının girdaplarında bunalımlar geçiren bir sigorta müfettişi olan Jack, Kanserli olmadığı halde, uykusuzluğunu yenmek ve hayatına anlam katmak adına, kanserlilere moral destek sağlayan terapi gruplarına katılır. Orada, Marla Singer adlı bir kızla garip bir yakınlık kurar. Bir iş gezisi dönüşü ise, Tyler Durden adlı egzantrik karakterle tanışır. Durden, Jack'in olmak isteyip de olamadığı adam gibidir. Tyler'ın girişimleriyle bir yeraltı faaliyeti olarak başlayan dövüş kulübü, Jack'e hayatında yepyeni kapılar açacaktır... Ve tabii, bu kapılardan ister istemez Marla geçecektir... Fakat... Tyler Durden gerçekte kimdir?", + "homepage": "" + } + }, + { + "iso_3166_1": "SE", + "iso_639_1": "sv", + "name": "svenska", + "english_name": "Swedish", + "data": { + "title": "Fight Club", + "overview": "Brad Pitt och Edward Norton gör två knockoutbra roller i denna häpnadsväckande och originella thriller med ironisk underton av David Fincher, regissören till Seven. Norton spelar Jack, en kroniskt sömnlös man som desperat försöker fly sitt olidligt tråkiga liv. Men så möter han Tyler Durden (Pitt) en karismatisk tvålförsäljare med en snedvriden filosofi. Tyler menar att självförbättring är för de svaga - det är självdestruktion som verkligen gör livet värt att leva. Inom kort är Jack och Tyler i full gång med att mörbulta varandra på en parkeringsplats. Ett renande slagsmål med en endorfinkick utan dess like. För att introducera andra män i denna enkla lycka av fysiskt våld bildar Jack och Tyler en hemlig \"Fight Club\" som snabbt blir omåttligt populär. Men en hemsk överraskning väntar Jack, en sanning som kommer att förändra allt... Filmen innehåller också kommentarer av David Fincher, Brad Pitt, Edward Norton och Helena Bonham Carter.", + "homepage": "" + } + }, + { + "iso_3166_1": "PL", + "iso_639_1": "pl", + "name": "Polski", + "english_name": "Polish", + "data": { + "title": "Podziemny krąg", + "overview": "Co ty możesz o sobie wiedzieć, jeśli nigdy nie walczyłeś? W tym niezwykłym, pełnym niespodziewanych zwrotów akcji i nie pozbawionym swoistego humoru filmie w reżyserii Davida Finchera (\"Siedem\") oryginalne i dynamiczne kreacje stworzyli Brad Pitt (\"Siedem\") i Edward Norton (\"Lęk pierwotny\"). Jack (Norton) cierpi na chroniczną bezsenność i jest całkowicie znudzony swym dotychczasowym życiem. Do czasu, gdy spotyka charyzmatycznego Tylera Durdena (Pitt) - sprzedawcę mydła o dość pokrętnej filozofii życia... Uważa on bowiem, że samo-doskonalenie jest dla słabeuszy, a to co rzeczywiście sprawia, że warto żyć to samo-destrukcja.", + "homepage": "" + } + }, + { + "iso_3166_1": "CZ", + "iso_639_1": "cs", + "name": "Český", + "english_name": "Czech", + "data": { + "title": "Klub rváčů", + "overview": "Když nemůžete půl roku usnout, celý okolní svět vám začne připadat jako nekonečný sen. Všechno kolem vás je nedokonalou xeroxovou kopií sebe sama. Chodíte do práce, díváte se na televizi a jste vděčni za to, když občas ztratíte vědomí a nevíte o světě. 
Lidí s podobnými problémy moc není, ale mladý úspěšný úředník, který si říká Jack, je jedním z nich. Má slušnou práci, vydělává slušné peníze, ale trpí nejtěžší formou nespavosti. Na služební cestě se Jack seznámí s Tylerem Durdenem, který mu nabídne příbytek pod podmínkou, že mu vrazí pořádnou ránu. Tato \"výměna názorů\" se oběma zalíbí a brzy vznikne první Klub rváčů. Místo, kde můžou mladí muži, znechucení světem, odložit své starosti a stát se na pár minut zvířaty.", + "homepage": "" + } + }, + { + "iso_3166_1": "IL", + "iso_639_1": "he", + "name": "עִבְרִית", + "english_name": "Hebrew", + "data": { + "title": "מועדון קרב", + "overview": "יאפי הסובל מנדודי שינה ועייפות כרונית מתחיל לבקר בסדנאות שיקום של חולי סרטן, אלכוהוליסטים אנונימיים וארגונים אחרים כדי להגיע להתרגשות בחייו. שם הוא פוגש צעירה משועממת ומתחזה כמוהו, איתה הוא מפתח קשר של אהבה/שנאה. במקביל הוא מתחבר אל צעיר ניהיליסט, המתפרנס מייצור סבון משומן אדם ובז לכל מה שקשור לממסד וחוק. השניים מקימים מועדון אגרוף בלתי חוקי לאנשים המחפשים פורקן מחוץ למסגרת הבורגנית. הקרבות מזרימים אדרנלין לדם, אך גם מובילים להקמתה של כת אנרכיסטית, מיליטנטית ומסוכנת.", + "homepage": "" + } + }, + { + "iso_3166_1": "RS", + "iso_639_1": "sr", + "name": "Srpski", + "english_name": "Serbian", + "data": { + "title": "Борилачки клуб", + "overview": "Усамљени безимени тридесетогодишњак (Едвард Нортон), запослен као кординатор за опозив у ауто-компанији, има проблема са несаницом коју покушава да реши на разне начине. У авиону упознаје Тајлера Дардена (Бред Пит), произвођача и продавца сапуна, са којим ће се спријатељити и основати удружење „Борилачки клуб“ које ће од простог малог друштва где људи после напорног дана долазе да се испразне постати тајни и добро организован анархистички покрет.", + "homepage": "" + } + }, + { + "iso_3166_1": "JP", + "iso_639_1": "ja", + "name": "日本語", + "english_name": "Japanese", + "data": { + "title": "ファイト・クラブ", + "overview": "空虚な生活を送るヤング・エグゼクティブのジャックは、謎の男テイラーに導かれるまま、謎の秘密組織「ファイト・クラブ」のメンバーになる。そこは鍛え抜かれた男達が己の拳のみを武器に闘いを繰り広げる、壮絶で危険な空間だった。", + "homepage": "" + } + }, + { + "iso_3166_1": "GR", + "iso_639_1": "el", + "name": "ελληνικά", + "english_name": "Greek", + "data": { + "title": "", + "overview": "Ένα στέλεχος που πάσχει από αϋπνίες βρίσκει τον αληθινό εαυτό του στα \"Fight Club\", όπου συμμετέχει σε αγώνες πυγμαχίας με γυμνά χέρια. Ο ιδρυτής των \"Fight Club\", ωστόσο, αποδεικνύεται μια επικίνδυνη προσωπικότητα, που επιδίδεται σε πράξεις τρομοκρατίας ενάντια στην καθεστηκυία τάξη.", + "homepage": "" + } + }, + { + "iso_3166_1": "BG", + "iso_639_1": "bg", + "name": "български език", + "english_name": "Bulgarian", + "data": { + "title": "Боен клуб", + "overview": "Разказвачът (Едуард Нортън) е самотник, чийто живот е безцелен низ от незначителни случки. Промяната започва едва след запознанството му с агресивния нихилист Джак (Брад Пит). Той го въвежда в подземния свят на тъмни сделки и машинации. Разказвача се включва като участник в кървави боксови двубои, които не му носят очакваното удовлетворение. Постепенно той осъзнава, че губи контрол над себе си, откривайки, че мачовете са част от зловещ план?", + "homepage": "" + } + }, + { + "iso_3166_1": "KR", + "iso_639_1": "ko", + "name": "한국어/조선말", + "english_name": "Korean", + "data": { + "title": "파이트 클럽", + "overview": "자동차 회사의 리콜 심사관으로 일하는 주인공(에드워드 노튼)은 일상의 무료함과 공허함 속에서 늘 새로운 탈출을 꿈꾼다. 그는 비행기에서 자신을 비누 제조업자라고 소개하는 타일러 더든(브래드 피트)을 만난다. 집에 돌아온 주인공은 아파트가 누군가에 의해 폭파되었음을 발견하고, 타일러에게 도움을 청해 함께 생활하게 된다. 어느 날 밤 타일러는 주인공에게 자신을 때려달라고 부탁한다. 사람은 싸워봐야 진정한 자신을 알 수 있다는 것이다. 
결국 이들은 매주 토요일 밤 술집 지하에서 맨주먹으로 격투를 벌이는 파이트 클럽을 결성하기에 이르는데...", + "homepage": "" + } + }, + { + "iso_3166_1": "SK", + "iso_639_1": "sk", + "name": "Slovenčina", + "english_name": "Slovak", + "data": { + "title": "Klub bitkárov", + "overview": "Keď nemôžete pol roka zaspať, celý okolitý svet vám začne pripadať ako nekonečný sen. Všetko okolo vás je nedokonalou xeroxovou kópiou seba samého. Chodíte do práce, pozeráte sa na televíziu a ste vďační za to, keď občas stratíte vedomie a neviete o svete. Ľudí s podobnými problémami moc nie je, ale mladý úspešný úradník, ktorý si hovorí Jack, je jedným z nich. Má slušnú prácu, zarába slušné peniaze, ale trpí najťažšou formou nespavosti. Na služobnej ceste sa Jack zoznámi s Tylerom Durdenom, ktorý mu ponúkne príbytok pod podmienkou, že mu vrazí poriadnu ranu. Táto \"výmena názorov\" sa obom zapáči a čoskoro vznikne prvý Klub bitkárov. Miesto, kde môžu mladí muži, znechutení svetom, odložiť svoje starosti a stať sa na pár minút zvieratami.", + "homepage": "" + } + }, + { + "iso_3166_1": "SA", + "iso_639_1": "ar", + "name": "العربية", + "english_name": "Arabic", + "data": { + "title": "", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "LV", + "iso_639_1": "lv", + "name": "Latviešu", + "english_name": "Latvian", + "data": { + "title": "Cīņas klubs", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "BR", + "iso_639_1": "pt", + "name": "Português", + "english_name": "Portuguese", + "data": { + "title": "Clube da Luta", + "overview": "Jack (Edward Norton) é um executivo jovem, trabalha como investigador de seguros, mora confortavelmente, mas ele está ficando cada vez mais insatisfeito com sua vida medíocre. Para piorar ele está enfrentando uma terrível crise de insônia, até que encontra uma cura inusitada para o sua falta de sono ao frequentar grupos de auto-ajuda. Nesses encontros ele passa a conviver com pessoas problemáticas como a viciada Marla Singer (Helena Bonham Carter) e a conhecer estranhos como Tyler Durden (Brad Pitt). Misterioso e cheio de ideias, Tyler apresenta para Jack um grupo secreto que se encontra para extravasar suas angústias e tensões através de violentos combates corporais.", + "homepage": "" + } + }, + { + "iso_3166_1": "AZ", + "iso_639_1": "az", + "name": "Azərbaycan", + "english_name": "Azerbaijani", + "data": { + "title": "Döyüshçü Klubu", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "ET", + "iso_639_1": "et", + "name": "Eesti", + "english_name": "Estonian", + "data": { + "title": "Kaklusklubi", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "LT", + "iso_639_1": "lt", + "name": "Lietuviškai", + "english_name": "Lithuanian", + "data": { + "title": "Kovos klubas", + "overview": "Nemigos kamuojamas Džekas lankosi įvairiuose nelaimėlių susitikimuose, klausosi jų išpažinčių ir drauge verkia. Jis skraido lėktuvais, kiekvienąkart tikėdamasis katastrofos. Tačiau kartą skrisdamas jis sutinka spalvingą asmenybę. Tailerio gyvenimas nepaprastas - nerūpestingas ir linksmas. Jis kartais linksmai ir nerūpestingai paprašo trenkti jam į galvą... Džekas neatsisako. 
Todėl tarp draugų užverda rimtos muštynės, ir netrukus jiedu įkuria klubą, kur vaikinai gali muštis iki sąmonės netekimo...", + "homepage": "" + } + }, + { + "iso_3166_1": "MK", + "iso_639_1": "mk", + "name": "", + "english_name": "Macedonian", + "data": { + "title": "Borechki Klub", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "HR", + "iso_639_1": "hr", + "name": "Hrvatski", + "english_name": "Croatian", + "data": { + "title": "Klub boraca", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "TW", + "iso_639_1": "tw", + "name": "", + "english_name": "Twi", + "data": { + "title": "鬥陣俱樂部", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "RO", + "iso_639_1": "ro", + "name": "Română", + "english_name": "Romanian", + "data": { + "title": "Clubul de lupte", + "overview": "Un insomniac gata să cedeze și un vânzător de săpun fără scrupule își canalizează agresiunea masculină într-o formă de terapie nouă și șocantă. Ideea lor se prinde și „cluburi de luptă” se formează în fiecare oraș, până când apare un excentric și pornește o spirală ieșită de sub control către uitare.", + "homepage": "" + } + }, + { + "iso_3166_1": "TW", + "iso_639_1": "zh", + "name": "普通话", + "english_name": "Mandarin", + "data": { + "title": "鬥陣俱樂部", + "overview": "傑克(愛德華諾頓飾演)是一個充滿中年危機意識的人,他非常憎恨自己的生活及一切,再加上他患有嚴重的失眠症,所以他常常參加各種團體諮詢會,只為了能接觸人群。在某一個團體諮詢會上,傑克遇上了一個跟他同樣理由來參加的女煙槍,瑪拉(海倫娜寶漢卡特飾演)。 在一個商務旅行中,傑克在飛機上遇到賣肥皂的商人-泰勒(布萊德彼特飾演),兩人因緣際會地成了好友,並開始創建了「鬥陣俱樂部」:一個讓彼此不戴護具而互毆的聚會,宗旨在發洩情緒。 某夜,泰勒在傑克的公寓中把瑪拉給「上」了,這讓傑克非常忌妒。同時「鬥陣俱樂部」也成了全國性的地下大組織,所有成員都將泰勒視為教父。為了辨識起見,成員還都剃了光頭。 傑克對於「鬥陣俱樂部」的現況及泰勒的瘋狂模樣越來越無法忍受,所以他決定疏遠泰勒。但是,此時的「鬥陣俱樂部」成員卻發起全國性的暴動,他們炸毀了不少建築物......,一切的局勢都是傑克始料未及的,他該如何解決這混亂的現狀?「鬥陣俱樂部」又會瘋狂成什麼樣子? 傑克與泰勒之間的恩恩怨怨會如何了結?", + "homepage": "" + } + }, + { + "iso_3166_1": "MX", + "iso_639_1": "es", + "name": "Español", + "english_name": "Spanish", + "data": { + "title": "El club de la pelea", + "overview": "Un joven hastiado de su gris y monótona vida lucha contra el insomnio. En un viaje en avión conoce a un carismático vendedor de jabón que sostiene una teoría muy particular: el perfeccionismo es cosa de gentes débiles; sólo la autodestrucción hace que la vida merezca la pena. Ambos deciden entonces fundar un club secreto de lucha, donde poder descargar sus frustaciones y su ira, que tendrá un éxito arrollador.", + "homepage": "" + } + }, + { + "iso_3166_1": "SI", + "iso_639_1": "sl", + "name": "Slovenščina", + "english_name": "Slovenian", + "data": { + "title": "Klub golih pesti", + "overview": "Fight Club ni le prispodoba o (samo)destruktivnem begu posameznika iz ujetosti potrošniškega nesmisla, temveč (tudi) parabola upora zoper nezmožnost poistovetenja s prvobitnim moškim/očetovskim principom — odraža se v sprevrženi percepciji psihosocialnih norm in nevrotični konfrontaciji ter boju (dobesedno \"z golimi pestmi\") s samim seboj. 
Dodajmo še patološke predstave o ljubezenski zvezi in vlogi ženske, metaforični upor maskuliniziranega segmenta potlačene moške dominantnosti, ki v ničemer ne najde prave identifikacije in opore, zato njegov dezorientirani razcepljeni um primanjkljaj kompenzira tako, da ustvarja namišljene alegorične osebnosti — ter izvrstno igralsko zasedbo (Edward Norton, Brad Pitt, Helena Bontham Carter) — pa dobimo enega najbolj izvirnih (in vplivnih) produktov ameriške literature in kinematografije zadnjih desetletij.", + "homepage": "" + } + }, + { + "iso_3166_1": "TH", + "iso_639_1": "th", + "name": "ภาษาไทย", + "english_name": "Thai", + "data": { + "title": "ดิบดวลดิบ", + "overview": "ไทเลอร์ (แบรด พิทท์) กล่าวว่า ทุกสิ่งทุกอย่างที่คุณครอบครองอยู่นั้น ท้ายที่สุดแล้วพวกมันก็จะครอบงำคุณเสีย แต่ถ้าหากคุณสูญสิ้นทุกสิ่งเมื่อใด คุณก็จะกล้าทำอะไรต่อมิอะไรได้อย่างไร้กังวลตลอดไป ซึ่ง Fight Club เปรียบได้กับอิสรภาพแบบนั้น \"", + "homepage": "" + } + }, + { + "iso_3166_1": "US", + "iso_639_1": "en", + "name": "English", + "english_name": "English", + "data": { + "title": "", + "overview": "A ticking-time-bomb insomniac and a slippery soap salesman channel primal male aggression into a shocking new form of therapy. Their concept catches on, with underground \"fight clubs\" forming in every town, until an eccentric gets in the way and ignites an out-of-control spiral toward oblivion.", + "homepage": "http://www.foxmovies.com/movies/fight-club" + } + }, + { + "iso_3166_1": "ES", + "iso_639_1": "es", + "name": "Español", + "english_name": "Spanish", + "data": { + "title": "El club de la lucha", + "overview": "Un joven sin ilusiones lucha contra su insomnio, consecuencia quizás de su hastío por su gris y rutinaria vida. En un viaje en avión conoce a Tyler Durden, un carismático vendedor de jabón que sostiene una filosofía muy particular: el perfeccionismo es cosa de gentes débiles; en cambio, la autodestrucción es lo único que hace que realmente la vida merezca la pena. Ambos deciden entonces formar un club secreto de lucha donde descargar sus frustaciones y su ira que tendrá un éxito arrollador.", + "homepage": "" + } + }, + { + "iso_3166_1": "FR", + "iso_639_1": "fr", + "name": "Français", + "english_name": "French", + "data": { + "title": "Fight Club", + "overview": "Le narrateur, sans identité précise, vit seul, travaille seul, dort seul, mange seul ses plateaux-repas pour une personne comme beaucoup d'autres personnes seules qui connaissent la misère humaine, morale et sexuelle. Mais un jour il fait la rencontre de Tyler Durden, une sorte d'anarchiste entre gourou et philosophe qui prêche l'amour de son prochain. Ensemble ils vont créer le Fight club, un lieu clandestin ou l'on peut retrouver sa virilité, l'échange et la communication.", + "homepage": "" + } + }, + { + "iso_3166_1": "HU", + "iso_639_1": "hu", + "name": "Magyar", + "english_name": "Hungarian", + "data": { + "title": "Harcosok klubja", + "overview": "Amerika nagyvárosainak pincéiben egy titkos szervezet működik: ha egy éjjel az utca összes nyilvános telefonja összetörik, ők jártak ott; ha egy köztéri szobor óriás fémgömbje legurul talapzatáról, és szétrombol egy gyorsétkezdét, az az ő művük; ha egy elegáns bank parkolójának összes autóját rettentően összerondítják a galambok - az sem véletlen. Vigyáznak a leveleinkre, átveszik telefonüzeneteinket, kísérnek az utcán: és még csak készülnek a végső dobásra: a nagy bummra... 
Pedig az egészet csak két túlzottan unatkozó jóbarát találta ki: azzal kezdték, hogy rájöttek, nincs jobb stresszoldó, mint ha alaposan megverik egymást. Pofonokat adni jó. Pofonokat kapni jó. Számukra ez a boldog élet szabálya.", + "homepage": "" + } + }, + { + "iso_3166_1": "CN", + "iso_639_1": "zh", + "name": "普通话", + "english_name": "Mandarin", + "data": { + "title": "搏击俱乐部", + "overview": "杰克( 爱德华·诺顿 Edward Norton 饰)是一个充满中年危机意识的人,他非常憎恨自己的生活及一切,再加上他患有严重的失眠症,所以他常常参加各种团体谘询会,只为了能接触人群。在某一个团体谘询会上,杰克遇上了一个跟他同样理由来参加的女烟枪,玛拉(海伦娜·邦汉·卡特 Helena Bonham Carter 饰),在莫名激素的影响下,杰克和玛拉一起逃离了谘询会,两人的情愫因而滋生… 一个偶然的机会,杰克遇到了卖肥皂的商人泰勒(布拉德·皮特 Brad Pitt 饰),一个浑身充满叛逆、残酷和暴烈的痞子英雄,并因为自己公寓失火而住进了泰勒破旧不堪的家中。两人因缘际会地成为了好朋友,并创立了“搏击俱乐部”:一个让人们不戴护具而徒手搏击,宗旨在于发泄情绪的地下组织。", + "homepage": "" + } + }, + { + "iso_3166_1": "DK", + "iso_639_1": "da", + "name": "Dansk", + "english_name": "Danish", + "data": { + "title": "Fight Club", + "overview": "Nortons evner som stjerneskuespiller viser sig i rollen som Jack, en kronisk søvnløs der er desperat efter at slippe ud af sit ulideligt kedelige liv. Da møder han Tyler Durden (Pitt), en karismatisk sæbesælger med en usædvanlig livsfilosofi. Tyler synes at udvikling af én selv er for de svage - det er selvdestruktionen der gør livet værd at leve. Inden længe er Jack og Tyler ved at slå hinanden til plukfisk på en parkeringsplads, en slåskamp der giver det ultimative kick. For at introducere andre mænd for de simple glæder ved fysisk vold, danner Jack og Tyler den hemmelige Fight Club der bliver voldsomt populær. Men en chokerende overraskelse der vender op og ned på alt, venter Jack...", + "homepage": "" + } + }, + { + "iso_3166_1": "FI", + "iso_639_1": "fi", + "name": "suomi", + "english_name": "Finnish", + "data": { + "title": "Fight Club", + "overview": "Jack (Norton) kärsii kroonisesta unettomuudesta ja yrittää epätoivoisesti paeta sietämättömän tylsää elämäänsä. Työmatkalla hän tapaa karismaattisen saippuakauppiaan, Tyler Durdenin (Pitt), jolla on vähintäänkin kieroutunut elämänkatsomus. Tyler uskoo itsetuhon olevan elämän suola. Ennen pitkää Jack ja Tyler purkavat patoutumiaan hakkaamalla toisiaan tohjoksi paikallisbaarin parkkipaikalla, ja huomaavat ankaran nyrkkitappelun tuottavan heille äärimmäisen hyvänolontunteen. Jotta muutkin miehet pääsisivät jyvälle tästä fyysisen väkivallan tuottamasta ilosta, Jack ja Tyler perustavat salaisen tappelukerhon, Fight Clubin, jonka suosio on valtava. Mutta Jackia odottaa järkyttävä yllätys, joka muuttaa kaiken.", + "homepage": "" + } + }, + { + "iso_3166_1": "NO", + "iso_639_1": "no", + "name": "Norsk", + "english_name": "Norwegian", + "data": { + "title": "", + "overview": "Jack kjeder seg til han møter Tyler, som mener selvdestruksjon gjør livet verdt å leve. For å introdusere andre menn for de simple gleder ved fysisk vold, danner de den hemmelige Fight Club som blir voldsomt populær. Med Brad Pitt og Edward Norton.", + "homepage": "" + } + }, + { + "iso_3166_1": "IN", + "iso_639_1": "ml", + "name": "", + "english_name": "Malayalam", + "data": { + "title": "ഫൈറ്റ് ക്ലബ്ബ്", + "overview": "", + "homepage": "" + } + }, + { + "iso_3166_1": "IR", + "iso_639_1": "fa", + "name": "فارسی", + "english_name": "Persian", + "data": { + "title": "باشگاه مشت زنی", + "overview": "«راوی» (نورتن)، جوانی پریشان حال پی می برد که به کمک مشت بازی با دست های برهنه، بیش از هر زمان دیگری احساس زنده بودن می کند. او و «تایلر دردن» (پیت) که به دوستانی صمیمی تبدیل شده اند، هفته ای یک بار با هم ملاقات می کنند تا با هم مشت بازی کنند. 
در حالی که افراد دیگری هم به باشگاه شان می پیوندند، محفل شان به رغم آن که رازی است بین شرکت کننده هایش، شهرت و محبوبیت یک باشگاه زیرزمینی را پیدا می کند.", + "homepage": "" + } + } + ] +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_upcoming.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_upcoming.json new file mode 100644 index 0000000000000..b279372a6f2b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_upcoming.json @@ -0,0 +1,158 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853600.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + } + } + } + }, + "dates": { + "$id": "#root/dates", + "title": "Dates", + "type": "object", + "properties": { + "maximum": { + "$id": "#root/dates/maximum", + "title": "Maximum", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "minimum": { + "$id": "#root/dates/minimum", + "title": "Minimum", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": 
"integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_videos.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_videos.json new file mode 100644 index 0000000000000..e86a8df6c4ec7 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_videos.json @@ -0,0 +1,135 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666853122.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": "null", + "default": null + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "string", + "default": 0 + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": "null", + "default": null + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "integer", + "default": 0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_watch_providers.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_watch_providers.json new file mode 100644 index 0000000000000..6b0e0112bd302 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/movies_watch_providers.json @@ -0,0 +1,5814 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1666860173.json", + "title": "Root", + "type": "object", + "properties": { + "id": { + "$id": "#root/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "object", + "properties": { + "AR": { + "$id": "#root/results/AR", + "title": "Ar", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/AR/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/AR/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/AR/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AR/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AR/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AR/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AR/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/AR/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/AR/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AR/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AR/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AR/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AR/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/AR/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/AR/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AR/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AR/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AR/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AR/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "AT": { + "$id": "#root/results/AT", + "title": "At", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/AT/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/AT/rent", + "title": "Rent", + "type": "array", + 
"default": [], + "items": { + "$id": "#root/results/AT/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AT/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AT/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AT/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AT/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/AT/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/AT/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AT/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AT/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AT/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AT/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "AU": { + "$id": "#root/results/AU", + "title": "Au", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/AU/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/AU/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/AU/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AU/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AU/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AU/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AU/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/AU/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/AU/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AU/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AU/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AU/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AU/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/AU/flatrate", + "title": "Flatrate", + "type": "array", 
+ "default": [], + "items": { + "$id": "#root/results/AU/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/AU/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/AU/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/AU/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/AU/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "BE": { + "$id": "#root/results/BE", + "title": "Be", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/BE/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/BE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/BE/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/BE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/BE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/BE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/BE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/BE/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/BE/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/BE/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/BE/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/BE/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/BE/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/BE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/BE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/BE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/BE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/BE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/BE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "BR": { + "$id": 
"#root/results/BR", + "title": "Br", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/BR/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/BR/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/BR/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/BR/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/BR/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/BR/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/BR/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/BR/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/BR/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/BR/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/BR/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/BR/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/BR/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/BR/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/BR/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/BR/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/BR/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/BR/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/BR/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "CA": { + "$id": "#root/results/CA", + "title": "Ca", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/CA/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/CA/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CA/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CA/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CA/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CA/rent/items/provider_id", + "title": 
"Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CA/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/CA/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CA/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CA/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CA/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CA/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CA/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/CA/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CA/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CA/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CA/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CA/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CA/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "CH": { + "$id": "#root/results/CH", + "title": "Ch", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/CH/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/CH/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CH/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CH/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CH/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CH/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CH/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/CH/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CH/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CH/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CH/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": 
"#root/results/CH/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CH/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/CH/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CH/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CH/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CH/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CH/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CH/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "CL": { + "$id": "#root/results/CL", + "title": "Cl", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/CL/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/CL/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CL/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CL/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CL/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CL/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CL/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/CL/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CL/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CL/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CL/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CL/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CL/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/CL/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CL/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CL/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CL/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + 
"default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CL/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CL/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "CO": { + "$id": "#root/results/CO", + "title": "Co", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/CO/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/CO/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CO/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CO/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CO/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CO/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CO/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/CO/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CO/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CO/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CO/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CO/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CO/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/CO/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CO/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CO/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CO/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CO/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CO/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "CZ": { + "$id": "#root/results/CZ", + "title": "Cz", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/CZ/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/CZ/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CZ/buy/items", + "title": "Items", + "type": "object", + "properties": { + 
"display_priority": { + "$id": "#root/results/CZ/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CZ/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CZ/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CZ/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/CZ/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CZ/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CZ/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CZ/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CZ/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CZ/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/CZ/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/CZ/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/CZ/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/CZ/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/CZ/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/CZ/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "DE": { + "$id": "#root/results/DE", + "title": "De", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/DE/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/DE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/DE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/DE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/DE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/DE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/DE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/DE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/DE/buy/items", + "title": "Items", + "type": 
"object", + "properties": { + "display_priority": { + "$id": "#root/results/DE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/DE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/DE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/DE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "DK": { + "$id": "#root/results/DK", + "title": "Dk", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/DK/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/DK/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/DK/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/DK/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/DK/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/DK/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/DK/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/DK/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/DK/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/DK/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/DK/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/DK/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/DK/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/DK/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/DK/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/DK/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/DK/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/DK/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/DK/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "EC": { + "$id": "#root/results/EC", + "title": "Ec", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/EC/link", + 
"title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/EC/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/EC/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/EC/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/EC/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/EC/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/EC/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/EC/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/EC/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/EC/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/EC/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/EC/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/EC/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/EC/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/EC/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/EC/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/EC/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/EC/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/EC/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "EE": { + "$id": "#root/results/EE", + "title": "Ee", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/EE/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/EE/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/EE/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/EE/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/EE/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/EE/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": 
"#root/results/EE/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/EE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/EE/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/EE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/EE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/EE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/EE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/EE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/EE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/EE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/EE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/EE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/EE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "ES": { + "$id": "#root/results/ES", + "title": "Es", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/ES/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/ES/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ES/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ES/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ES/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ES/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ES/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/ES/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ES/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ES/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ES/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ES/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, 
+ "provider_name": { + "$id": "#root/results/ES/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/ES/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ES/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ES/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ES/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ES/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ES/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "FI": { + "$id": "#root/results/FI", + "title": "Fi", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/FI/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/FI/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/FI/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/FI/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/FI/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/FI/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/FI/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/FI/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/FI/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/FI/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/FI/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/FI/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/FI/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/FI/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/FI/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/FI/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/FI/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/FI/rent/items/provider_id", + "title": "Provider_id", + "type": 
"integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/FI/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "FR": { + "$id": "#root/results/FR", + "title": "Fr", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/FR/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/FR/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/FR/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/FR/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/FR/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/FR/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/FR/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/FR/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/FR/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/FR/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/FR/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/FR/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/FR/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/FR/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/FR/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/FR/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/FR/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/FR/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/FR/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "GB": { + "$id": "#root/results/GB", + "title": "Gb", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/GB/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/GB/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/GB/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/GB/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, 
+ "logo_path": { + "$id": "#root/results/GB/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/GB/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/GB/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/GB/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/GB/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/GB/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/GB/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/GB/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/GB/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/GB/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/GB/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/GB/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/GB/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/GB/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/GB/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "GR": { + "$id": "#root/results/GR", + "title": "Gr", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/GR/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/GR/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/GR/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/GR/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/GR/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/GR/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/GR/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/GR/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/GR/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/GR/rent/items/display_priority", + 
"title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/GR/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/GR/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/GR/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/GR/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/GR/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/GR/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/GR/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/GR/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/GR/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "HU": { + "$id": "#root/results/HU", + "title": "Hu", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/HU/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/HU/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/HU/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/HU/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/HU/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/HU/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/HU/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/HU/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/HU/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/HU/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/HU/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/HU/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/HU/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/HU/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/HU/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": 
"#root/results/HU/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/HU/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/HU/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/HU/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "ID": { + "$id": "#root/results/ID", + "title": "Id", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/ID/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/ID/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ID/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ID/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ID/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ID/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ID/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/ID/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ID/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ID/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ID/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ID/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ID/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/ID/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ID/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ID/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ID/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ID/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ID/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "IE": { + "$id": "#root/results/IE", + "title": "Ie", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/IE/link", + "title": "Link", + "type": "string", + 
"default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/IE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/IE/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IE/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IE/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IE/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IE/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IE/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/IE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IE/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "IN": { + "$id": "#root/results/IN", + "title": "In", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/IN/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/IN/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IN/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IN/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IN/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IN/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IN/buy/items/provider_name", + "title": "Provider_name", + "type": 
"string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/IN/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IN/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IN/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IN/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IN/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IN/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/IN/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IN/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IN/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IN/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IN/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IN/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "IT": { + "$id": "#root/results/IT", + "title": "It", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/IT/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/IT/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IT/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IT/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IT/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IT/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IT/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/IT/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IT/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IT/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IT/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IT/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": 
"#root/results/IT/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/IT/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/IT/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/IT/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/IT/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/IT/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/IT/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "JP": { + "$id": "#root/results/JP", + "title": "Jp", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/JP/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/JP/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/JP/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/JP/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/JP/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/JP/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/JP/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/JP/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/JP/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/JP/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/JP/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/JP/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/JP/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/JP/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/JP/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/JP/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/JP/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/JP/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 
+ }, + "provider_name": { + "$id": "#root/results/JP/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "KR": { + "$id": "#root/results/KR", + "title": "Kr", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/KR/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/KR/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/KR/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/KR/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/KR/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/KR/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/KR/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/KR/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/KR/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/KR/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/KR/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/KR/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/KR/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/KR/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/KR/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/KR/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/KR/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/KR/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/KR/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "LT": { + "$id": "#root/results/LT", + "title": "Lt", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/LT/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/LT/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/LT/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/LT/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": 
"#root/results/LT/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/LT/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/LT/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/LT/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/LT/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/LT/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/LT/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/LT/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/LT/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "LV": { + "$id": "#root/results/LV", + "title": "Lv", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/LV/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/LV/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/LV/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/LV/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/LV/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/LV/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/LV/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/LV/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/LV/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/LV/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/LV/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/LV/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/LV/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "MX": { + "$id": "#root/results/MX", + "title": "Mx", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/MX/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/MX/flatrate", + "title": "Flatrate", + "type": "array", + 
"default": [], + "items": { + "$id": "#root/results/MX/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/MX/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/MX/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/MX/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/MX/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/MX/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/MX/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/MX/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/MX/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/MX/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/MX/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/MX/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/MX/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/MX/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/MX/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/MX/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/MX/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "MY": { + "$id": "#root/results/MY", + "title": "My", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/MY/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/MY/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/MY/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/MY/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/MY/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/MY/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/MY/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/MY/flatrate", + "title": 
"Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/MY/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/MY/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/MY/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/MY/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/MY/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/MY/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/MY/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/MY/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/MY/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/MY/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/MY/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "NL": { + "$id": "#root/results/NL", + "title": "Nl", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/NL/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/NL/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NL/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NL/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NL/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NL/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NL/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/NL/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NL/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NL/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NL/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NL/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NL/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": 
{ + "$id": "#root/results/NL/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NL/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NL/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NL/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NL/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NL/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "NO": { + "$id": "#root/results/NO", + "title": "No", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/NO/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/NO/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NO/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NO/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NO/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NO/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NO/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/NO/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NO/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NO/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NO/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NO/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NO/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/NO/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NO/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NO/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NO/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NO/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NO/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": 
"^.*$" + } + } + } + } + } + }, + "NZ": { + "$id": "#root/results/NZ", + "title": "Nz", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/NZ/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/NZ/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NZ/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NZ/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NZ/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NZ/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NZ/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/NZ/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NZ/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NZ/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NZ/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NZ/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NZ/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/NZ/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/NZ/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/NZ/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/NZ/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/NZ/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/NZ/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "PE": { + "$id": "#root/results/PE", + "title": "Pe", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/PE/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/PE/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PE/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PE/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PE/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + 
"provider_id": { + "$id": "#root/results/PE/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PE/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/PE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/PE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PE/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "PH": { + "$id": "#root/results/PH", + "title": "Ph", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/PH/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/PH/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PH/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PH/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PH/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PH/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PH/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/PH/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PH/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PH/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PH/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + 
"provider_id": { + "$id": "#root/results/PH/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PH/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/PH/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PH/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PH/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PH/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PH/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PH/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "PL": { + "$id": "#root/results/PL", + "title": "Pl", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/PL/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/PL/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PL/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PL/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PL/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PL/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PL/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/PL/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PL/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PL/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PL/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PL/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PL/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/PL/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PL/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PL/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PL/buy/items/logo_path", + "title": "Logo_path", + 
"type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PL/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PL/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "PT": { + "$id": "#root/results/PT", + "title": "Pt", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/PT/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/PT/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PT/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PT/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PT/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PT/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PT/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/PT/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PT/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PT/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PT/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PT/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PT/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/PT/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/PT/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/PT/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/PT/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/PT/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/PT/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "RO": { + "$id": "#root/results/RO", + "title": "Ro", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/RO/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/RO/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/RO/flatrate/items", + "title": "Items", + "type": 
"object", + "properties": { + "display_priority": { + "$id": "#root/results/RO/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/RO/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/RO/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/RO/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "RU": { + "$id": "#root/results/RU", + "title": "Ru", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/RU/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/RU/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/RU/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/RU/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/RU/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/RU/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/RU/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/RU/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/RU/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/RU/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/RU/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/RU/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/RU/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/RU/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/RU/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/RU/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/RU/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/RU/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/RU/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "SE": { + "$id": "#root/results/SE", + "title": "Se", + "type": "object", + "properties": { + "link": { + "$id": 
"#root/results/SE/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/SE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/SE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/SE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/SE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/SE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/SE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/SE/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/SE/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/SE/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/SE/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/SE/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/SE/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/SE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/SE/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/SE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/SE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/SE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/SE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "SG": { + "$id": "#root/results/SG", + "title": "Sg", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/SG/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/SG/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/SG/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/SG/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/SG/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/SG/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + 
"provider_name": { + "$id": "#root/results/SG/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/SG/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/SG/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/SG/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/SG/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/SG/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/SG/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/SG/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/SG/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/SG/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/SG/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/SG/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/SG/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "TH": { + "$id": "#root/results/TH", + "title": "Th", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/TH/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/TH/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/TH/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/TH/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/TH/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/TH/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/TH/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/TH/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/TH/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/TH/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/TH/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/TH/rent/items/provider_id", + "title": "Provider_id", + 
"type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/TH/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/TH/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/TH/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/TH/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/TH/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/TH/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/TH/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "TR": { + "$id": "#root/results/TR", + "title": "Tr", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/TR/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "buy": { + "$id": "#root/results/TR/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/TR/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/TR/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/TR/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/TR/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/TR/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/TR/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/TR/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/TR/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/TR/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/TR/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/TR/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/TR/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/TR/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/TR/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/TR/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/TR/flatrate/items/provider_id", + 
"title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/TR/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "US": { + "$id": "#root/results/US", + "title": "Us", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/US/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/US/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/US/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/US/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/US/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/US/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/US/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/US/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/US/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/US/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/US/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/US/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/US/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/US/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/US/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/US/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/US/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/US/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/US/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "VE": { + "$id": "#root/results/VE", + "title": "Ve", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/VE/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "flatrate": { + "$id": "#root/results/VE/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/VE/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/VE/flatrate/items/display_priority", + "title": 
"Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/VE/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/VE/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/VE/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "rent": { + "$id": "#root/results/VE/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/VE/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/VE/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/VE/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/VE/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/VE/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/VE/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/VE/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/VE/buy/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/VE/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/VE/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/VE/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + }, + "ZA": { + "$id": "#root/results/ZA", + "title": "Za", + "type": "object", + "properties": { + "link": { + "$id": "#root/results/ZA/link", + "title": "Link", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "rent": { + "$id": "#root/results/ZA/rent", + "title": "Rent", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ZA/rent/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ZA/rent/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ZA/rent/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ZA/rent/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ZA/rent/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "buy": { + "$id": "#root/results/ZA/buy", + "title": "Buy", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ZA/buy/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ZA/buy/items/display_priority", + 
"title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ZA/buy/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ZA/buy/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ZA/buy/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "flatrate": { + "$id": "#root/results/ZA/flatrate", + "title": "Flatrate", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/ZA/flatrate/items", + "title": "Items", + "type": "object", + "properties": { + "display_priority": { + "$id": "#root/results/ZA/flatrate/items/display_priority", + "title": "Display_priority", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/ZA/flatrate/items/logo_path", + "title": "Logo_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "provider_id": { + "$id": "#root/results/ZA/flatrate/items/provider_id", + "title": "Provider_id", + "type": "integer", + "default": 0 + }, + "provider_name": { + "$id": "#root/results/ZA/flatrate/items/provider_name", + "title": "Provider_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + } + } + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_collections.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_collections.json new file mode 100644 index 0000000000000..c3178425cbc8e --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_collections.json @@ -0,0 +1,67 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667209129.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/results/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_companies.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_companies.json new file mode 100644 index 0000000000000..bdf989a693cd1 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_companies.json @@ -0,0 +1,60 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": 
"https://example.com/object1667208961.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "logo_path": { + "$id": "#root/results/items/logo_path", + "title": "Logo_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "name": { + "$id": "#root/results/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_keywords.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_keywords.json new file mode 100644 index 0000000000000..ce82950a74bc7 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_keywords.json @@ -0,0 +1,53 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667209167.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/results/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_movies.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_movies.json new file mode 100644 index 0000000000000..bba41033bc8d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_movies.json @@ -0,0 +1,137 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667209325.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "overview": { + "$id": 
"#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + } + } + } + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_multi.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_multi.json new file mode 100644 index 0000000000000..2bfcc5128f16c --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_multi.json @@ -0,0 +1,137 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667210689.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": ["string", "null"], + "default": null + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": ["number", "integer"], + "default": 0 + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*" + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": ["string", "null"], + "default": null + }, + "vote_average": { + "$id": 
"#root/results/items/vote_average", + "title": "Vote_average", + "type": ["number", "integer"], + "default": 0 + }, + "media_type": { + "$id": "#root/results/items/media_type", + "title": "Media_type", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "first_air_date": { + "$id": "#root/results/items/first_air_date", + "title": "First_air_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "origin_country": { + "$id": "#root/results/items/origin_country", + "title": "Origin_country", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/origin_country/items", + "title": "Items", + "type": "string", + "default": "", + "pattern": "^.*$" + } + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [] + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/results/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_name": { + "$id": "#root/results/items/original_name", + "title": "Original_name", + "type": "string", + "default": "", + "pattern": "^.*$" + } + } + } + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_people.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_people.json new file mode 100644 index 0000000000000..62f31d32864d6 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_people.json @@ -0,0 +1,189 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667210990.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "profile_path": { + "$id": "#root/results/items/profile_path", + "title": "Profile_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "known_for": { + "$id": "#root/results/items/known_for", + "title": "Known_for", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/known_for/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/known_for/items/poster_path", + "title": "Poster_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "adult": { + "$id": "#root/results/items/known_for/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "overview": { + "$id": "#root/results/items/known_for/items/overview", + "title": "Overview", + "type": "string", + 
"default": "", + "pattern": "^.*" + }, + "release_date": { + "$id": "#root/results/items/known_for/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/results/items/known_for/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "genre_ids": { + "$id": "#root/results/items/known_for/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/known_for/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/known_for/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "media_type": { + "$id": "#root/results/items/known_for/items/media_type", + "title": "Media_type", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_language": { + "$id": "#root/results/items/known_for/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/results/items/known_for/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "backdrop_path": { + "$id": "#root/results/items/known_for/items/backdrop_path", + "title": "Backdrop_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/known_for/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": "#root/results/items/known_for/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "video": { + "$id": "#root/results/items/known_for/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/known_for/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + } + } + } + }, + "name": { + "$id": "#root/results/items/name", + "title": "Name", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + } + } + } + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_tv_shows.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_tv_shows.json new file mode 100644 index 0000000000000..a45e1ff8588d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/search_tv_shows.json @@ -0,0 +1,138 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667666886.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": ["null", "string"], + "default": "", + 
"pattern": "^.*$" + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "first_air_date": { + "$id": "#root/results/items/first_air_date", + "title": "First_air_date", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "origin_country": { + "$id": "#root/results/items/origin_country", + "title": "Origin_country", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/origin_country/items", + "title": "Items", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + } + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "name": { + "$id": "#root/results/items/name", + "title": "Name", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + }, + "original_name": { + "$id": "#root/results/items/original_name", + "title": "Original_name", + "type": ["null", "string"], + "default": "", + "pattern": "^.*$" + } + } + } + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/trending.json b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/trending.json new file mode 100644 index 0000000000000..e895925782c7a --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/schemas/trending.json @@ -0,0 +1,137 @@ +{ + "definitions": {}, + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://example.com/object1667211672.json", + "title": "Root", + "type": "object", + "properties": { + "page": { + "$id": "#root/page", + "title": "Page", + "type": "integer", + "default": 0 + }, + "results": { + "$id": "#root/results", + "title": "Results", + "type": "array", + "default": [], + "items": { + "$id": "#root/results/items", + "title": "Items", + "type": "object", + "properties": { + "adult": { + "$id": "#root/results/items/adult", + "title": "Adult", + "type": "boolean", + "default": true + }, + "backdrop_path": { + "$id": "#root/results/items/backdrop_path", + "title": "Backdrop_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "genre_ids": { + "$id": "#root/results/items/genre_ids", + "title": "Genre_ids", + "type": "array", + "default": [], + "items": { + "$id": 
"#root/results/items/genre_ids/items", + "title": "Items", + "type": "integer", + "default": 0 + } + }, + "id": { + "$id": "#root/results/items/id", + "title": "Id", + "type": "integer", + "default": 0 + }, + "original_language": { + "$id": "#root/results/items/original_language", + "title": "Original_language", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "original_title": { + "$id": "#root/results/items/original_title", + "title": "Original_title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "overview": { + "$id": "#root/results/items/overview", + "title": "Overview", + "type": "string", + "default": "", + "pattern": "^.*" + }, + "poster_path": { + "$id": "#root/results/items/poster_path", + "title": "Poster_path", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "release_date": { + "$id": "#root/results/items/release_date", + "title": "Release_date", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "title": { + "$id": "#root/results/items/title", + "title": "Title", + "type": "string", + "default": "", + "pattern": "^.*$" + }, + "video": { + "$id": "#root/results/items/video", + "title": "Video", + "type": "boolean", + "default": true + }, + "vote_average": { + "$id": "#root/results/items/vote_average", + "title": "Vote_average", + "type": "number", + "default": 0.0 + }, + "vote_count": { + "$id": "#root/results/items/vote_count", + "title": "Vote_count", + "type": "integer", + "default": 0 + }, + "popularity": { + "$id": "#root/results/items/popularity", + "title": "Popularity", + "type": "number", + "default": 0.0 + } + } + } + }, + "total_pages": { + "$id": "#root/total_pages", + "title": "Total_pages", + "type": "integer", + "default": 0 + }, + "total_results": { + "$id": "#root/total_results", + "title": "Total_results", + "type": "integer", + "default": 0 + } + } +} diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/source.py b/airbyte-integrations/connectors/source-tmdb/source_tmdb/source.py new file mode 100644 index 0000000000000..be00b94482afe --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceTmdb(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "tmdb.yaml"}) diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/spec.yaml b/airbyte-integrations/connectors/source-tmdb/source_tmdb/spec.yaml new file mode 100644 index 0000000000000..0b99b10e10cbd --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/spec.yaml @@ -0,0 +1,38 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/tmdb +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Tmdb Spec + type: object + required: + - api_key + - movie_id + - query + - language + additionalProperties: true + properties: + api_key: + title: Unique key for establishing connection + type: string + description: API Key from tmdb account + airbyte_secret: true + movie_id: + title: Movie ID for targeting movies + type: string + description: Target movie ID, Mandate for movie streams (Example is 550) + examples: + - 550 + - 560 + query: + title: Query for search streams + type: string + description: Target movie ID, Mandate for search streams + examples: + - Marvel + - DC + language: + title: Language for filtering + type: string + description: Language expressed in ISO 639-1 scheme, Mandate for required streams (Example en-US) + examples: + - en-US + - en-UK diff --git a/airbyte-integrations/connectors/source-tmdb/source_tmdb/tmdb.yaml b/airbyte-integrations/connectors/source-tmdb/source_tmdb/tmdb.yaml new file mode 100644 index 0000000000000..315a411caa992 --- /dev/null +++ b/airbyte-integrations/connectors/source-tmdb/source_tmdb/tmdb.yaml @@ -0,0 +1,300 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://api.themoviedb.org/3/" + http_method: "GET" + request_options_provider: + request_parameters: + api_key: "{{ config['api_key'] }}" + language: | + {{ + config['language'] + if options['name'] == 'search_collections' or + options['name'] == 'search_movies' or + options['name'] == 'search_multi' or + options['name'] == 'search_people' or + options['name'] == 'search_tv_shows' + else '' + }} + query: | + {{ + config['query'] + if options['name'] == 'search_collections' or + options['name'] == 'search_companies' or + options['name'] == 'search_keywords' or + options['name'] == 'search_movies' or + options['name'] == 'search_multi' or + options['name'] == 'search_people' or + options['name'] == 'search_tv_shows' + else '' + }} + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + schema_loader: + type: JsonSchema + file_path: "./source_tmdb/schemas/{{ options['name'] }}.json" + retriever: + $ref: "*ref(definitions.retriever)" + + page_stream: + schema_loader: + type: JsonSchema + file_path: "./source_tmdb/schemas/{{ options['name'] }}.json" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + pagination_strategy: + type: "PageIncrement" + page_size: 1000 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + page_size_option: + inject_into: "request_parameter" + field_name: "" + requester: + $ref: "*ref(definitions.requester)" + + certification_movie_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "certification_movie" + path: "/certification/movie/list" + + 
certification_tv_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "certification_tv" + path: "/certification/tv/list" + + changes_movie_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "changes_movie" + path: "/movie/changes" + + changes_tv_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "changes_tv" + path: "/tv/changes" + + changes_person_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "changes_person" + path: "/person/changes" + + movies_details_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_details" + path: "/movie/{{ config['movie_id'] }}" + + movies_alternative_titles_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_alternative_titles" + path: "/movie/{{ config['movie_id'] }}/alternative_titles" + + movies_credits_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_credits" + path: "/movie/{{ config['movie_id'] }}/credits" + + movies_external_ids_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_external_ids" + path: "/movie/{{ config['movie_id'] }}/external_ids" + + movies_images_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_images" + path: "/movie/{{ config['movie_id'] }}/images" + + movies_keywords_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_keywords" + path: "/movie/{{ config['movie_id'] }}/keywords" + + movies_lists_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_lists" + path: "/movie/{{ config['movie_id'] }}/lists" + + movies_recommendations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_recommendations" + path: "/movie/{{ config['movie_id'] }}/recommendations" + + movies_releases_dates_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_releases_dates" + path: "/movie/{{ config['movie_id'] }}/release_dates" + + movies_reviews_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_reviews" + path: "/movie/{{ config['movie_id'] }}/reviews" + + movies_similar_movies_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_similar_movies" + path: "/movie/{{ config['movie_id'] }}/similar" + + movies_translations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_translations" + path: "/movie/{{ config['movie_id'] }}/translations" + + movies_videos_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_videos" + path: "/movie/{{ config['movie_id'] }}/videos" + + movies_watch_providers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_watch_providers" + path: "/movie/{{ config['movie_id'] }}/watch/providers" + + movies_latest_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "movies_latest" + path: "/movie/latest" + + movies_now_playing_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "movies_now_playing" + path: "/movie/now_playing" + + movies_popular_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "movies_popular" + path: "/movie/popular" + + movies_top_rated_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "movies_top_rated" + path: "/movie/top_rated" + + movies_upcoming_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "movies_upcoming" + path: "/movie/upcoming" + + trending_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "trending" + 
path: "/trending/all/day" + + search_companies_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_companies" + path: "/search/company" + + search_collections_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_collections" + path: "/search/collection" + + search_keywords_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_keywords" + path: "/search/keyword" + + search_movies_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_movies" + path: "/search/movie" + + search_multi_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_multi" + path: "/search/multi" + + search_people_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_people" + path: "/search/person" + + search_tv_shows_stream: + $ref: "*ref(definitions.page_stream)" + $options: + name: "search_tv_shows" + path: "/search/tv" + +streams: + - "*ref(definitions.certification_movie_stream)" + - "*ref(definitions.certification_tv_stream)" + - "*ref(definitions.changes_movie_stream)" + - "*ref(definitions.changes_tv_stream)" + - "*ref(definitions.changes_person_stream)" + - "*ref(definitions.movies_details_stream)" + - "*ref(definitions.movies_alternative_titles_stream)" + - "*ref(definitions.movies_credits_stream)" + - "*ref(definitions.movies_external_ids_stream)" + - "*ref(definitions.movies_images_stream)" + - "*ref(definitions.movies_keywords_stream)" + - "*ref(definitions.movies_latest_stream)" + - "*ref(definitions.movies_lists_stream)" + - "*ref(definitions.movies_now_playing_stream)" + - "*ref(definitions.movies_popular_stream)" + - "*ref(definitions.movies_recommendations_stream)" + - "*ref(definitions.movies_releases_dates_stream)" + - "*ref(definitions.movies_reviews_stream)" + - "*ref(definitions.movies_similar_movies_stream)" + - "*ref(definitions.movies_top_rated_stream)" + - "*ref(definitions.movies_translations_stream)" + - "*ref(definitions.movies_upcoming_stream)" + - "*ref(definitions.movies_videos_stream)" + - "*ref(definitions.movies_watch_providers_stream)" + - "*ref(definitions.trending_stream)" + - "*ref(definitions.search_collections_stream)" + - "*ref(definitions.search_companies_stream)" + - "*ref(definitions.search_keywords_stream)" + - "*ref(definitions.search_movies_stream)" + - "*ref(definitions.search_multi_stream)" + - "*ref(definitions.search_people_stream)" + - "*ref(definitions.search_tv_shows_stream)" + +check: + stream_names: + - "trending" diff --git a/airbyte-integrations/connectors/source-toggl/.dockerignore b/airbyte-integrations/connectors/source-toggl/.dockerignore new file mode 100644 index 0000000000000..e03420dace1a0 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_toggl +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-toggl/Dockerfile b/airbyte-integrations/connectors/source-toggl/Dockerfile new file mode 100644 index 0000000000000..76f3286d281f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . 
+ +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_toggl ./source_toggl + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-toggl diff --git a/airbyte-integrations/connectors/source-toggl/README.md b/airbyte-integrations/connectors/source-toggl/README.md new file mode 100644 index 0000000000000..6e6a69e51e9ac --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/README.md @@ -0,0 +1,79 @@ +# Toggl Source + +This is the repository for the Toggl configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/toggl). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-toggl:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/toggl) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_toggl/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source toggl test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-toggl:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-toggl:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-toggl:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-toggl:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-toggl:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-toggl:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. 
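The same acceptance suite can also be driven straight from pytest; a small sketch, assuming `pip install -r requirements.txt` has installed `source-acceptance-test`, that `secrets/config.json` exists as described above, and that the `airbyte/source-toggl:dev` image referenced by `acceptance-test-config.yml` has been built.

```
# Sketch: invoke the Source Acceptance Tests from the connector root.
# Roughly equivalent to: python -m pytest integration_tests -p integration_tests.acceptance
import sys

import pytest

sys.exit(pytest.main(["-p", "integration_tests.acceptance", "integration_tests"]))
```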
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
+
+To run your integration tests with Docker, run the `acceptance-test-docker.sh` script from the connector root.
+
+### Using gradle to run tests
+All commands should be run from airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-toggl:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-toggl:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups, dependencies that are:
+* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
+* required for the testing need to go to `TEST_REQUIREMENTS` list
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-toggl/__init__.py b/airbyte-integrations/connectors/source-toggl/__init__.py
new file mode 100644
index 0000000000000..1100c1c58cf51
--- /dev/null
+++ b/airbyte-integrations/connectors/source-toggl/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml b/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml new file mode 100644 index 0000000000000..6c4fbe6f00d0a --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-toggl:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_toggl/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-toggl/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-toggl/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-toggl/build.gradle b/airbyte-integrations/connectors/source-toggl/build.gradle new file mode 100644 index 0000000000000..5eabc6da3dba2 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_toggl' +} diff --git a/airbyte-integrations/connectors/source-toggl/integration_tests/__init__.py b/airbyte-integrations/connectors/source-toggl/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-toggl/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-toggl/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-toggl/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-toggl/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-toggl/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-toggl/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..4edc23a3ce770 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/configured_catalog.json @@ -0,0 +1,76 @@ +{ + "streams": [ + { + "stream": { + "name": "time_entries", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "organizations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "organizations_users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "organizations_groups", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "workspace", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "workspace_clients", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "workspace_projects", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "workspace_tasks", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-toggl/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-toggl/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..94f9c099be472 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "api_token": "fail-OxxxO-00-1234" +} diff --git 
a/airbyte-integrations/connectors/source-toggl/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-toggl/integration_tests/sample_config.json new file mode 100644 index 0000000000000..0d10885a7f769 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/sample_config.json @@ -0,0 +1,7 @@ +{ + "api_token": "2e82a63c554d89fca71f80f1c5f0efbe", + "organization_id": 6771652, + "workspace_id": 6799421, + "start_date": "2022-10-01", + "end_date": "2022-10-30" +} diff --git a/airbyte-integrations/connectors/source-toggl/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-toggl/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-toggl/main.py b/airbyte-integrations/connectors/source-toggl/main.py new file mode 100644 index 0000000000000..cc94af68a4b98 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_toggl import SourceToggl + +if __name__ == "__main__": + source = SourceToggl() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-toggl/requirements.txt b/airbyte-integrations/connectors/source-toggl/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-toggl/setup.py b/airbyte-integrations/connectors/source-toggl/setup.py new file mode 100644 index 0000000000000..907383d992cff --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_toggl", + description="Source implementation for Toggl.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/__init__.py b/airbyte-integrations/connectors/source-toggl/source_toggl/__init__.py new file mode 100644 index 0000000000000..31c3963b464e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceToggl + +__all__ = ["SourceToggl"] diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations.json new file mode 100644 index 0000000000000..7636929e95c2e --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations.json @@ -0,0 +1,54 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "admin": { + "type": "boolean" + }, + "at": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "is_chargify": { + "type": "boolean" + }, + "is_multi_workspace_enabled": { + "type": "boolean" + }, + "is_unified": { + "type": "boolean" + }, + "max_workspaces": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "owner": { + "type": "boolean" + }, + "payment_methods": { + "type": "string" + }, + "pricing_plan_id": { + "type": "integer" + }, + "server_deleted_at": { + "type": ["null", "string"] + }, + "suspended_at": { + "type": ["null", "string"] + }, + "trial_info": { + "type": "object" + }, + "user_count": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations_groups.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations_groups.json new file mode 100644 index 0000000000000..0b41b87766e34 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations_groups.json @@ -0,0 +1,21 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "at": { + "type": "string" + }, + "group_id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "users": { + "type": ["null", "array"] + }, + "workspaces": { + "type": "array" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations_users.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations_users.json new file mode 100644 index 0000000000000..40e42d3027532 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/organizations_users.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "admin": { + "type": "boolean" + }, + "avatar_url": { + "type": "string" + }, + "can_edit_email": { + "type": "boolean" + }, + "email": { + "type": "string" + }, + "groups": { + "type": ["null", "array"] + }, + "id": { + "type": "integer" + }, + "inactive": { + "type": "boolean" + }, + "invitation_code": { + "type": ["null", "string"] + }, + "joined": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "owner": { + "type": "boolean" + }, + "user_id": { + "type": "integer" + }, + "workspaces": { + "type": "array" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/time_entries.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/time_entries.json new file mode 100644 index 0000000000000..336270c5a7e4a --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/time_entries.json @@ -0,0 +1,63 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "at": { + "type": "string" + }, + "billable": { + "type": "boolean" + }, + "description": { + "type": ["null", "string"] + }, + "duration": { + 
"type": "integer" + }, + "duronly": { + "type": "boolean" + }, + "id": { + "type": "integer" + }, + "pid": { + "type": "integer" + }, + "project_id": { + "type": ["null", "integer"] + }, + "server_deleted_at": { + "type": ["null", "string"] + }, + "start": { + "type": "string" + }, + "stop": { + "type": "string" + }, + "tag_ids": { + "type": ["null", "array"] + }, + "tags": { + "type": ["null", "array"] + }, + "task_id": { + "type": ["null", "integer"] + }, + "tid": { + "type": "integer" + }, + "uid": { + "type": "integer" + }, + "user_id": { + "type": "integer" + }, + "wid": { + "type": "integer" + }, + "workspace_id": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace.json new file mode 100644 index 0000000000000..1c75da442fee5 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "admins": { + "type": "array" + }, + "groups_count": { + "type": "integer" + }, + "members_count": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_clients.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_clients.json new file mode 100644 index 0000000000000..89dfd0742a280 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_clients.json @@ -0,0 +1,24 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "archived": { + "type": "boolean" + }, + "at": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "server_deleted_at": { + "type": "string" + }, + "wid": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_projects.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_projects.json new file mode 100644 index 0000000000000..3e02632305e9c --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_projects.json @@ -0,0 +1,81 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "active": { + "type": "boolean" + }, + "actual_hours": { + "type": ["null", "integer"] + }, + "at": { + "type": "string" + }, + "auto_estimates": { + "type": ["null", "boolean"] + }, + "billable": { + "type": ["null", "boolean"] + }, + "cid": { + "type": ["null", "integer"] + }, + "client_id": { + "type": ["null", "integer"] + }, + "color": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "currency": { + "type": ["null", "string"] + }, + "current_period": { + "type": ["null", "object"] + }, + "estimated_hours": { + "type": ["null", "integer"] + }, + "first_time_entry": { + "type": "string" + }, + "fixed_fee": { + "type": ["null", "number"] + }, + "id": { + "type": "integer" + }, + "is_private": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "rate": { + "type": ["null", "number"] + }, + "rate_last_updated": { + "type": ["null", "string"] + }, + "recurring": { + "type": "boolean" + }, + "recurring_parameters": { + "type": ["null", "array"] + }, + "server_deleted_at": { + "type": ["null", "string"] + }, + "template": { + "type": ["null", "boolean"] + }, + "wid": { + 
"type": "integer" + }, + "workspace_id": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_tasks.json b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_tasks.json new file mode 100644 index 0000000000000..3593ca824830c --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/schemas/workspace_tasks.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "active": { + "type": "boolean" + }, + "at": { + "type": "string" + }, + "estimated_seconds": { + "type": ["null", "integer"] + }, + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "project_id": { + "type": "integer" + }, + "recurring": { + "type": "boolean" + }, + "server_deleted_at": { + "type": ["null", "string"] + }, + "tracked_seconds": { + "type": "integer" + }, + "user_id": { + "type": ["null", "integer"] + }, + "workspace_id": { + "type": "integer" + } + } +} diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/source.py b/airbyte-integrations/connectors/source-toggl/source_toggl/source.py new file mode 100644 index 0000000000000..30b928ef3e14f --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceToggl(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "toggl.yaml"}) diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/spec.yaml b/airbyte-integrations/connectors/source-toggl/source_toggl/spec.yaml new file mode 100644 index 0000000000000..c29b7b33b0992 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/spec.yaml @@ -0,0 +1,47 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/toggl +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Toggl Spec + type: object + required: + - api_token + - organization_id + - workspace_id + - start_date + - end_date + additionalProperties: true + properties: + api_token: + title: API token + type: string + description: >- + Your API Token. See here. The token is + case sensitive. + airbyte_secret: true + organization_id: + title: Organization ID + type: integer + description: >- + Your organization id. See here. + workspace_id: + title: Workspace ID + type: integer + description: >- + Your workspace id. See here. + start_date: + title: Start date + type: string + description: To retrieve time entries created after the given date (inclusive). + pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + examples: + - YYYY-MM-DD + end_date: + title: End date + type: string + description: To retrieve time entries created before the given date (inclusive). 
+ pattern: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ + examples: + - YYYY-MM-DD diff --git a/airbyte-integrations/connectors/source-toggl/source_toggl/toggl.yaml b/airbyte-integrations/connectors/source-toggl/source_toggl/toggl.yaml new file mode 100644 index 0000000000000..74041b3860550 --- /dev/null +++ b/airbyte-integrations/connectors/source-toggl/source_toggl/toggl.yaml @@ -0,0 +1,112 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + data_selector: + extractor: + field_pointer: ["data"] + requester: + url_base: "https://api.track.toggl.com" + http_method: "GET" + request_options_provider: + request_parameters: + start_date: "{{ config['start_date'] }}" + end_date: "{{ config['end_date'] }}" + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['api_token'] }}" + password: "api_token" + increment_paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "per_page" + pagination_strategy: + type: "PageIncrement" + page_size: 50 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + time_entries_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "time_entries" + primary_key: "id" + path: "/api/v9/me/time_entries" + organizations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organizations" + primary_key: "id" + path: "/api/v9/organizations/{{ config['organization_id'] }}" + organizations_users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organizations_users" + primary_key: "id" + path: "/api/v9/organizations/{{ config['organization_id'] }}/users" + organizations_groups_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organizations_groups" + primary_key: "group_id" + path: "/api/v9/organizations/{{ config['organization_id'] }}/groups" + workspace_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "workspace" + primary_key: "" + path: "/api/v9/workspaces/{{ config['workspace_id'] }}/statistics" + workspace_clients_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "workspace_clients" + primary_key: "id" + path: "/api/v9/workspaces/{{ config['workspace_id'] }}/clients" + workspace_projects_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "workspace_projects" + primary_key: "id" + path: "/api/v9/workspaces/{{ config['workspace_id'] }}/projects" + workspace_tasks_stream: + retriever: + record_selector: + $ref: "*ref(definitions.data_selector)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + requester: + $ref: "*ref(definitions.requester)" + request_options_provider: + request_parameters: "" + $options: + name: "workspace_tasks" + primary_key: "id" + path: "/api/v9/workspaces/{{ config['workspace_id'] }}/tasks" + +streams: + - "*ref(definitions.time_entries_stream)" + - "*ref(definitions.organizations_stream)" + - "*ref(definitions.organizations_users_stream)" + - "*ref(definitions.organizations_groups_stream)" + - "*ref(definitions.workspace_stream)" + - "*ref(definitions.workspace_clients_stream)" + - "*ref(definitions.workspace_projects_stream)" + - "*ref(definitions.workspace_tasks_stream)" + +check: + stream_names: + - "time_entries" diff --git 
a/airbyte-integrations/connectors/source-tplcentral/Dockerfile b/airbyte-integrations/connectors/source-tplcentral/Dockerfile index 1a37735dfd614..f6fe8952451fd 100644 --- a/airbyte-integrations/connectors/source-tplcentral/Dockerfile +++ b/airbyte-integrations/connectors/source-tplcentral/Dockerfile @@ -34,5 +34,5 @@ COPY source_tplcentral ./source_tplcentral ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] -LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.version=0.1.1 LABEL io.airbyte.name=airbyte/source-tplcentral diff --git a/airbyte-integrations/connectors/source-tplcentral/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tplcentral/acceptance-test-config.yml index 89e1e040f2c71..c831d6c6a7901 100644 --- a/airbyte-integrations/connectors/source-tplcentral/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-tplcentral/acceptance-test-config.yml @@ -9,6 +9,8 @@ tests: status: "succeed" - config_path: "integration_tests/invalid_config.json" status: "failed" + - config_path: "integration_tests/config_http_url.json" + status: "failed" discovery: - config_path: "secrets/config.json" basic_read: diff --git a/airbyte-integrations/connectors/source-tplcentral/integration_tests/config_http_url.json b/airbyte-integrations/connectors/source-tplcentral/integration_tests/config_http_url.json new file mode 100644 index 0000000000000..0d545e12bafe6 --- /dev/null +++ b/airbyte-integrations/connectors/source-tplcentral/integration_tests/config_http_url.json @@ -0,0 +1,3 @@ +{ + "url_base": "http://secure-wms.com/" +} diff --git a/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/source.py b/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/source.py index e25c8cb15f62b..42adbf8288898 100644 --- a/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/source.py +++ b/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/source.py @@ -22,6 +22,7 @@ def __init__( client_secret: str, user_login_id: int = None, user_login: str = None, + scopes: List[str] = None, ): super().__init__( token_refresh_endpoint=token_refresh_endpoint, @@ -30,6 +31,12 @@ def __init__( refresh_token=None, ) + self.token_refresh_endpoint = token_refresh_endpoint + self.client_id = client_id + self.client_secret = client_secret + self.scopes = scopes + self.access_token_name = "access_token" + self.expires_in_name = "expires_in" self.user_login_id = user_login_id self.user_login = user_login diff --git a/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/spec.json b/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/spec.json index 88333ddc7ea69..110856b613983 100644 --- a/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/spec.json +++ b/airbyte-integrations/connectors/source-tplcentral/source_tplcentral/spec.json @@ -11,7 +11,8 @@ "title": "URL base", "type": "string", "format": "uri", - "default": "https://secure-wms.com/" + "default": "https://secure-wms.com/", + "pattern": "^https://" }, "client_id": { "title": "Client ID", diff --git a/airbyte-integrations/connectors/source-tplcentral/unit_tests/test_source.py b/airbyte-integrations/connectors/source-tplcentral/unit_tests/test_source.py index 6a43b34024b9b..6bbf782a025c3 100644 --- a/airbyte-integrations/connectors/source-tplcentral/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-tplcentral/unit_tests/test_source.py @@ -11,24 
+11,22 @@ @fixture def config(): return { - "config": { - "url_base": "https://secure-wms.com/", - "client_id": "xxx", - "client_secret": "yyy", - "user_login_id": 123, - "tpl_key": "{00000000-0000-0000-0000-000000000000}", - "customer_id": 4, - "facility_id": 5, - "start_date": "2021-10-01", - } + "url_base": "https://secure-wms.com/", + "client_id": "xxx", + "client_secret": "yyy", + "user_login_id": 123, + "tpl_key": "{00000000-0000-0000-0000-000000000000}", + "customer_id": 4, + "facility_id": 5, + "start_date": "2021-10-01", } -def test_check_connection(mocker, requests_mock, config): +def test_check_connection(requests_mock, config): source = SourceTplcentral() logger_mock = MagicMock() requests_mock.post( - f"{config['config']['url_base']}AuthServer/api/Token", + f"{config['url_base']}AuthServer/api/Token", json={ "access_token": "the_token", "token_type": "Bearer", @@ -37,12 +35,11 @@ def test_check_connection(mocker, requests_mock, config): "scope": None, }, ) - assert source.check_connection(logger_mock, **config) == (True, None) + assert source.check_connection(logger_mock, config) == (True, None) -def test_streams(mocker): +def test_streams(config): source = SourceTplcentral() - config_mock = MagicMock() - streams = source.streams(config_mock) + streams = source.streams(config) expected_streams_number = 6 assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-twitter/.dockerignore b/airbyte-integrations/connectors/source-twitter/.dockerignore new file mode 100644 index 0000000000000..1869303e032c4 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_twitter +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-twitter/Dockerfile b/airbyte-integrations/connectors/source-twitter/Dockerfile new file mode 100644 index 0000000000000..4239905d6d13c --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_twitter ./source_twitter + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-twitter diff --git a/airbyte-integrations/connectors/source-twitter/README.md b/airbyte-integrations/connectors/source-twitter/README.md new file mode 100644 index 0000000000000..8a07a781a4c41 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/README.md @@ -0,0 +1,79 @@ +# Twitter Source + +This is the repository for the Twitter configuration based source connector. 
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/twitter).
+
+## Local development
+
+#### Building via Gradle
+You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
+
+To build using Gradle, from the Airbyte repository root, run:
+```
+./gradlew :airbyte-integrations:connectors:source-twitter:build
+```
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/twitter)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_twitter/spec.yaml` file.
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source twitter test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector docker image
+
+#### Build
+First, make sure you build the latest Docker image:
+```
+docker build . -t airbyte/source-twitter:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-twitter:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-twitter:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-twitter:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-twitter:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-twitter:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+
+#### Acceptance Tests
+Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
+
+To run your integration tests with Docker, run the `acceptance-test-docker.sh` script from the connector root.
+
+### Using gradle to run tests
+All commands should be run from airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-twitter:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-twitter:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups, dependencies that are:
+* required for your connector to work need to go to `MAIN_REQUIREMENTS` list.
+* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-twitter/__init__.py b/airbyte-integrations/connectors/source-twitter/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-twitter/acceptance-test-config.yml b/airbyte-integrations/connectors/source-twitter/acceptance-test-config.yml new file mode 100644 index 0000000000000..4bb7ee3d61816 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/acceptance-test-config.yml @@ -0,0 +1,38 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-twitter:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_twitter/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-twitter/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-twitter/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-twitter/build.gradle b/airbyte-integrations/connectors/source-twitter/build.gradle new file mode 100644 index 0000000000000..1b0112733ad3d --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_twitter' +} diff --git a/airbyte-integrations/connectors/source-twitter/integration_tests/__init__.py b/airbyte-integrations/connectors/source-twitter/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-twitter/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-twitter/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-twitter/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-twitter/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-twitter/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-twitter/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..9be5c18e5384e --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "tweets", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-twitter/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-twitter/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..d4bab989f5073 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "AAAAAAAAAM15n5csdO42sm244kzyw9I0jIXEYxt6HmR7H3ZAcGXA", + "query": "sivakasi" +} diff --git a/airbyte-integrations/connectors/source-twitter/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-twitter/integration_tests/sample_config.json new file mode 100644 index 0000000000000..7f5e4c18e392f --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "your api key", + "query": "your search query", + "start_time": "2022-11-10T10:00:50Z", + "end_time": "2022-11-11T10:00:50Z" +} diff --git a/airbyte-integrations/connectors/source-twitter/main.py b/airbyte-integrations/connectors/source-twitter/main.py new file mode 100644 index 0000000000000..3601a5ce6634a --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_twitter import SourceTwitter + +if __name__ == "__main__": + source = SourceTwitter() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-twitter/requirements.txt b/airbyte-integrations/connectors/source-twitter/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-twitter/setup.py b/airbyte-integrations/connectors/source-twitter/setup.py new file mode 100644 index 0000000000000..e6094aaae3495 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_twitter", + description="Source implementation for Twitter.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-twitter/source_twitter/__init__.py b/airbyte-integrations/connectors/source-twitter/source_twitter/__init__.py new file mode 100644 index 0000000000000..5b8b833b42f17 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/source_twitter/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceTwitter + +__all__ = ["SourceTwitter"] diff --git a/airbyte-integrations/connectors/source-twitter/source_twitter/schemas/tweets.json b/airbyte-integrations/connectors/source-twitter/source_twitter/schemas/tweets.json new file mode 100644 index 0000000000000..521cf8cd7e23a --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/source_twitter/schemas/tweets.json @@ -0,0 +1,15 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "edit_history_tweet_ids": { + "type": ["null", "array"] + }, + "id": { + "type": ["null", "string"] + }, + "text": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-twitter/source_twitter/source.py b/airbyte-integrations/connectors/source-twitter/source_twitter/source.py new file mode 100644 index 0000000000000..d5bedefc9da98 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/source_twitter/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceTwitter(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "twitter.yaml"}) diff --git a/airbyte-integrations/connectors/source-twitter/source_twitter/spec.yaml b/airbyte-integrations/connectors/source-twitter/source_twitter/spec.yaml new file mode 100644 index 0000000000000..1cfa39bbd003e --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/source_twitter/spec.yaml @@ -0,0 +1,27 @@ +documentationUrl: https://docsurl.com +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Twitter Spec + type: object + required: + - api_key + - query + additionalProperties: true + properties: + api_key: + title: API Key Token + description: API Key + type: string + airbyte_secret: true + query: + title: Twitter Search Query + description: Twitter Search query + type: string + start_date: + title: Start Date + description: From what date you want to start retrieving dta + type: string + end_date: + title: End Date + description: End data to retrieve data + type: string diff --git a/airbyte-integrations/connectors/source-twitter/source_twitter/twitter.yaml b/airbyte-integrations/connectors/source-twitter/source_twitter/twitter.yaml new file mode 100644 index 0000000000000..0c4dc189f94f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-twitter/source_twitter/twitter.yaml @@ -0,0 +1,70 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data"] + requester: + url_base: "https://api.twitter.com/2/tweets" + http_method: "GET" + authenticator: + type: "BearerAuthenticator" + header: "apikey" + api_token: "{{ config['api_key'] }}" + request_options_provider: + request_parameters: + query: "{{ config['query'] }}" + start_time: "{{ config['start_time'] }}" + end_time: "{{ config['end_time'] }}" + stream_slicer: + type: DatetimeStreamSlicer + start_datetime: + datetime: "{{ config['start_time'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S" + end_datetime: + datetime: "{{ config['end_time'] }}" + datetime_format: "%Y-%m-%dT%H:%M:%S" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + step: 1d + start_time_option: + field_name: start_time + inject_into: request_parameter + end_time_option: + field_name: end_time + inject_into: request_parameter + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response.meta.next_token }}" + stop_condition: "{{ 'next_token' not in response['meta'] }}" + page_size: 100 + page_size_option: + field_name: "max_results" + inject_into: "request_parameter" + page_token_option: + field_name: "next_token" + inject_into: "request_parameter" + url_base: "*ref(definitions.requester.url_base)" + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + tweets_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tweets" + path: "/search/recent" + +streams: + - "*ref(definitions.tweets_stream)" + +check: + stream_names: + - "tweets" diff --git a/airbyte-integrations/connectors/source-tyntec-sms/.dockerignore b/airbyte-integrations/connectors/source-tyntec-sms/.dockerignore new file mode 100644 index 0000000000000..4fdb3e5390c44 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_tyntec_sms +!setup.py +!secrets diff --git 
a/airbyte-integrations/connectors/source-tyntec-sms/Dockerfile b/airbyte-integrations/connectors/source-tyntec-sms/Dockerfile new file mode 100644 index 0000000000000..8303c3904da8d --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_tyntec_sms ./source_tyntec_sms + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-tyntec-sms diff --git a/airbyte-integrations/connectors/source-tyntec-sms/README.md b/airbyte-integrations/connectors/source-tyntec-sms/README.md new file mode 100644 index 0000000000000..3bc098e6234ed --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/README.md @@ -0,0 +1,79 @@ +# Tyntec Sms Source + +This is the repository for the Tyntec Sms configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/tyntec-sms). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-tyntec-sms:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/tyntec-sms) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_tyntec_sms/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source tyntec-sms test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-tyntec-sms:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-tyntec-sms:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
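As an optional sanity check (a suggestion, not part of the documented workflow), you can list the locally available images to confirm the image name and tag before running the connector:
```
docker images airbyte/source-tyntec-sms
```
The `dev` tag comes from the manual `docker build` above, while a Gradle build is tagged with the value of the `io.airbyte.version` label.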
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-tyntec-sms:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-tyntec-sms:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-tyntec-sms:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-tyntec-sms:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-tyntec-sms:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-tyntec-sms:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-tyntec-sms/__init__.py b/airbyte-integrations/connectors/source-tyntec-sms/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-tyntec-sms/acceptance-test-config.yml b/airbyte-integrations/connectors/source-tyntec-sms/acceptance-test-config.yml new file mode 100644 index 0000000000000..f6b741455c36a --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/acceptance-test-config.yml @@ -0,0 +1,29 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-tyntec-sms:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_tyntec_sms/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + timeout_seconds: 300 + - config_path: "integration_tests/invalid_config.json" + status: "failed" + timeout_seconds: 300 + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + incremental: + bypass_reason: "This connector does not implement incremental sync" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-tyntec-sms/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-tyntec-sms/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-tyntec-sms/build.gradle b/airbyte-integrations/connectors/source-tyntec-sms/build.gradle new file mode 100644 index 0000000000000..1dfd05a6049f0 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_tyntec_sms' +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/__init__.py b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..446d13879ec9f --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/configured_catalog.json @@ -0,0 +1,49 @@ +{ + "streams": [ + { + "stream": { + "name": "contacts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "messages", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "phones", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "registrations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "sms", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..afd4bb75d5207 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "asl;dkfjsdFX%ZSIfqel5thera", + "to": "+17023759672", + "from": "+17023759672", + "message": "Hello, this is your first Tyntec message." +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/sample_config.json new file mode 100644 index 0000000000000..ca542f64632d7 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "api_key": "My API Key", + "to": "+17023759672", + "from": "+17023759672", + "message": "Hello, this is your first Tyntec message." +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/main.py b/airbyte-integrations/connectors/source-tyntec-sms/main.py new file mode 100644 index 0000000000000..f7e0258beda1c --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_tyntec_sms import SourceTyntecSms + +if __name__ == "__main__": + source = SourceTyntecSms() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-tyntec-sms/requirements.txt b/airbyte-integrations/connectors/source-tyntec-sms/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-tyntec-sms/setup.py b/airbyte-integrations/connectors/source-tyntec-sms/setup.py new file mode 100644 index 0000000000000..808a2f4ab62f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_tyntec_sms", + description="Source implementation for Tyntec Sms.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/__init__.py b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/__init__.py new file mode 100644 index 0000000000000..47f93d269e8a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceTyntecSms + +__all__ = ["SourceTyntecSms"] diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/contacts.json b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/contacts.json new file mode 100644 index 0000000000000..4b128e3aeff33 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/contacts.json @@ -0,0 +1,38 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "contacts": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "companyAddress": { + "type": ["null", "string"] + }, + "companyName": { + "type": ["null", "string"] + }, + "contactEmail": { + "type": ["null", "string"] + }, + "contactName": { + "type": ["null", "string"] + }, + "contactPhone": { + "type": ["null", "string"] + }, + "contactTitle": { + "type": ["null", "string"] + }, + "friendlyName": { + "type": ["null", "string"] + } + } + } + }, + "size": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/messages.json b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/messages.json new file mode 100644 index 0000000000000..ddfc3e398febf --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/messages.json @@ -0,0 +1,92 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "doneDate": { + "type": ["null", "string"] + }, + "errorCode": { + "type": ["null", "string"] + }, + "errorReason": { + "type": ["null", "string"] + }, + "from": { + "type": ["null", "string"] + }, + "href": { + "type": ["null", "string"] + }, + "mccmnc": { + "type": ["null", "string"] + }, + "parts": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "currency": { + "type": ["null", "string"] + }, + "deliveryState": { + "type": ["null", "string"] + }, + "doneDate": { + "type": ["null", "string"] + }, + "errorCode": { + "type": ["null", "string"] + }, + "partId": { + "type": ["null", "string"] + }, + "price": { + "type": ["null", "string"] + }, + "priceEffective": { + "type": ["null", "string"], + "format": "date-time" + }, + "sendDate": { + "type": ["null", "string"] + }, + "statusText": { + "type": ["null", "string"] + } + } + } + }, + "overallPrice": { + "type": ["null", "string"] + }, + "priceEffective": { + "type": ["null", "string"], + "format": "date-time" + }, + "reference": { + "type": ["null", "string"] + }, + "requestId": { + "type": ["null", "string"] + }, + "sentDate": { + "type": ["null", "string"] + }, + "size": { + "type": ["null", "integer"] + }, + "status": { + "type": ["null", "string"] + }, + "submitDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "to": { + "type": ["null", "string"] + }, + "ttid": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/phones.json b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/phones.json new file mode 100644 index 0000000000000..c216d7e69d408 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/phones.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "provisioningRequests": { + "type": ["null", "array"], + 
"items": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "contactId": { + "type": ["null", "string"] + }, + "friendlyName": { + "type": ["null", "string"] + }, + "requestId": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + } + } + } + }, + "size": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/registrations.json b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/registrations.json new file mode 100644 index 0000000000000..c216d7e69d408 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/registrations.json @@ -0,0 +1,32 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "provisioningRequests": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "accountId": { + "type": ["null", "string"] + }, + "contactId": { + "type": ["null", "string"] + }, + "friendlyName": { + "type": ["null", "string"] + }, + "requestId": { + "type": ["null", "string"] + }, + "status": { + "type": ["null", "string"] + } + } + } + }, + "size": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/sms.json b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/sms.json new file mode 100644 index 0000000000000..ddfc3e398febf --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/schemas/sms.json @@ -0,0 +1,92 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "doneDate": { + "type": ["null", "string"] + }, + "errorCode": { + "type": ["null", "string"] + }, + "errorReason": { + "type": ["null", "string"] + }, + "from": { + "type": ["null", "string"] + }, + "href": { + "type": ["null", "string"] + }, + "mccmnc": { + "type": ["null", "string"] + }, + "parts": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "currency": { + "type": ["null", "string"] + }, + "deliveryState": { + "type": ["null", "string"] + }, + "doneDate": { + "type": ["null", "string"] + }, + "errorCode": { + "type": ["null", "string"] + }, + "partId": { + "type": ["null", "string"] + }, + "price": { + "type": ["null", "string"] + }, + "priceEffective": { + "type": ["null", "string"], + "format": "date-time" + }, + "sendDate": { + "type": ["null", "string"] + }, + "statusText": { + "type": ["null", "string"] + } + } + } + }, + "overallPrice": { + "type": ["null", "string"] + }, + "priceEffective": { + "type": ["null", "string"], + "format": "date-time" + }, + "reference": { + "type": ["null", "string"] + }, + "requestId": { + "type": ["null", "string"] + }, + "sentDate": { + "type": ["null", "string"] + }, + "size": { + "type": ["null", "integer"] + }, + "status": { + "type": ["null", "string"] + }, + "submitDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "to": { + "type": ["null", "string"] + }, + "ttid": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/source.py b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/source.py new file mode 100644 index 0000000000000..bf81253de26b5 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/source.py @@ -0,0 +1,18 
@@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceTyntecSms(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "tyntec_sms.yaml"}) diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/spec.yaml b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/spec.yaml new file mode 100644 index 0000000000000..372e2a73a0bb8 --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/spec.yaml @@ -0,0 +1,32 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/tyntec-sms +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Tyntec Sms Spec + type: object + required: + - api_key + - to + - from + additionalProperties: true + properties: + api_key: + type: string + title: Tyntec API Key + description: Your Tyntec API Key. See here + order: 0 + airbyte_secret: true + to: + type: string + title: SMS Message Recipient Phone + description: The phone number of the SMS message recipient (international). + order: 1 + from: + type: string + title: SMS Message Sender Phone + description: The phone number of the SMS message sender (international). + order: 2 + message: + type: string + title: SMS Message Body + description: The content of the SMS message to be sent. + order: 3 diff --git a/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/tyntec_sms.yaml b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/tyntec_sms.yaml new file mode 100644 index 0000000000000..715d766c63bba --- /dev/null +++ b/airbyte-integrations/connectors/source-tyntec-sms/source_tyntec_sms/tyntec_sms.yaml @@ -0,0 +1,81 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + # API Docs: https://api.tyntec.com/reference/sms/current.html#sms-api + url_base: "https://api.tyntec.com" + http_method: "GET" + authenticator: + type: ApiKeyAuthenticator + header: "apikey" + api_token: "{{ config['api_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + # API Docs: https://api.tyntec.com/reference/sms/current.html#sms-api-Send%20SMS%20(GET) + sms_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "sms" + primary_key: "requestId" + path: "/messaging/v1/sms?to={{ config['to'] }}&from={{ config['from']}}&message={{ config['message'] or ''}}" + sms_stream_slicer: + type: SubstreamSlicer + parent_stream_configs: + - stream: "*ref(definitions.sms_stream)" + parent_key: "requestId" + stream_slice_field: "requestId" + # API Docs: https://api.tyntec.com/reference/sms/current.html#sms-api-Read%20SMS%20status + messages_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "messages" + primary_key: "requestId" + retriever: + $ref: "*ref(definitions.retriever)" + record_selector: + $ref: "*ref(definitions.selector)" + requester: + $ref: "*ref(definitions.requester)" + path: "/messaging/v1/messages/{{ stream_slice.requestId }}" + stream_slicer: + $ref: "*ref(definitions.sms_stream_slicer)" + # API Docs: 
https://api.tyntec.com/reference/sms/current.html#sms-api-List%20all%20contacts + contacts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "contacts" + path: "/byon/contacts/v1" + # API Docs: https://api.tyntec.com/reference/sms/current.html#sms-api-List%20all%20phone%20numbers + phones_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "phones" + path: "/byon/phonebook/v1/numbers" + # API Docs: https://api.tyntec.com/reference/sms/current.html#sms-api-List%20all%20phones + registrations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "registrations" + path: "/byon/provisioning/v1" + +streams: + - "*ref(definitions.sms_stream)" + - "*ref(definitions.messages_stream)" + - "*ref(definitions.contacts_stream)" + - "*ref(definitions.phones_stream)" + - "*ref(definitions.registrations_stream)" + +check: + stream_names: + - "phones" diff --git a/airbyte-integrations/connectors/source-vantage/.dockerignore b/airbyte-integrations/connectors/source-vantage/.dockerignore new file mode 100644 index 0000000000000..c0388c41ee97c --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_vantage +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-vantage/Dockerfile b/airbyte-integrations/connectors/source-vantage/Dockerfile new file mode 100644 index 0000000000000..98f4a59e8e038 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_vantage ./source_vantage + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-vantage diff --git a/airbyte-integrations/connectors/source-vantage/README.md b/airbyte-integrations/connectors/source-vantage/README.md new file mode 100644 index 0000000000000..27ab78efacdf6 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/README.md @@ -0,0 +1,79 @@ +# Vantage Source Test + +This is the repository for the Vantage configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/vantage). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-vantage:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/vantage) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_vantage/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source vantage test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-vantage:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-vantage:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-vantage:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-vantage:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-vantage:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-vantage:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-vantage:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-vantage:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. 
Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-vantage/__init__.py b/airbyte-integrations/connectors/source-vantage/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml new file mode 100644 index 0000000000000..6a4e59447022e --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/acceptance-test-config.yml @@ -0,0 +1,40 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-vantage:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_vantage/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: + - name: reports + bypass_reason: "This stream can't be seeded in our sandbox account" +# TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file +# expect_records: +# path: "integration_tests/expected_records.txt" +# extra_fields: no +# exact_order: no +# extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" +# TODO uncomment this block this block if your connector implements incremental sync: +# tests: +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-vantage/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-vantage/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-vantage/build.gradle b/airbyte-integrations/connectors/source-vantage/build.gradle new file mode 100644 index 0000000000000..a6274c8c2cbb9 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_vantage' +} diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/__init__.py b/airbyte-integrations/connectors/source-vantage/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-vantage/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-vantage/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-vantage/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..555aeb01ee494 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "providers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "services", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "reports", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-vantage/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..e7823c2be36c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "access_token": "" +} diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-vantage/integration_tests/sample_config.json new file mode 100644 index 0000000000000..1dcd58d49b1d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "access_token": "6F8-ffwMe87mOd6K3pQopAoZXD4PuNCZYBpB7pgUaRg" +} diff --git a/airbyte-integrations/connectors/source-vantage/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-vantage/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-vantage/main.py b/airbyte-integrations/connectors/source-vantage/main.py new file mode 100644 index 0000000000000..af2112debc97e --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_vantage import SourceVantage + +if __name__ == "__main__": + source = SourceVantage() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-vantage/requirements.txt b/airbyte-integrations/connectors/source-vantage/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . 
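A brief usage sketch (assuming a Python 3.9 virtualenv, and mirroring the commands documented for the Python-based connectors elsewhere in this changeset): the editable requirements above pull in the local `source-acceptance-test` package together with the connector itself, after which the entrypoint in `main.py` can be exercised directly:
```
pip install -r requirements.txt
python main.py spec
python main.py check --config secrets/config.json
```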
diff --git a/airbyte-integrations/connectors/source-vantage/setup.py b/airbyte-integrations/connectors/source-vantage/setup.py new file mode 100644 index 0000000000000..1ddfdb789b6c2 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_vantage", + description="Source implementation for Vantage.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/__init__.py b/airbyte-integrations/connectors/source-vantage/source_vantage/__init__.py new file mode 100644 index 0000000000000..95bc26eee66e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceVantage + +__all__ = ["SourceVantage"] diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/products.json b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/products.json new file mode 100644 index 0000000000000..811becb81adcd --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/products.json @@ -0,0 +1,34 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "category": { + "type": "string", + "example": "compute", + "description": "The category of the cloud product" + }, + "name": { + "type": "string", + "example": "EC2", + "description": "The common name of the product." + }, + "service_id": { + "type": "string", + "example": "aws-ec2", + "description": "A unique slug for the service the product belongs to." + }, + "provider_id": { + "type": "string", + "example": "aws", + "description": "A unique slug for the provider the product belongs to." + }, + "details": { + "type": "object", + "description": "An object of metadata about the product." + } + }, + "description": "Products model", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/providers.json b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/providers.json new file mode 100644 index 0000000000000..f8c9ddc07d5ec --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/providers.json @@ -0,0 +1,20 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string", + "example": "AWS", + "description": "The common name of the provider." + }, + "description": { + "type": "string", + "example": "Amazon Web Services", + "description": "The full descriptive name of the provider." 
+ } + }, + "description": "Providers model", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/reports.json b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/reports.json new file mode 100644 index 0000000000000..2f9cd14b3e6a6 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/reports.json @@ -0,0 +1,34 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "title": { + "type": "string", + "example": "Production Environment", + "description": "The title of the cost report." + }, + "earliest_cost_date": { + "type": "string", + "example": "2021-07-09T00:00:00Z", + "description": "The date and time, in UTC, the report was created. ISO 8601 Formatted." + }, + "latest_cost_date": { + "type": "string", + "example": "2021-07-09T00:00:00Z", + "description": "The date and time, in UTC, the report was created. ISO 8601 Formatted." + }, + "created_at": { + "type": "string", + "example": "2021-07-09T00:00:00Z", + "description": "The date and time, in UTC, the report was created. ISO 8601 Formatted." + }, + "workspace": { + "type": "string", + "description": "The name of the workspace the report is a part of." + } + }, + "description": "Reports model", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/services.json b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/services.json new file mode 100644 index 0000000000000..4e5c87a52288f --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/schemas/services.json @@ -0,0 +1,25 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "category": { + "type": "string", + "example": "compute", + "description": "The type of cloud service." + }, + "name": { + "type": "string", + "example": "EC2", + "description": "The common name of the service. Usually an abbreviation." + }, + "description": { + "type": "string", + "example": "Elastic Compute Cloud", + "description": "The full name of the service." + } + }, + "description": "Services model", + "$schema": "http://json-schema.org/schema#" +} diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/source.py b/airbyte-integrations/connectors/source-vantage/source_vantage/source.py new file mode 100644 index 0000000000000..0d907c3358a69 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceVantage(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "vantage.yaml"}) diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/spec.yaml b/airbyte-integrations/connectors/source-vantage/source_vantage/spec.yaml new file mode 100644 index 0000000000000..531ad4ac13726 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/spec.yaml @@ -0,0 +1,16 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/vantage +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Vantage Spec + type: object + required: + - access_token + additionalProperties: true + properties: + access_token: + title: API Access Token + type: string + description: >- + Your API Access token. See here. + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-vantage/source_vantage/vantage.yaml b/airbyte-integrations/connectors/source-vantage/source_vantage/vantage.yaml new file mode 100644 index 0000000000000..76c2367c7a805 --- /dev/null +++ b/airbyte-integrations/connectors/source-vantage/source_vantage/vantage.yaml @@ -0,0 +1,67 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["{{ options['name'] }}"] + requester: + url_base: "https://api.vantage.sh/v1" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['access_token'] }}" + increment_paginator: + type: "DefaultPaginator" + url_base: "*ref(definitions.requester.url_base)" + page_size_option: + inject_into: "request_parameter" + field_name: "limit" + pagination_strategy: + type: "PageIncrement" + page_size: 100 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + $ref: "*ref(definitions.increment_paginator)" + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + providers_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "providers" + primary_key: "id" + path: "/providers" + services_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "services" + primary_key: "id" + path: "/services" + products_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "products" + primary_key: "id" + path: "/products" + reports_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "reports" + primary_key: "id" + path: "/reports" +streams: + - "*ref(definitions.providers_stream)" + - "*ref(definitions.services_stream)" + - "*ref(definitions.products_stream)" + - "*ref(definitions.reports_stream)" + +check: + stream_names: + - "providers" diff --git a/airbyte-integrations/connectors/source-visma-economic/.dockerignore b/airbyte-integrations/connectors/source-visma-economic/.dockerignore new file mode 100644 index 0000000000000..8b516cdb252ea --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_visma_economic +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-visma-economic/BOOTSTRAP.md b/airbyte-integrations/connectors/source-visma-economic/BOOTSTRAP.md new file mode 100644 index 0000000000000..be8113df80fa9 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/BOOTSTRAP.md @@ -0,0 +1,5 @@ +# Visma e-conomic +Visma e-conomic is an accounting program. 
+Using the API, it is possible to interact with most of the functionality in the software. +The streams implemented allow you to do reporting based on invoices. +For more information about use cases of e-conomic, please visit [this page](https://developer.visma.com/api/e-conomic/). \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-visma-economic/Dockerfile b/airbyte-integrations/connectors/source-visma-economic/Dockerfile new file mode 100644 index 0000000000000..f85cd6906f4a7 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_visma_economic ./source_visma_economic + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-visma-economic diff --git a/airbyte-integrations/connectors/source-visma-economic/README.md b/airbyte-integrations/connectors/source-visma-economic/README.md new file mode 100644 index 0000000000000..8ebca9d2a472f --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/README.md @@ -0,0 +1,134 @@ +# Visma Economic Source + +This is the repository for the Visma Economic source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/visma-economic). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-visma-economic:build +``` + +#### Create credentials +For demo credentials see `sample_files/demo_config.json`. + +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/visma-economic) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_visma_economic/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. + +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source visma-economic test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-visma-economic:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-visma-economic:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-visma-economic:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-visma-economic:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-visma-economic:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-visma-economic:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests
Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`.
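+For illustration, here is a minimal sketch of what such a fixture could look like (the `_create_test_resource`/`_delete_test_resource` helpers below are hypothetical placeholders, not part of this connector):
+```
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+def _create_test_resource():
+    # Hypothetical setup helper: a real connector might create sandbox data
+    # (for example a draft invoice) through the API before the tests run.
+    return {"name": "acceptance-test-resource"}
+
+
+def _delete_test_resource(resource):
+    # Hypothetical teardown helper mirroring the setup above.
+    pass
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+    """Create external resources before the acceptance tests run and clean them up afterwards."""
+    resource = _create_test_resource()
+    yield
+    _delete_test_resource(resource)
+```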
+To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with Docker, use the `acceptance-test-docker.sh` script in this connector directory. + +### Using gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-visma-economic:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-visma-economic:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-visma-economic/acceptance-test-config.yml b/airbyte-integrations/connectors/source-visma-economic/acceptance-test-config.yml new file mode 100644 index 0000000000000..95b1c75f6ae6c --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/acceptance-test-config.yml @@ -0,0 +1,26 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-visma-economic:dev +tests: + spec: + - spec_path: "source_visma_economic/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + timeout_seconds: 120 + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["invoices_total"] + expect_records: + path: "integration_tests/expected_records.txt" + extra_fields: no + exact_order: no + extra_records: yes + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-visma-economic/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-visma-economic/acceptance-test-docker.sh new file mode 100755 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-visma-economic/build.gradle b/airbyte-integrations/connectors/source-visma-economic/build.gradle new file mode 100644 index 0000000000000..8e6e22808faef --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_visma_economic' +} diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/__init__.py b/airbyte-integrations/connectors/source-visma-economic/integration_tests/__init__.py new file mode 100644 index 0000000000000..46b7376756ec6 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-visma-economic/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-visma-economic/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-visma-economic/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..fff4c4000a985 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "accounts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "customers", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices_booked", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices_booked_document", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices_paid", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "invoices_total", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "products", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/expected_records.txt b/airbyte-integrations/connectors/source-visma-economic/integration_tests/expected_records.txt new file mode 100644 index 0000000000000..c6787903abcf6 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/expected_records.txt @@ -0,0 +1,12 @@ +{"stream": "accounts", "data": {"accountNumber": 1000, "accountType": "heading", "balance": 0.0, "blockDirectEntries": false, "debitCredit": "debit", "name": "RESULTATOPG\u00d8RELSE", "accountingYears": "https://restapi.e-conomic.com/accounts/1000/accounting-years", "self": "https://restapi.e-conomic.com/accounts/1000"}, "emitted_at": 1667233244356} +{"stream": "accounts", "data": {"accountNumber": 1010, "accountType": "profitAndLoss", "balance": -6010.0, "blockDirectEntries": false, "debitCredit": "credit", "name": "Salg af varer/ydelser m/moms", "vatAccount": {"vatCode": "U25", "self": "https://restapi.e-conomic.com/vat-accounts/U25"}, "accountingYears": "https://restapi.e-conomic.com/accounts/1010/accounting-years", "self": "https://restapi.e-conomic.com/accounts/1010"}, "emitted_at": 1667233244360} +{"stream": "customers", "data": {"customerNumber": 1, "currency": "DKK", "paymentTerms": {"paymentTermsNumber": 1, "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customerGroup": {"customerGroupNumber": 1, "self": "https://restapi.e-conomic.com/customer-groups/1"}, "address": "Avenue des Arts No 5", "balance": -1200.0, "dueAmount": 0.0, 
"city": "Brussels", "country": "Belgium", "email": "customerone@mailinator.com", "name": "Decathlon", "zip": "1040", "telephoneAndFaxNumber": "08343242525", "vatZone": {"vatZoneNumber": 1, "self": "https://restapi.e-conomic.com/vat-zones/1"}, "attention": {"customerContactNumber": 1, "customer": {"customerNumber": 1, "self": "https://restapi.e-conomic.com/customers/1"}, "self": "https://restapi.e-conomic.com/customers/1/contacts/1"}, "customerContact": {"customerContactNumber": 2, "customer": {"customerNumber": 1, "self": "https://restapi.e-conomic.com/customers/1"}, "self": "https://restapi.e-conomic.com/customers/1/contacts/2"}, "salesPerson": {"employeeNumber": 1, "self": "https://restapi.e-conomic.com/employees/1"}, "lastUpdated": "2022-06-02T08:53:29Z", "contacts": "https://restapi.e-conomic.com/customers/1/contacts", "defaultDeliveryLocation": {"deliveryLocationNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/delivery-locations/1"}, "templates": {"invoice": "https://restapi.e-conomic.com/customers/1/templates/invoice", "invoiceLine": "https://restapi.e-conomic.com/customers/1/templates/invoiceline", "self": "https://restapi.e-conomic.com/customers/1/templates"}, "totals": {"drafts": "https://restapi.e-conomic.com/invoices/totals/drafts/customers/1", "booked": "https://restapi.e-conomic.com/invoices/totals/booked/customers/1", "self": "https://restapi.e-conomic.com/customers/1/totals"}, "deliveryLocations": "https://restapi.e-conomic.com/customers/1/delivery-locations", "invoices": {"drafts": "https://restapi.e-conomic.com/customers/1/invoices/drafts", "booked": "https://restapi.e-conomic.com/customers/1/invoices/booked", "self": "https://restapi.e-conomic.com/customers/1/invoices"}, "mobilePhone": "066567657657575", "eInvoicingDisabledByDefault": false, "self": "https://restapi.e-conomic.com/customers/1"}, "emitted_at": 1667233244985} +{"stream": "customers", "data": {"customerNumber": 5, "currency": "EUR", "paymentTerms": {"paymentTermsNumber": 1, "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customerGroup": {"customerGroupNumber": 4, "self": "https://restapi.e-conomic.com/customer-groups/4"}, "address": "Avenue Centrale No 3", "balance": 0.0, "dueAmount": 0.0, "city": "Paris", "country": "France", "email": "freres@mailinator.com", "name": "Les Freres Heureux", "zip": "1231", "telephoneAndFaxNumber": "5475685685", "vatZone": {"vatZoneNumber": 17, "self": "https://restapi.e-conomic.com/vat-zones/17"}, "attention": {"customerContactNumber": 15, "customer": {"customerNumber": 5, "self": "https://restapi.e-conomic.com/customers/5"}, "self": "https://restapi.e-conomic.com/customers/5/contacts/15"}, "customerContact": {"customerContactNumber": 16, "customer": {"customerNumber": 5, "self": "https://restapi.e-conomic.com/customers/5"}, "self": "https://restapi.e-conomic.com/customers/5/contacts/16"}, "salesPerson": {"employeeNumber": 2, "self": "https://restapi.e-conomic.com/employees/2"}, "lastUpdated": "2022-06-07T12:09:44Z", "contacts": "https://restapi.e-conomic.com/customers/5/contacts", "defaultDeliveryLocation": {"deliveryLocationNumber": 5, "self": "https://restapi.e-conomic.com/customers/5/delivery-locations/5"}, "templates": {"invoice": "https://restapi.e-conomic.com/customers/5/templates/invoice", "invoiceLine": "https://restapi.e-conomic.com/customers/5/templates/invoiceline", "self": "https://restapi.e-conomic.com/customers/5/templates"}, "totals": {"drafts": "https://restapi.e-conomic.com/invoices/totals/drafts/customers/5", "booked": 
"https://restapi.e-conomic.com/invoices/totals/booked/customers/5", "self": "https://restapi.e-conomic.com/customers/5/totals"}, "deliveryLocations": "https://restapi.e-conomic.com/customers/5/delivery-locations", "invoices": {"drafts": "https://restapi.e-conomic.com/customers/5/invoices/drafts", "booked": "https://restapi.e-conomic.com/customers/5/invoices/booked", "self": "https://restapi.e-conomic.com/customers/5/invoices"}, "mobilePhone": "09675675685", "eInvoicingDisabledByDefault": false, "self": "https://restapi.e-conomic.com/customers/5"}, "emitted_at": 1667233244987} +{"stream": "invoices_booked", "data": {"bookedInvoiceNumber": 1, "orderNumber": 1, "date": "2022-06-02", "currency": "DKK", "exchangeRate": 100.0, "netAmount": 70.0, "netAmountInBaseCurrency": 70.0, "grossAmount": 87.5, "grossAmountInBaseCurrency": 87.5, "vatAmount": 17.5, "roundingAmount": 0.0, "remainder": 0.0, "remainderInBaseCurrency": 0.0, "dueDate": "2022-06-10", "paymentTerms": {"paymentTermsNumber": 1, "daysOfCredit": 8, "description": "Netto 8 dage", "name": "Netto 8 dage", "paymentTermsType": "net", "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customer": {"customerNumber": 1, "self": "https://restapi.e-conomic.com/customers/1"}, "recipient": {"name": "Customer 1", "address": "Avenue des Arts No 5", "zip": "1040", "city": "Brussels", "country": "Belgium", "attention": {"customerContactNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/contacts/1"}, "vatZone": {"name": "Domestic", "vatZoneNumber": 1, "enabledForCustomer": true, "enabledForSupplier": true, "self": "https://restapi.e-conomic.com/vat-zones/1"}}, "deliveryLocation": {"deliveryLocationNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/delivery-locations/1"}, "delivery": {"address": "Langebrogade 1", "zip": "1411", "city": "K\u00f8benhavn K", "country": "Denmark"}, "references": {"customerContact": {"customerContactNumber": 2, "self": "https://restapi.e-conomic.com/customers/1/contacts/2"}, "salesPerson": {"employeeNumber": 1, "self": "https://restapi.e-conomic.com/employees/1"}}, "layout": {"layoutNumber": 21, "self": "https://restapi.e-conomic.com/layouts/21"}, "pdf": {"download": "https://restapi.e-conomic.com/invoices/booked/1/pdf"}, "sent": "https://restapi.e-conomic.com/invoices/booked/1/sent", "self": "https://restapi.e-conomic.com/invoices/booked/1"}, "emitted_at": 1667233245263} +{"stream": "invoices_booked", "data": {"bookedInvoiceNumber": 2, "orderNumber": 2, "date": "2022-06-02", "currency": "DKK", "exchangeRate": 100.0, "netAmount": 950.0, "netAmountInBaseCurrency": 950.0, "grossAmount": 1187.5, "grossAmountInBaseCurrency": 1187.5, "vatAmount": 237.5, "roundingAmount": 0.0, "remainder": 0.0, "remainderInBaseCurrency": 0.0, "dueDate": "2022-06-10", "paymentTerms": {"paymentTermsNumber": 1, "daysOfCredit": 8, "description": "Netto 8 dage", "name": "Netto 8 dage", "paymentTermsType": "net", "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customer": {"customerNumber": 4, "self": "https://restapi.e-conomic.com/customers/4"}, "recipient": {"name": "Customer with EAN", "address": "Avenue des arts 4", "zip": "3221", "city": "Brussels", "country": "Belgium", "ean": "9773365646824", "attention": {"customerContactNumber": 7, "self": "https://restapi.e-conomic.com/customers/4/contacts/7"}, "vatZone": {"name": "Domestic", "vatZoneNumber": 1, "enabledForCustomer": true, "enabledForSupplier": true, "self": "https://restapi.e-conomic.com/vat-zones/1"}}, "deliveryLocation": {"deliveryLocationNumber": 4, 
"self": "https://restapi.e-conomic.com/customers/4/delivery-locations/4"}, "delivery": {"address": "Avenue des arts no 3", "zip": "1212", "city": "K\u00f8benhavn K", "country": "Denmark"}, "notes": {"heading": "Customer 4"}, "references": {"customerContact": {"customerContactNumber": 8, "self": "https://restapi.e-conomic.com/customers/4/contacts/8"}, "salesPerson": {"employeeNumber": 6, "self": "https://restapi.e-conomic.com/employees/6"}}, "layout": {"layoutNumber": 21, "self": "https://restapi.e-conomic.com/layouts/21"}, "project": {"projectNumber": 1, "self": "https://restapi.e-conomic.com/projects/1"}, "pdf": {"download": "https://restapi.e-conomic.com/invoices/booked/2/pdf"}, "sent": "https://restapi.e-conomic.com/invoices/booked/2/sent", "self": "https://restapi.e-conomic.com/invoices/booked/2"}, "emitted_at": 1667233245266} +{"stream": "invoices_booked_document", "data": {"bookedInvoiceNumber": 1, "orderNumber": 1, "date": "2022-06-02", "currency": "DKK", "exchangeRate": 100.0, "netAmount": 70.0, "netAmountInBaseCurrency": 70.0, "grossAmount": 87.5, "grossAmountInBaseCurrency": 87.5, "vatAmount": 17.5, "roundingAmount": 0.0, "remainder": 0.0, "remainderInBaseCurrency": 0.0, "dueDate": "2022-06-10", "paymentTerms": {"paymentTermsNumber": 1, "daysOfCredit": 8, "description": "Netto 8 dage", "name": "Netto 8 dage", "paymentTermsType": "net", "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customer": {"customerNumber": 1, "self": "https://restapi.e-conomic.com/customers/1"}, "recipient": {"name": "Customer 1", "address": "Avenue des Arts No 5", "zip": "1040", "city": "Brussels", "country": "Belgium", "attention": {"customerContactNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/contacts/1"}, "vatZone": {"name": "Domestic", "vatZoneNumber": 1, "enabledForCustomer": true, "enabledForSupplier": true, "self": "https://restapi.e-conomic.com/vat-zones/1"}}, "deliveryLocation": {"deliveryLocationNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/delivery-locations/1"}, "delivery": {"address": "Langebrogade 1", "zip": "1411", "city": "K\u00f8benhavn K", "country": "Denmark"}, "references": {"customerContact": {"customerContactNumber": 2, "self": "https://restapi.e-conomic.com/customers/1/contacts/2"}, "salesPerson": {"employeeNumber": 1, "self": "https://restapi.e-conomic.com/employees/1"}}, "layout": {"layoutNumber": 21, "self": "https://restapi.e-conomic.com/layouts/21"}, "pdf": {"download": "https://restapi.e-conomic.com/invoices/booked/1/pdf"}, "lines": [{"lineNumber": 1, "sortKey": 1, "description": "T-shirts", "quantity": 1.0, "unitNetPrice": 70.0, "discountPercentage": 0.0, "unitCostPrice": 40.0, "vatRate": 25.0, "vatAmount": 17.5, "totalNetAmount": 70.0, "product": {"productNumber": "1", "self": "https://restapi.e-conomic.com/products/1"}, "unit": {"unitNumber": 1, "name": "stk.", "products": "https://restapi.e-conomic.com/units/1/products", "self": "https://restapi.e-conomic.com/units/1"}}], "sent": "https://restapi.e-conomic.com/invoices/booked/1/sent", "self": "https://restapi.e-conomic.com/invoices/booked/1"}, "emitted_at": 1667233245881} +{"stream": "invoices_booked_document", "data": {"bookedInvoiceNumber": 2, "orderNumber": 2, "date": "2022-06-02", "currency": "DKK", "exchangeRate": 100.0, "netAmount": 950.0, "netAmountInBaseCurrency": 950.0, "grossAmount": 1187.5, "grossAmountInBaseCurrency": 1187.5, "vatAmount": 237.5, "roundingAmount": 0.0, "remainder": 0.0, "remainderInBaseCurrency": 0.0, "dueDate": "2022-06-10", "paymentTerms": 
{"paymentTermsNumber": 1, "daysOfCredit": 8, "description": "Netto 8 dage", "name": "Netto 8 dage", "paymentTermsType": "net", "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customer": {"customerNumber": 4, "self": "https://restapi.e-conomic.com/customers/4"}, "recipient": {"name": "Customer with EAN", "address": "Avenue des arts 4", "zip": "3221", "city": "Brussels", "country": "Belgium", "ean": "9773365646824", "attention": {"customerContactNumber": 7, "self": "https://restapi.e-conomic.com/customers/4/contacts/7"}, "vatZone": {"name": "Domestic", "vatZoneNumber": 1, "enabledForCustomer": true, "enabledForSupplier": true, "self": "https://restapi.e-conomic.com/vat-zones/1"}}, "deliveryLocation": {"deliveryLocationNumber": 4, "self": "https://restapi.e-conomic.com/customers/4/delivery-locations/4"}, "delivery": {"address": "Avenue des arts no 3", "zip": "1212", "city": "K\u00f8benhavn K", "country": "Denmark"}, "notes": {"heading": "Customer 4"}, "references": {"customerContact": {"customerContactNumber": 8, "self": "https://restapi.e-conomic.com/customers/4/contacts/8"}, "salesPerson": {"employeeNumber": 6, "self": "https://restapi.e-conomic.com/employees/6"}}, "layout": {"layoutNumber": 21, "self": "https://restapi.e-conomic.com/layouts/21"}, "project": {"projectNumber": 1, "self": "https://restapi.e-conomic.com/projects/1"}, "pdf": {"download": "https://restapi.e-conomic.com/invoices/booked/2/pdf"}, "lines": [{"lineNumber": 1, "sortKey": 1, "description": "T-shirts", "quantity": 10.0, "unitNetPrice": 70.0, "discountPercentage": 0.0, "unitCostPrice": 40.0, "vatRate": 25.0, "vatAmount": 175.0, "totalNetAmount": 700.0, "product": {"productNumber": "1", "self": "https://restapi.e-conomic.com/products/1"}, "unit": {"unitNumber": 1, "name": "stk.", "products": "https://restapi.e-conomic.com/units/1/products", "self": "https://restapi.e-conomic.com/units/1"}}, {"lineNumber": 2, "sortKey": 2, "description": "T-shirts", "quantity": 1.0, "unitNetPrice": 250.0, "discountPercentage": 0.0, "unitCostPrice": 40.0, "vatRate": 25.0, "vatAmount": 62.5, "totalNetAmount": 250.0, "product": {"productNumber": "1", "self": "https://restapi.e-conomic.com/products/1"}, "unit": {"unitNumber": 1, "name": "stk.", "products": "https://restapi.e-conomic.com/units/1/products", "self": "https://restapi.e-conomic.com/units/1"}}], "sent": "https://restapi.e-conomic.com/invoices/booked/2/sent", "self": "https://restapi.e-conomic.com/invoices/booked/2"}, "emitted_at": 1667233246020} +{"stream": "invoices_paid", "data": {"bookedInvoiceNumber": 1, "orderNumber": 1, "date": "2022-06-02", "currency": "DKK", "exchangeRate": 100.0, "netAmount": 70.0, "netAmountInBaseCurrency": 70.0, "grossAmount": 87.5, "grossAmountInBaseCurrency": 87.5, "vatAmount": 17.5, "roundingAmount": 0.0, "remainder": 0.0, "remainderInBaseCurrency": 0.0, "dueDate": "2022-06-10", "paymentTerms": {"paymentTermsNumber": 1, "daysOfCredit": 8, "description": "Netto 8 dage", "name": "Netto 8 dage", "paymentTermsType": "net", "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customer": {"customerNumber": 1, "self": "https://restapi.e-conomic.com/customers/1"}, "recipient": {"name": "Customer 1", "address": "Avenue des Arts No 5", "zip": "1040", "city": "Brussels", "country": "Belgium", "attention": {"customerContactNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/contacts/1"}, "vatZone": {"name": "Domestic", "vatZoneNumber": 1, "enabledForCustomer": true, "enabledForSupplier": true, "self": 
"https://restapi.e-conomic.com/vat-zones/1"}}, "deliveryLocation": {"deliveryLocationNumber": 1, "self": "https://restapi.e-conomic.com/customers/1/delivery-locations/1"}, "delivery": {"address": "Langebrogade 1", "zip": "1411", "city": "K\u00f8benhavn K", "country": "Denmark"}, "references": {"customerContact": {"customerContactNumber": 2, "self": "https://restapi.e-conomic.com/customers/1/contacts/2"}, "salesPerson": {"employeeNumber": 1, "self": "https://restapi.e-conomic.com/employees/1"}}, "layout": {"layoutNumber": 21, "self": "https://restapi.e-conomic.com/layouts/21"}, "pdf": {"download": "https://restapi.e-conomic.com/invoices/booked/1/pdf"}, "sent": "https://restapi.e-conomic.com/invoices/booked/1/sent", "self": "https://restapi.e-conomic.com/invoices/booked/1"}, "emitted_at": 1667233246938} +{"stream": "invoices_paid", "data": {"bookedInvoiceNumber": 2, "orderNumber": 2, "date": "2022-06-02", "currency": "DKK", "exchangeRate": 100.0, "netAmount": 950.0, "netAmountInBaseCurrency": 950.0, "grossAmount": 1187.5, "grossAmountInBaseCurrency": 1187.5, "vatAmount": 237.5, "roundingAmount": 0.0, "remainder": 0.0, "remainderInBaseCurrency": 0.0, "dueDate": "2022-06-10", "paymentTerms": {"paymentTermsNumber": 1, "daysOfCredit": 8, "description": "Netto 8 dage", "name": "Netto 8 dage", "paymentTermsType": "net", "self": "https://restapi.e-conomic.com/payment-terms/1"}, "customer": {"customerNumber": 4, "self": "https://restapi.e-conomic.com/customers/4"}, "recipient": {"name": "Customer with EAN", "address": "Avenue des arts 4", "zip": "3221", "city": "Brussels", "country": "Belgium", "ean": "9773365646824", "attention": {"customerContactNumber": 7, "self": "https://restapi.e-conomic.com/customers/4/contacts/7"}, "vatZone": {"name": "Domestic", "vatZoneNumber": 1, "enabledForCustomer": true, "enabledForSupplier": true, "self": "https://restapi.e-conomic.com/vat-zones/1"}}, "deliveryLocation": {"deliveryLocationNumber": 4, "self": "https://restapi.e-conomic.com/customers/4/delivery-locations/4"}, "delivery": {"address": "Avenue des arts no 3", "zip": "1212", "city": "K\u00f8benhavn K", "country": "Denmark"}, "notes": {"heading": "Customer 4"}, "references": {"customerContact": {"customerContactNumber": 8, "self": "https://restapi.e-conomic.com/customers/4/contacts/8"}, "salesPerson": {"employeeNumber": 6, "self": "https://restapi.e-conomic.com/employees/6"}}, "layout": {"layoutNumber": 21, "self": "https://restapi.e-conomic.com/layouts/21"}, "project": {"projectNumber": 1, "self": "https://restapi.e-conomic.com/projects/1"}, "pdf": {"download": "https://restapi.e-conomic.com/invoices/booked/2/pdf"}, "sent": "https://restapi.e-conomic.com/invoices/booked/2/sent", "self": "https://restapi.e-conomic.com/invoices/booked/2"}, "emitted_at": 1667233246940} +{"stream": "products", "data": {"productNumber": "1", "description": "V-cut T-shirt, size S-XXL", "name": "Noname T-shirt Black", "costPrice": 40.0, "recommendedPrice": 90.0, "salesPrice": 70.0, "barred": false, "minimumStock": 0.0, "lastUpdated": "2022-06-24T08:25:00Z", "productGroup": {"productGroupNumber": 1, "name": "Varer m/moms", "salesAccounts": "https://restapi.e-conomic.com/product-groups/1/sales-accounts", "products": "https://restapi.e-conomic.com/product-groups/1/products", "self": "https://restapi.e-conomic.com/product-groups/1"}, "unit": {"unitNumber": 1, "name": "stk.", "products": "https://restapi.e-conomic.com/units/1/products", "self": "https://restapi.e-conomic.com/units/1"}, "invoices": {"drafts": 
"https://restapi.e-conomic.com/products/1/invoices/drafts", "booked": "https://restapi.e-conomic.com/products/1/invoices/booked", "self": "https://restapi.e-conomic.com/products/1/invoices"}, "pricing": {"currencySpecificSalesPrices": "https://restapi.e-conomic.com/products/1/pricing/currency-specific-sales-prices"}, "self": "https://restapi.e-conomic.com/products/1"}, "emitted_at": 1667233247352} +{"stream": "products", "data": {"productNumber": "2", "description": "This product has been barred", "name": "Barred product", "costPrice": 20.0, "recommendedPrice": 50.0, "salesPrice": 50.0, "barred": true, "minimumStock": 0.0, "lastUpdated": "2022-06-24T08:25:00Z", "productGroup": {"productGroupNumber": 1, "name": "Varer m/moms", "salesAccounts": "https://restapi.e-conomic.com/product-groups/1/sales-accounts", "products": "https://restapi.e-conomic.com/product-groups/1/products", "self": "https://restapi.e-conomic.com/product-groups/1"}, "unit": {"unitNumber": 1, "name": "stk.", "products": "https://restapi.e-conomic.com/units/1/products", "self": "https://restapi.e-conomic.com/units/1"}, "invoices": {"drafts": "https://restapi.e-conomic.com/products/2/invoices/drafts", "booked": "https://restapi.e-conomic.com/products/2/invoices/booked", "self": "https://restapi.e-conomic.com/products/2/invoices"}, "pricing": {"currencySpecificSalesPrices": "https://restapi.e-conomic.com/products/2/pricing/currency-specific-sales-prices"}, "self": "https://restapi.e-conomic.com/products/2"}, "emitted_at": 1667233247354} diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-visma-economic/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..ad36aea487036 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "agreement_grant_token": "invalid_token", + "app_secret_token": "invalid_secret_token" +} diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-visma-economic/integration_tests/sample_config.json new file mode 100644 index 0000000000000..6767e46fc2a5d --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "app_secret_token": "example_secret_token", + "agreement_grant_token": "example_agreement_grant_token" +} diff --git a/airbyte-integrations/connectors/source-visma-economic/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-visma-economic/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/main.py b/airbyte-integrations/connectors/source-visma-economic/main.py new file mode 100644 index 0000000000000..0138cafb50f0b --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_visma_economic import SourceVismaEconomic + +if __name__ == "__main__": + source = SourceVismaEconomic() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-visma-economic/requirements.txt b/airbyte-integrations/connectors/source-visma-economic/requirements.txt new file mode 100644 index 0000000000000..78140e52009f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-visma-economic/setup.py b/airbyte-integrations/connectors/source-visma-economic/setup.py new file mode 100644 index 0000000000000..5c35d2a6aa98b --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/setup.py @@ -0,0 +1,25 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = ["pytest~=6.1", "pytest-mock~=3.6.1", "source-acceptance-test", "responses~=0.13.3"] + +setup( + name="source_visma_economic", + description="Source implementation for Visma Economic.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/__init__.py b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/__init__.py new file mode 100644 index 0000000000000..4b60788947b2d --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceVismaEconomic + +__all__ = ["SourceVismaEconomic"] diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/accounts.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/accounts.json new file mode 100644 index 0000000000000..657c6bc3cf24b --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/accounts.json @@ -0,0 +1,154 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Accounts collection schema", + "description": "A schema for retrieving the accounts of the chart of accounts.", + "type": "object", + "restdocs": "http://restdocs.e-conomic.com/#get-accounts", + "properties": { + "accountNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "defaultsorting": "ascending", + "description": "The account's number." + }, + "accountType": { + "type": "string", + "default": "profitAndLoss", + "filterable": true, + "sortable": true, + "description": "The type of account in the chart of accounts." + }, + "balance": { + "type": "number", + "maxDecimal": 2, + "filterable": true, + "sortable": true, + "description": "The current balance of the account." + }, + "draftBalance": { + "type": "number", + "maxDecimals": 2, + "description": "The current balance of the account including draft (not yet booked) entries." + }, + "barred": { + "type": "boolean", + "filterable": true, + "description": "Shows if the account is barred from being used." 
+ }, + "blockDirectEntries": { + "type": "boolean", + "sortable": true, + "filterable": true, + "description": "Determines if the account can be manually updated with entries." + }, + "contraAccount": { + "type": "object", + "description": "The default contra account of the account.", + "properties": { + "accountNumber": { + "type": "integer", + "description": "Account number of the contra account." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self link of the contra account." + } + } + }, + "debitCredit": { + "enum": ["debit", "credit"], + "default": "debit", + "sortable": true, + "filterable": true, + "description": "Describes the default update type of the account." + }, + "name": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The name of the account." + }, + "vatAccount": { + "type": "object", + "description": "The default VAT code for this account.", + "properties": { + "vatCode": { + "type": "string", + "description": "The VAT code of the VAT account for this account." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self link of the VAT code." + } + } + }, + "accountsSummed": { + "type": "array", + "description": "An array of the account intervals used for calculating the total for this account.", + "items": { + "type": "object", + "description": "An account interval.", + "properties": { + "fromAccount": { + "type": "object", + "description": "The first account in the interval.", + "properties": { + "accountNumber": { + "type": "integer", + "description": "Account number of the first account in the interval." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self link of the first account in the interval." + } + } + }, + "toAccount": { + "type": "object", + "description": "The last account in the interval.", + "properties": { + "accountNumber": { + "type": "integer", + "description": "Account number of the last account in the interval." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self link of the last account in the interval." + } + } + } + } + } + }, + "totalFromAccount": { + "type": "object", + "description": "The account from which the sum total for this account is calculated.", + "properties": { + "accountNumber": { + "type": "integer", + "description": "Account number of the first account." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self link of the first account." + } + } + }, + "accountingYears": { + "type": "string", + "format": "uri", + "description": "A link to a list of accounting years for which the account is usable." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the account resource." + } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/customers.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/customers.json new file mode 100644 index 0000000000000..02ee90159f9c8 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/customers.json @@ -0,0 +1,337 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Customer collection GET schema", + "description": "A schema for fetching a collection of customer, aka. 
Debtor.", + "type": "object", + "restdocs": "http://restdocs.e-conomic.com/#get-customers", + "properties": { + "address": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "Address for the customer including street and number." + }, + "balance": { + "type": "number", + "readOnly": true, + "sortable": true, + "filterable": true, + "description": "The outstanding amount for this customer." + }, + "barred": { + "type": "boolean", + "filterable": true, + "description": "Boolean indication of whether the customer is barred from invoicing." + }, + "city": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The customer's city." + }, + "corporateIdentificationNumber": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "Corporate Identification Number. For example CVR in Denmark." + }, + "pNumber": { + "type": "string", + "minLength": 10, + "description": "Extension of corporate identification number (CVR). Identifying separate production unit (p-nummer)." + }, + "country": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The customer's country." + }, + "creditLimit": { + "type": "number", + "sortable": true, + "filterable": true, + "description": "A maximum credit for this customer. Once the maximum is reached or passed in connection with an order/quotation/invoice for this customer you see a warning in e-conomic." + }, + "currency": { + "type": "string", + "minLength": 3, + "sortable": true, + "filterable": true, + "description": "Default payment currency." + }, + "customerNumber": { + "type": "integer", + "maximum": 999999999, + "minimum": 1, + "sortable": true, + "filterable": true, + "description": "The customer number is a positive unique numerical identifier with a maximum of 9 digits." + }, + "ean": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "European Article Number. EAN is used for invoicing the Danish public sector." + }, + "email": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "Customer e-mail address where e-conomic invoices should be emailed. Note: you can specify multiple email addresses in this field, separated by a space. If you need to send a copy of the invoice or write to other e-mail addresses, you can also create one or more customer contacts." + }, + "lastUpdated": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}Z", + "sortable": true, + "filterable": true, + "description": "The date this customer was last updated. The date is formatted according to ISO-8601." + }, + "name": { + "type": "string", + "minLength": 1, + "sortable": true, + "filterable": true, + "description": "The customer name." + }, + "publicEntryNumber": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The public entry number is used for electronic invoicing, to define the account invoices will be registered on at the customer." + }, + "telephoneAndFaxNumber": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The customer's telephone and/or fax number." + }, + "mobilePhone": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The customer's mobile phone number." + }, + "eInvoicingDisabledByDefault": { + "type": "boolean", + "readonly": false, + "description": "Boolean indication of whether the default sending method should be email instead of e-invoice. 
This property is updatable only by using PATCH to /customers/:customerNumber" + }, + "vatNumber": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The customer's value added tax identification number. This field is only available to agreements in Sweden, UK, Germany, Poland and Finland. Not to be mistaken for the danish CVR number, which is defined on the corporateIdentificationNumber property." + }, + "website": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "Customer website, if applicable." + }, + "zip": { + "type": "string", + "sortable": true, + "filterable": true, + "description": "The customer's postcode." + }, + "contacts": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the customer contacts items." + }, + "deliveryLocations": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the customer delivery locations items." + }, + "defaultDeliveryLocation": { + "type": "object", + "description": "Customers default delivery location.", + "properties": { + "deliveryLocationNumber": { + "type": "integer", + "description": "The unique identifier of the delivery location." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the delivery location." + } + } + }, + "attention": { + "type": "object", + "description": "The customer's person of attention.", + "properties": { + "customerContactNumber": { + "type": "integer", + "description": "The unique identifier of the customer employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the customer employee item." + } + } + }, + "customerContact": { + "type": "object", + "description": "Reference to main contact employee at customer.", + "properties": { + "customerContactNumber": { + "type": "integer", + "description": "The unique identifier of the customer contact." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the customer contact item." + } + } + }, + "customerGroup": { + "type": "object", + "description": "Reference to the customer group this customer is attached to.", + "properties": { + "customerGroupNumber": { + "type": "integer", + "description": "The unique identifier of the customer group." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the customer group item." + } + } + }, + "layout": { + "type": "object", + "description": "Layout to be applied for invoices and other documents for this customer.", + "properties": { + "layoutNumber": { + "type": "integer", + "description": "The unique identifier of the layout." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the layout item." + } + } + }, + "paymentTerms": { + "type": "object", + "description": "The default payment terms for the customer.", + "properties": { + "paymentTermsNumber": { + "type": "integer", + "description": "The unique identifier of the payment terms." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the payment terms item." + } + } + }, + "salesPerson": { + "type": "object", + "description": "Reference to the employee responsible for contact with this customer.", + "properties": { + "employeeNumber": { + "type": "integer", + "description": "The unique identifier of the employee." 
+ }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the employee resource." + } + } + }, + "vatZone": { + "type": "object", + "description": "Indicates in which VAT-zone the customer is located (e.g.: domestically, in Europe or elsewhere abroad).", + "properties": { + "vatZoneNumber": { + "type": "integer", + "description": "The unique identifier of the VAT-zone." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the VAT-zone item." + } + } + }, + "templates": { + "type": "object", + "description": "", + "properties": { + "invoice": { + "type": "string", + "format": "uri", + "description": "The unique reference to the invoice template." + }, + "invoiceLine": { + "type": "string", + "format": "uri", + "description": "The unique reference to the invoiceLine template." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the templates resource." + } + } + }, + "totals": { + "type": "object", + "description": "", + "properties": { + "drafts": { + "type": "string", + "format": "uri", + "description": "The unique reference to the draft invoice totals for this customer." + }, + "booked": { + "type": "string", + "format": "uri", + "description": "The unique reference to the booked invoice totals for this customer." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the totals resource for this customer." + } + } + }, + "invoices": { + "type": "object", + "description": "", + "properties": { + "drafts": { + "type": "string", + "format": "uri", + "description": "The unique reference to the draft invoices for this customer." + }, + "booked": { + "type": "string", + "format": "uri", + "description": "The unique reference to the booked invoices for this customer." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the invoices resource for this customer." + } + } + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self reference of the customer resource." + } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_booked.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_booked.json new file mode 100644 index 0000000000000..4f8f16dbbf864 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_booked.json @@ -0,0 +1,448 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Booked invoice collection schema", + "description": "A schema for retrieving a collection of booked invoices.", + "type": "object", + "restdocs": "http://restdocs.e-conomic.com/#get-invoices-booked", + "properties": { + "bookedInvoiceNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "A reference number for the booked invoice document." + }, + "date": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "filterable": true, + "sortable": true, + "description": "Invoice issue date. Format according to ISO-8601 (YYYY-MM-DD)." + }, + "currency": { + "type": "string", + "pattern": "[a-zA-Z]{3}", + "filterable": true, + "sortable": true, + "description": "The ISO 4217 currency code of the invoice." 
+ }, + "exchangeRate": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 6, + "description": "The exchange rate between the invoice currency and the base currency of the agreement. The exchange rate expresses how much it will cost in base currency to buy 100 units of the invoice currency." + }, + "netAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the invoice currency before all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "netAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the base currency of the agreement before all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "grossAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the invoice currency after all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "grossAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the base currency of the agreement after all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "vatAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total amount of VAT on the invoice in the invoice currency. This will have the same sign as net amount" + }, + "roundingAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total rounding error, if any, on the invoice in base currency." + }, + "remainder": { + "type": "number", + "maxDecimal": 2, + "readonly": true, + "description": "Remaining amount to be paid." + }, + "remainderInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "readonly": true, + "description": "Remaining amount to be paid in base currency." + }, + "dueDate": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "description": "The date the invoice is due for payment. Only used if the terms of payment is of type 'duedate', in which case it is mandatory. Format according to ISO-8601 (YYYY-MM-DD)." + }, + "paymentTerms": { + "type": "object", + "description": "The terms of payment for the invoice.", + "properties": { + "paymentTermsNumber": { + "type": "integer", + "minimum": 0, + "filterable": true, + "sortable": true, + "description": "A unique identifier of the payment term." + }, + "daysOfCredit": { + "type": "integer", + "minimum": 0, + "description": "The number of days of credit on the invoice. This field is only valid if terms of payment is not of type 'duedate" + }, + "name": { + "type": "string", + "description": "The name of the payment terms." + }, + "paymentTermsType": { + "enum": [ + "net", + "invoiceMonth", + "paidInCash", + "prepaid", + "dueDate", + "factoring", + "invoiceWeekStartingSunday", + "invoiceWeekStartingMonday", + "creditcard" + ], + "description": "The type of payment term." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the payment term resource." + } + } + }, + "customer": { + "type": "object", + "description": "The customer being invoiced.", + "properties": { + "customerNumber": { + "type": "integer", + "maximum": 999999999, + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "The customer id number. The customer id number can be either positive or negative, but it can't be zero." 
+ }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer resource." + } + } + }, + "recipient": { + "type": "object", + "description": "The actual recipient of the invoice. This may be the same info found on the customer (and will probably be so in most cases) but it may also be a different recipient. For instance, the customer placing the order may be ACME Headquarters, but the recipient of the invoice may be ACME IT.", + "properties": { + "name": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The name of the actual recipient." + }, + "address": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The street address of the actual recipient." + }, + "zip": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The zip code of the actual recipient." + }, + "city": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The city of the actual recipient." + }, + "country": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The country of the actual recipient." + }, + "ean": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The 'European Article Number' of the actual recipient." + }, + "publicEntryNumber": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The public entry number of the actual recipient." + }, + "attention": { + "type": "object", + "description": "The person to whom this invoice is addressed.", + "properties": { + "customerContactNumber": { + "type": "integer", + "description": "Unique identifier of the customer employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer employee." + } + } + }, + "vatZone": { + "type": "object", + "description": "Recipient vat zone.", + "properties": { + "vatZoneNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "description": "Unique identifier of the vat zone." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the vat zone." + } + } + }, + "cvr": { + "type": "string", + "description": "The Corporate Identification Number of the recipient for example CVR in Denmark." + } + } + }, + "deliveryLocation": { + "type": "object", + "description": "A reference to the place of delivery for the goods on the invoice", + "properties": { + "deliveryLocationNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "description": "A unique identifier for the delivery location." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the delivery location resource." + } + } + }, + "delivery": { + "type": "object", + "properties": { + "address": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "Street address where the goods must be delivered to the customer." + }, + "zip": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The zip code of the place of delivery." 
+ }, + "city": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The city of the place of delivery" + }, + "country": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The country of the place of delivery" + }, + "deliveryTerms": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "Details about the terms of delivery." + }, + "deliveryDate": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "filterable": true, + "sortable": true, + "description": "The date of delivery." + } + } + }, + "notes": { + "type": "object", + "description": "Notes on the invoice.", + "properties": { + "heading": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The invoice heading. Usually displayed at the top of the invoice." + }, + "textLine1": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The first line of supplementary text on the invoice. This is usually displayed right under the heading in a smaller font." + }, + "textLine2": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The second line of supplementary text in the notes on the invoice. This is usually displayed as a footer on the invoice." + } + } + }, + "references": { + "type": "object", + "description": "Customer and company references related to this invoice.", + "properties": { + "customerContact": { + "type": "object", + "description": "The customer contact is a reference to the employee at the customer to contact regarding the invoice.", + "properties": { + "customerContactNumber": { + "type": "integer", + "minimum": 0, + "description": "Unique identifier of the customer contact." + }, + "customer": { + "type": "object", + "description": "The customer this contact belongs to.", + "properties": { + "customerNumber": { + "type": "integer", + "maximum": 999999999, + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "The customer id number. The customer id number can be either positive or negative, but it can't be zero." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer resource." + } + } + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer contact resource." + } + } + }, + "salesPerson": { + "type": "object", + "description": "The sales person is a reference to the employee who sold the goods on the invoice. This is also the person who is credited with this sale in reports.", + "properties": { + "employeeNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "Unique identifier of the employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the employee resource." + } + } + }, + "vendorReference": { + "type": "object", + "description": "A reference to any second employee involved in the sale.", + "properties": { + "employeeNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "Unique identifier of the employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the employee resource." + } + } + }, + "other": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "A text field that can be used to save any custom reference on the invoice." 
+ } + } + }, + "pdf": { + "type": "object", + "description": "References a pdf representation of this invoice.", + "properties": { + "self": { + "type": "string", + "format": "uri", + "description": "The unique reference of the pdf representation for this booked invoice." + } + } + }, + "layout": { + "type": "object", + "description": "Layout to be applied for this invoice.", + "properties": { + "layoutNumber": { + "type": "integer", + "description": "The unique identifier of the layout." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the layout item." + } + } + }, + "project": { + "type": "object", + "properties": { + "projectNumber": { + "type": "integer", + "minimum": 1, + "description": "A unique identifier of the project." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the project resource." + } + } + }, + "sent": { + "type": "string", + "format": "uri", + "description": "A convenience link to see if the invoice has been sent or not." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self reference of the booked invoice." + } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_booked_document.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_booked_document.json new file mode 100644 index 0000000000000..165e1157a460a --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_booked_document.json @@ -0,0 +1,541 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Booked invoice schema", + "description": "A schema for retrieving a booked invoice.", + "type": "object", + "restdocs": "http://restdocs.e-conomic.com/#get-invoices-booked-bookedinvoicenumber", + "properties": { + "bookedInvoiceNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "A reference number for the booked invoice document." + }, + "date": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "filterable": true, + "sortable": true, + "description": "Invoice issue date. Format according to ISO-8601 (YYYY-MM-DD)." + }, + "currency": { + "type": "string", + "pattern": "[a-zA-Z]{3}", + "filterable": true, + "sortable": true, + "description": "The ISO 4217 currency code of the invoice." + }, + "exchangeRate": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 6, + "description": "The exchange rate between the invoice currency and the base currency of the agreement. The exchange rate expresses how much it will cost in base currency to buy 100 units of the invoice currency." + }, + "netAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the invoice currency before all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "netAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the base currency of the agreement before all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "grossAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the invoice currency after all taxes and discounts have been applied. For a credit note this amount will be negative." 
+ }, + "grossAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount in the base currency of the agreement after all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "vatAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total amount of VAT on the invoice in the invoice currency. This will have the same sign as net amount" + }, + "roundingAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total rounding error, if any, on the invoice in base currency." + }, + "remainder": { + "type": "number", + "maxDecimal": 2, + "readonly": true, + "description": "Remaining amount to be paid." + }, + "remainderInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "readonly": true, + "description": "Remaining amount to be paid in base currency." + }, + "dueDate": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "description": "The date the invoice is due for payment. Only used if the terms of payment is of type 'duedate', in which case it is mandatory. Format according to ISO-8601 (YYYY-MM-DD)." + }, + "paymentTerms": { + "type": "object", + "description": "The terms of payment for the invoice.", + "properties": { + "paymentTermsNumber": { + "type": "integer", + "minimum": 0, + "filterable": true, + "sortable": true, + "description": "A unique identifier of the payment term." + }, + "daysOfCredit": { + "type": "integer", + "minimum": 0, + "description": "The number of days of credit on the invoice. This field is only valid if terms of payment is not of type 'duedate" + }, + "name": { + "type": "string", + "description": "The name of the payment terms." + }, + "paymentTermsType": { + "type": "string", + "description": "The type of payment term." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the payment term resource." + } + } + }, + "customer": { + "type": "object", + "description": "The customer being invoiced.", + "properties": { + "customerNumber": { + "type": "integer", + "maximum": 999999999, + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "The customer number is a positive unique numerical identifier with a maximum of 9 digits." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer resource." + } + } + }, + "recipient": { + "type": "object", + "description": "The actual recipient of the invoice. This may be the same info found on the customer (and will probably be so in most cases) but it may also be a different recipient. For instance, the customer placing the order may be ACME Headquarters, but the recipient of the invoice may be ACME IT.", + "properties": { + "name": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The name of the actual recipient.", + "empty": true + }, + "address": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The street address of the actual recipient.", + "empty": true + }, + "zip": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The zip code of the actual recipient.", + "empty": true + }, + "city": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The city of the actual recipient.", + "empty": true + }, + "country": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The country of the actual recipient." 
+ }, + "ean": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The 'European Article Number' of the actual recipient." + }, + "publicEntryNumber": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The public entry number of the actual recipient." + }, + "attention": { + "type": "object", + "description": "The person to whom this invoice is addressed.", + "properties": { + "customerContactNumber": { + "type": "integer", + "description": "Unique identifier of the customer employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer employee." + } + } + }, + "vatZone": { + "type": "object", + "description": "Recipient vat zone.", + "properties": { + "vatZoneNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "description": "Unique identifier of the vat zone." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the vat zone." + } + } + }, + "cvr": { + "type": "string", + "description": "The Corporate Identification Number of the recipient for example CVR in Denmark." + } + } + }, + "deliveryLocation": { + "type": "object", + "description": "A reference to the place of delivery for the goods on the invoice", + "properties": { + "deliveryLocationNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "description": "A unique identifier for the delivery location." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the delivery location resource." + } + } + }, + "delivery": { + "type": "object", + "properties": { + "address": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "Street address where the goods must be delivered to the customer." + }, + "zip": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The zip code of the place of delivery." + }, + "city": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The city of the place of delivery" + }, + "country": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The country of the place of delivery" + }, + "deliveryTerms": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "Details about the terms of delivery." + }, + "deliveryDate": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "filterable": true, + "sortable": true, + "description": "The date of delivery." + } + } + }, + "notes": { + "type": "object", + "description": "Notes on the invoice.", + "properties": { + "heading": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The invoice heading. Usually displayed at the top of the invoice." + }, + "textLine1": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The first line of supplementary text on the invoice. This is usually displayed right under the heading in a smaller font." + }, + "textLine2": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "The second line of supplementary text in the notes on the invoice. This is usually displayed as a footer on the invoice." 
+ } + } + }, + "references": { + "type": "object", + "description": "Customer and company references related to this invoice.", + "properties": { + "customerContact": { + "type": "object", + "description": "The customer contact is a reference to the employee at the customer to contact regarding the invoice.", + "properties": { + "customerContactNumber": { + "type": "integer", + "minimum": 0, + "description": "Unique identifier of the customer contact." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer contact resource." + } + } + }, + "salesPerson": { + "type": "object", + "description": "The sales person is a reference to the employee who sold the goods on the invoice. This is also the person who is credited with this sale in reports.", + "properties": { + "employeeNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "Unique identifier of the employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the employee resource." + } + } + }, + "vendorReference": { + "type": "object", + "description": "A reference to any second employee involved in the sale.", + "properties": { + "employeeNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "Unique identifier of the employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the employee resource." + } + } + }, + "other": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "A text field that can be used to save any custom reference on the invoice." + } + } + }, + "pdf": { + "type": "object", + "description": "References a pdf representation of this invoice.", + "properties": { + "self": { + "type": "string", + "format": "uri", + "description": "The unique reference of the pdf representation for this booked invoice." + } + } + }, + "layout": { + "type": "object", + "description": "Layout to be applied for this invoice.", + "properties": { + "layoutNumber": { + "type": "integer", + "description": "The unique identifier of the layout." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the layout item." + } + } + }, + "project": { + "type": "object", + "properties": { + "projectNumber": { + "type": "integer", + "minimum": 1, + "description": "A unique identifier of the project." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the project resource." + } + } + }, + "lines": { + "title": "Invoice lines", + "type": "array", + "description": "An array containing the specific invoice lines.", + "items": { + "type": "object", + "description": "An array of the invoice lines that make up the invoice.", + "properties": { + "lineNumber": { + "type": "integer", + "description": "The line number is a unique number within the invoice.", + "minimum": 0 + }, + "sortKey": { + "type": "integer", + "description": "A sort key used to sort the lines in ascending order within the invoice.", + "minimum": 0 + }, + "description": { + "type": "string", + "description": "A description of the product or service sold." + }, + "deliveryDate": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "filterable": true, + "sortable": true, + "description": "Invoice delivery date. The date is formatted according to ISO-8601." 
+ }, + "quantity": { + "type": "number", + "maxDecimal": 2, + "description": "The number of units of goods on the invoice line." + }, + "unitNetPrice": { + "type": "number", + "maxDecimal": 2, + "description": "The price of 1 unit of the goods or services on the invoice line in the invoice currency." + }, + "discountPercentage": { + "type": "number", + "maxDecimal": 2, + "description": "A line discount expressed as a percentage." + }, + "unitCostPrice": { + "type": "number", + "maxDecimal": 2, + "description": "The cost price of 1 unit of the goods or services in the invoice currency." + }, + "vatRate": { + "type": "number", + "maxDecimal": 6, + "description": "The VAT rate in % used to calculate the vat amount on this line." + }, + "vatAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total amount of VAT on the invoice line in the invoice currency. This will have the same sign as total net amount" + }, + "totalNetAmount": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice line amount in the invoice currency before all taxes and discounts have been applied. For a credit note this amount will be negative." + }, + "unit": { + "type": "object", + "description": "The unit of measure applied to the invoice line.", + "properties": { + "unitNumber": { + "type": "integer", + "description": "The unique identifier of the unit.", + "minimum": 0 + }, + "name": { + "type": "string", + "description": "The name of the unit (e.g. 'kg' for weight or 'l' for volume)." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the unit resource." + } + } + }, + "product": { + "type": "object", + "description": "The product or service offered on the invoice line.", + "properties": { + "productNumber": { + "type": "string", + "description": "The unique product number. This can be a stock keeping unit identifier (SKU)." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the product resource." + } + } + }, + "departmentalDistribution": { + "type": "object", + "properties": { + "departmentalDistributionNumber": { + "type": "integer", + "minimum": 1, + "description": "A unique identifier of the departmental distribution." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the departmental distribution resource." + } + } + } + } + } + }, + "sent": { + "type": "string", + "format": "uri", + "description": "A convenience link to see if the invoice has been sent or not." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self reference of the booked invoice." + } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_paid.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_paid.json new file mode 100644 index 0000000000000..44a7a0045b69f --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_paid.json @@ -0,0 +1,444 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Paid invoice", + "type": "object", + "restdocs": "http://restdocs.e-conomic.com/#get-invoices-paid", + "properties": { + "bookedInvoiceNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "A reference number for the booked invoice document." 
+ },
+ "date": {
+ "type": "string",
+ "format": "full-date",
+ "pattern": "\\d{4}-\\d{2}-\\d{2}",
+ "filterable": true,
+ "sortable": true,
+ "description": "Invoice issue date. Format according to ISO-8601 (YYYY-MM-DD)."
+ },
+ "currency": {
+ "type": "string",
+ "pattern": "[a-zA-Z]{3}",
+ "filterable": true,
+ "sortable": true,
+ "description": "The ISO 4217 currency code of the invoice."
+ },
+ "netAmount": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total invoice amount in the invoice currency before all taxes and discounts have been applied. For a credit note this amount will be negative."
+ },
+ "netAmountInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total invoice amount in the base currency of the agreement before all taxes and discounts have been applied. For a credit note this amount will be negative."
+ },
+ "grossAmount": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total invoice amount in the invoice currency after all taxes and discounts have been applied. For a credit note this amount will be negative."
+ },
+ "vatAmount": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total amount of VAT on the invoice in the invoice currency. This will have the same sign as the net amount."
+ },
+ "roundingAmount": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total rounding error, if any, on the invoice in base currency."
+ },
+ "remainder": {
+ "type": "number",
+ "maxDecimal": 2,
+ "readonly": true,
+ "description": "Remaining amount to be paid."
+ },
+ "remainderInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "readonly": true,
+ "description": "Remaining amount to be paid in base currency."
+ },
+ "dueDate": {
+ "type": "string",
+ "format": "full-date",
+ "pattern": "\\d{4}-\\d{2}-\\d{2}",
+ "description": "The date the invoice is due for payment. Format according to ISO-8601 (YYYY-MM-DD). This is only used if the terms of payment is of type 'duedate'."
+ },
+ "paymentTerms": {
+ "type": "object",
+ "description": "The terms of payment for the invoice.",
+ "properties": {
+ "paymentTermsNumber": {
+ "type": "integer",
+ "minimum": 0,
+ "filterable": true,
+ "sortable": true,
+ "description": "A unique identifier of the payment term."
+ },
+ "daysOfCredit": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "The number of days of credit on the invoice. This field is only valid if terms of payment is not of type 'duedate'."
+ },
+ "name": {
+ "type": "string",
+ "maxLength": 50,
+ "description": "The name of the payment terms."
+ },
+ "paymentTermsType": {
+ "type": "string",
+ "maxLength": 30,
+ "description": "The type of the payment term."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A unique reference to the payment term resource."
+ }
+ }
+ },
+ "customer": {
+ "type": "object",
+ "description": "The customer being invoiced.",
+ "properties": {
+ "customerNumber": {
+ "type": "integer",
+ "maximum": 999999999,
+ "minimum": 1,
+ "filterable": true,
+ "sortable": true,
+ "description": "The customer id number. The customer id number can be either positive or negative, but it can't be zero."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A unique reference to the customer resource."
+ }
+ }
+ },
+ "recipient": {
+ "type": "object",
+ "description": "The actual recipient of the invoice. This may be the same info found on the customer (and will probably be so in most cases) but it may also be a different recipient.
For instance, the customer placing the order may be ACME Headquarters, but the recipient of the invoice may be ACME IT.", + "properties": { + "name": { + "type": "string", + "maxLength": 250, + "filterable": true, + "sortable": true, + "description": "The name of the actual recipient." + }, + "address": { + "type": "string", + "maxLength": 250, + "filterable": true, + "sortable": true, + "description": "The street address of the actual recipient." + }, + "zip": { + "type": "string", + "maxLength": 50, + "filterable": true, + "sortable": true, + "description": "The zip code of the actual recipient." + }, + "city": { + "type": "string", + "maxLength": 250, + "filterable": true, + "sortable": true, + "description": "The city of the actual recipient." + }, + "country": { + "type": "string", + "maxLength": 50, + "filterable": true, + "sortable": true, + "description": "The country of the actual recipient." + }, + "ean": { + "type": "string", + "maxLength": 13, + "filterable": true, + "sortable": true, + "description": "The 'European Article Number' of the actual recipient." + }, + "publicEntryNumber": { + "type": "string", + "maxLength": 40, + "filterable": true, + "sortable": true, + "description": "The public entry number of the actual recipient." + }, + "attention": { + "type": "object", + "description": "The person to whom this invoice is addressed.", + "properties": { + "customerContactNumber": { + "type": "integer", + "description": "Unique identifier of the customer employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer employee." + } + } + }, + "vatZone": { + "type": "object", + "description": "Recipient vat zone.", + "properties": { + "vatZoneNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "description": "Unique identifier of the vat zone." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the vat zone." + } + } + }, + "cvr": { + "type": "string", + "description": "The Corporate Identification Number of the recipient for example CVR in Denmark.", + "maxLength": 40 + } + } + }, + "deliveryLocation": { + "type": "object", + "description": "A reference to the place of delivery for the goods on the invoice", + "properties": { + "deliveryLocationNumber": { + "type": "integer", + "filterable": true, + "sortable": true, + "description": "A unique identifier for the delivery location." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the delivery location resource." + } + } + }, + "delivery": { + "type": "object", + "properties": { + "address": { + "type": "string", + "maxLength": 255, + "filterable": true, + "sortable": true, + "description": "Street address where the goods must be delivered to the customer." + }, + "zip": { + "type": "string", + "maxLength": 30, + "filterable": true, + "sortable": true, + "description": "The zip code of the place of delivery." + }, + "city": { + "type": "string", + "maxLength": 50, + "filterable": true, + "sortable": true, + "description": "The city of the place of delivery" + }, + "country": { + "type": "string", + "maxLength": 50, + "filterable": true, + "sortable": true, + "description": "The country of the place of delivery" + }, + "deliveryTerms": { + "type": "string", + "maxLength": 100, + "filterable": true, + "sortable": true, + "description": "Details about the terms of delivery." 
+ }, + "deliveryDate": { + "type": "string", + "format": "full-date", + "pattern": "\\d{4}-\\d{2}-\\d{2}", + "filterable": true, + "sortable": true, + "description": "The date of delivery." + } + } + }, + "notes": { + "type": "object", + "description": "Notes on the invoice.", + "properties": { + "heading": { + "type": "string", + "maxLength": 250, + "filterable": true, + "sortable": true, + "description": "The invoice heading. Usually displayed at the top of the invoice." + }, + "textLine1": { + "type": "string", + "maxLength": 1000, + "filterable": true, + "sortable": true, + "description": "The first line of supplementary text on the invoice. This is usually displayed right under the heading in a smaller font." + }, + "textLine2": { + "type": "string", + "maxLength": 1000, + "filterable": true, + "sortable": true, + "description": "The second line of supplementary text in the notes on the invoice. This is usually displayed as a footer on the invoice." + } + } + }, + "references": { + "type": "object", + "description": "Customer and company references related to this invoice.", + "properties": { + "customerContact": { + "type": "object", + "description": "The customer contact is a reference to the employee at the customer to contact regarding the invoice.", + "properties": { + "customerContactNumber": { + "type": "integer", + "minimum": 0, + "description": "Unique identifier of the customer contact." + }, + "customer": { + "type": "object", + "description": "The customer this contact belongs to.", + "properties": { + "customerNumber": { + "type": "integer", + "maximum": 999999999, + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "The customer id number. The customer id number can be either positive or negative, but it can't be zero." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer resource." + } + } + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the customer contact resource." + } + } + }, + "salesPerson": { + "type": "object", + "description": "The sales person is a reference to the employee who sold the goods on the invoice. This is also the person who is credited with this sale in reports.", + "properties": { + "employeeNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "Unique identifier of the employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the employee resource." + } + } + }, + "vendorReference": { + "type": "object", + "description": "A reference to any second employee involved in the sale.", + "properties": { + "employeeNumber": { + "type": "integer", + "minimum": 1, + "filterable": true, + "sortable": true, + "description": "Unique identifier of the employee." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the employee resource." + } + } + }, + "other": { + "type": "string", + "maxLength": 250, + "filterable": true, + "sortable": true, + "description": "A text field that can be used to save any custom reference on the invoice." + } + } + }, + "pdf": { + "type": "object", + "description": "References a pdf representation of this invoice.", + "properties": { + "download": { + "type": "string", + "format": "uri", + "description": "The unique reference of the pdf representation for this booked invoice." 
+ } + } + }, + "layout": { + "type": "object", + "description": "Layout to be applied for this invoice.", + "properties": { + "layoutNumber": { + "type": "integer", + "description": "The unique identifier of the layout." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique link reference to the layout item." + } + } + }, + "project": { + "type": "object", + "properties": { + "projectNumber": { + "type": "integer", + "minimum": 1, + "description": "A unique identifier of the project." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the project resource." + } + } + }, + "sent": { + "type": "string", + "format": "uri", + "description": "A convenience link to see if the invoice has been sent or not." + }, + "self": { + "type": "string", + "format": "uri", + "description": "The unique self reference of the booked invoice." + } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_total.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_total.json new file mode 100644 index 0000000000000..76b0626e41f4d --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/invoices_total.json @@ -0,0 +1,309 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Invoice totals GET schema", + "description": "A schema for retrieval of the totals of invoices.", + "type": "object", + "restdocs": "http://restdocs.e-conomic.com/#get-invoices-totals", + "properties": { + "drafts": { + "type": "object", + "description": "The totals for draft invoices.", + "properties": { + "netAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount for all draft invoices in the base currency of the agreement before all taxes and discounts have been applied." + }, + "invoiceCount": { + "type": "integer", + "description": "The number of draft invoices." + }, + "description": { + "type": "string", + "description": "A short description about this object." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A reference to the invoices totals draft resource." + } + } + }, + "booked": { + "type": "object", + "description": "The totals for booked invoices.", + "properties": { + "netAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount for all booked invoices in the base currency of the agreement before all taxes and discounts have been applied." + }, + "invoiceCount": { + "type": "integer", + "description": "The number of booked invoices." + }, + "description": { + "type": "string", + "description": "A short description about this object." + }, + "paid": { + "type": "object", + "description": "The totals for booked and paid invoices.", + "properties": { + "netAmountInBaseCurrency": { + "type": "number", + "maxDecimal": 2, + "description": "The total invoice amount for all booked and paid invoices in the base currency of the agreement before all taxes and discounts have been applied." + }, + "invoiceCount": { + "type": "integer", + "description": "The number of booked and paid invoices." + }, + "description": { + "type": "string", + "description": "A short description about this object." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A reference to the invoices totals booked paid resource." 
+ }
+ }
+ },
+ "unpaid": {
+ "type": "object",
+ "description": "The totals for booked and unpaid invoices.",
+ "properties": {
+ "netAmountInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total invoice amount for all booked and unpaid invoices in the base currency of the agreement before all taxes and discounts have been applied."
+ },
+ "grossRemainderInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The gross total remaining to be paid on the booked unpaid invoices."
+ },
+ "invoiceCount": {
+ "type": "integer",
+ "description": "The number of booked and unpaid invoices."
+ },
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "overdue": {
+ "type": "object",
+ "description": "Totals for unpaid booked invoices where due date has been surpassed.",
+ "properties": {
+ "netAmountInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total invoice amount for unpaid booked invoices where due date has been surpassed in the base currency of the agreement before all taxes and discounts have been applied."
+ },
+ "grossRemainderInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The gross total remaining to be paid on the booked, unpaid and overdue invoices."
+ },
+ "invoiceCount": {
+ "type": "integer",
+ "description": "The number of unpaid booked invoices where due date has been surpassed."
+ },
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals booked unpaid overdue resource."
+ }
+ }
+ },
+ "notOverdue": {
+ "type": "object",
+ "description": "Totals for unpaid booked invoices where due date still hasn't been surpassed. This includes invoices that are due today.",
+ "properties": {
+ "netAmountInBaseCurrency": {
+ "type": "number",
+ "maxDecimal": 2,
+ "description": "The total invoice amount for unpaid booked invoices where due date still hasn't been surpassed, in the base currency of the agreement before all taxes and discounts have been applied."
+ },
+ "invoiceCount": {
+ "type": "integer",
+ "description": "The number of unpaid booked invoices where due date still hasn't been surpassed. This includes invoices that are due today."
+ },
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals booked unpaid not overdue resource."
+ }
+ }
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals booked unpaid resource."
+ }
+ }
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals booked resource."
+ }
+ }
+ },
+ "predefinedPeriodFilters": {
+ "type": "object",
+ "description": "Predefined period filters for narrowing the invoice totals down to a specific period.",
+ "properties": {
+ "lastFifteenDays": {
+ "type": "object",
+ "description": "Filter the totals to only include the last fifteen days.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for the last 15 days resource."
+ }
+ }
+ },
+ "lastMonth": {
+ "type": "object",
+ "description": "Filter the totals to only include invoices from the last calendar month.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for the last month resource."
+ }
+ }
+ },
+ "lastSevenDays": {
+ "type": "object",
+ "description": "Filter the totals to only include the last 7 days.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for the last 7 days resource."
+ }
+ }
+ },
+ "lastThirtyDays": {
+ "type": "object",
+ "description": "Filter the totals to only include the last 30 days.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for the last 30 days resource."
+ }
+ }
+ },
+ "lastWeek": {
+ "type": "object",
+ "description": "Filter the totals to only include invoices from the previous week, starting last Monday.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for the last week resource."
+ }
+ }
+ },
+ "lastYear": {
+ "type": "object",
+ "description": "Filter the totals to only include invoices from last calendar year.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for the last year resource."
+ }
+ }
+ },
+ "thisMonth": {
+ "type": "object",
+ "description": "Filter the totals to only include invoices from this calendar month.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for this calendar month resource."
+ }
+ }
+ },
+ "thisWeek": {
+ "type": "object",
+ "description": "Filter the totals to only include invoices from this week, starting Monday.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for this week resource."
+ }
+ }
+ },
+ "thisYear": {
+ "type": "object",
+ "description": "Filter the totals to only include invoices from this calendar year.",
+ "properties": {
+ "description": {
+ "type": "string",
+ "description": "A short description about this object."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals for this calendar year resource."
+ }
+ }
+ }
+ }
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A reference to the invoices totals resource."
+ } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/products.json b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/products.json new file mode 100644 index 0000000000000..23ddd9767dda8 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/schemas/products.json @@ -0,0 +1,396 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "title": "Products collection GET schema", + "restdocs": "http://restdocs.e-conomic.com/#get-products", + "type": "object", + "description": "A schema for retrieval of a collection of products.", + "properties": { + "productNumber": { + "type": "string", + "minLength": 1, + "filterable": true, + "sortable": true, + "description": "Unique alphanumeric product number." + }, + "description": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "Free text description of product." + }, + "name": { + "type": "string", + "minLength": 1, + "filterable": true, + "sortable": true, + "description": "Descriptive name of the product." + }, + "costPrice": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 2, + "description": "The cost of the goods. If you have the inventory module enabled, this is read-only and will just be ignored." + }, + "recommendedPrice": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 2, + "description": "Recommended retail price of the goods." + }, + "salesPrice": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 2, + "description": "This is the unit net price that will appear on invoice lines when a product is added to an invoice line." + }, + "barCode": { + "type": "string", + "filterable": true, + "sortable": true, + "description": "String representation of a machine readable barcode symbol that represents this product." + }, + "barred": { + "type": "boolean", + "filterable": true, + "sortable": true, + "description": "If this value is true, then the product can no longer be sold, and trying to book an invoice with this product will not be possible." + }, + "lastUpdated": { + "type": "string", + "filterable": true, + "sortable": true, + "readOnly": true, + "description": "The last time the product was updated, either directly or through inventory changed. The date is formatted according to ISO-8601." + }, + "invoices": { + "type": "object", + "description": "A collection of convenience links to invoices that contains this product.", + "properties": { + "drafts": { + "type": "string", + "format": "uri", + "description": "A unique reference to the draft invoices containing this product." + }, + "booked": { + "type": "string", + "format": "uri", + "description": "A unique reference to the booked invoices containing this product." + } + } + }, + "inventory": { + "type": "object", + "description": "A collection of properties that are only applicable if the inventory module is enabled.", + "properties": { + "available": { + "type": "number", + "readOnly": true, + "maxDecimal": 2, + "description": "The number of units available to sell. This is the difference between the amount in stock and the amount ordered by customers." + }, + "inStock": { + "type": "number", + "readOnly": true, + "maxDecimal": 2, + "description": "The number of units in stock including any that have been ordered by customers." 
+ }, + "orderedByCustomers": { + "type": "number", + "readOnly": true, + "maxDecimal": 2, + "description": "The number of units that have been ordered by customers, but haven't been sold yet." + }, + "orderedFromSuppliers": { + "type": "number", + "readOnly": true, + "maxDecimal": 2, + "description": "The number of units that have been ordered from your suppliers, but haven't been delivered to you yet." + }, + "packageVolume": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 2, + "description": "The volume the shipped package makes up." + }, + "grossWeight": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 2, + "readOnly": true, + "description": "The gross weight of the product." + }, + "netWeight": { + "type": "number", + "filterable": true, + "sortable": true, + "maxDecimal": 2, + "readOnly": true, + "description": "The net weight of the product." + }, + "inventoryLastUpdated": { + "type": "string", + "readOnly": true, + "description": "The last time this product was updated with regards to inventory." + }, + "recommendedCostPrice": { + "type": "number", + "filterable": true, + "maxDecimal": 2, + "description": "The recommendedCostPrice of the product." + } + } + }, + "unit": { + "type": "object", + "description": "A reference to the unit this product is counted in.", + "properties": { + "unitNumber": { + "type": "integer", + "filterable": true, + "description": "Unique number identifying the unit." + }, + "name": { + "type": "string", + "description": "The name of the unit." + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the unit resource." + } + } + }, + "productGroup": { + "type": "object", + "description": "A reference to the product group this product is contained within.", + "properties": { + "productGroupNumber": { + "type": "integer", + "filterable": true, + "description": "Unique number identifying the product group." + }, + "name": { + "type": "string", + "minLength": 1, + "description": "Descriptive name of the product group." + }, + "salesAccounts": { + "type": "string", + "format": "uri", + "readOnly": true, + "description": "A reference to the sales accounts in this product group resource." + }, + "products": { + "type": "string", + "format": "uri", + "readOnly": true, + "description": "A reference to the products in this product group resource." + }, + "inventoryEnabled": { + "type": "boolean", + "readOnly": true, + "description": "States if the product group is inventory enabled or not." + }, + "accrual": { + "type": "object", + "readOnly": true, + "description": "A reference to the accrual account this product group is connected to.", + "properties": { + "accountNumber": { + "type": "integer", + "readOnly": true, + "description": "Unique number identifying the accruals account." + }, + "accountType": { + "type": "string", + "readOnly": true, + "description": "The type of account in the chart of accounts." + }, + "balance": { + "type": "number", + "maxDecimal": 2, + "readOnly": true, + "description": "The current balance of the accruals account." + }, + "draftBalance": { + "type": "number", + "maxDecimals": 2, + "readOnly": true, + "description": "The current balance of the account including draft (not yet booked) entries." + }, + "barred": { + "type": "boolean", + "readOnly": true, + "description": "Shows if the account is barred from being used." 
+ },
+ "blockDirectEntries": {
+ "type": "boolean",
+ "readOnly": true,
+ "description": "Determines if the account can be manually updated with entries."
+ },
+ "contraAccount": {
+ "type": "object",
+ "readOnly": true,
+ "description": "The default contra account of the account.",
+ "properties": {
+ "accountNumber": {
+ "type": "integer",
+ "readOnly": true,
+ "description": "Account number of the contra account."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "The unique self link of the contra account."
+ }
+ }
+ },
+ "debitCredit": {
+ "type": "string",
+ "readOnly": true,
+ "description": "Describes the default update type of the account."
+ },
+ "name": {
+ "type": "string",
+ "readOnly": true,
+ "description": "The name of the account."
+ },
+ "vatAccount": {
+ "type": "object",
+ "readOnly": true,
+ "description": "The default VAT code for this account.",
+ "properties": {
+ "vatCode": {
+ "type": "string",
+ "readOnly": true,
+ "description": "The VAT code of the VAT account for this account."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "The unique self link of the VAT code."
+ }
+ }
+ },
+ "accountsSummed": {
+ "type": "array",
+ "readOnly": true,
+ "description": "An array of the account intervals used for calculating the total for this account.",
+ "items": {
+ "type": "object",
+ "readOnly": true,
+ "description": "An account interval.",
+ "properties": {
+ "fromAccount": {
+ "type": "object",
+ "readOnly": true,
+ "description": "The first account in the interval.",
+ "properties": {
+ "accountNumber": {
+ "type": "integer",
+ "readOnly": true,
+ "description": "Account number of the first account in the interval."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "The unique self link of the first account in the interval."
+ }
+ }
+ },
+ "toAccount": {
+ "type": "object",
+ "readOnly": true,
+ "description": "The last account in the interval.",
+ "properties": {
+ "accountNumber": {
+ "type": "integer",
+ "readOnly": true,
+ "description": "Account number of the last account in the interval."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "The unique self link of the last account in the interval."
+ }
+ }
+ }
+ }
+ }
+ },
+ "totalFromAccount": {
+ "type": "object",
+ "readOnly": true,
+ "description": "The account from which the sum total for this account is calculated.",
+ "properties": {
+ "accountNumber": {
+ "type": "integer",
+ "readOnly": true,
+ "description": "Account number of the first account."
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "The unique self link of the first account."
+ }
+ }
+ },
+ "accountingYears": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "A link to a list of accounting years for which the account is usable."
+ }
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "readOnly": true,
+ "description": "A unique reference to the accruals account resource."
+ }
+ },
+ "self": {
+ "type": "string",
+ "format": "uri",
+ "description": "A unique reference to the product group resource."
+ }
+ }
+ },
+ "departmentalDistribution": {
+ "type": "object",
+ "description": "A departmental distribution defines which departments this entry is distributed between.
This requires the departments module to be enabled.", + "properties": { + "departmentalDistributionNumber": { + "type": "integer", + "filterable": true, + "minimum": 1, + "description": "A unique identifier of the departmental distribution." + }, + "distributionType": { + "type": "string", + "description": "Type of the distribution" + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to the departmental distribution resource." + } + } + }, + "self": { + "type": "string", + "format": "uri", + "description": "A unique reference to this product resource." + } + } +} diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/source.py b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/source.py new file mode 100644 index 0000000000000..5035bfb1b6620 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/source.py @@ -0,0 +1,147 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple +from urllib.parse import parse_qs, urlparse + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream + + +class VismaEconomicStream(HttpStream, ABC): + url_base: str = "https://restapi.e-conomic.com/" + page_size: int = 1000 + + def __init__(self, app_secret_token: str = None, agreement_grant_token: str = None): + self.app_secret_token: str = app_secret_token + self.agreement_grant_token: str = agreement_grant_token + super().__init__() + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + response_json = response.json() + if "nextPage" in response_json.get("pagination", {}).keys(): + parsed_url = urlparse(response_json["pagination"]["nextPage"]) + query_params = parse_qs(parsed_url.query) + return query_params + else: + return None + + def request_params(self, next_page_token: Mapping[str, Any] = None, **kwargs) -> MutableMapping[str, Any]: + if next_page_token: + return dict(next_page_token) + else: + return {"skippages": 0, "pagesize": self.page_size} + + def request_headers(self, **kwargs) -> Mapping[str, Any]: + return {"X-AppSecretToken": self.app_secret_token, "X-AgreementGrantToken": self.agreement_grant_token} + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + yield from response.json().get("collection", []) + + +class Accounts(VismaEconomicStream): + primary_key = "accountNumber" + + def path(self, **kwargs) -> str: + return "accounts" + + +class Customers(VismaEconomicStream): + primary_key = "customerNumber" + + def path(self, **kwargs) -> str: + return "customers" + + +class Products(VismaEconomicStream): + primary_key = "productNumber" + + def path(self, **kwargs) -> str: + return "products" + + +class InvoicesTotal(VismaEconomicStream): + primary_key = None + + def path(self, **kwargs) -> str: + return "invoices/totals" + + +class InvoicesPaid(VismaEconomicStream): + primary_key = "bookedInvoiceNumber" + + def path(self, **kwargs) -> str: + return "invoices/paid" + + +class InvoicesBooked(VismaEconomicStream): + primary_key = "bookedInvoiceNumber" + + def path(self, **kwargs) -> str: + return "invoices/booked" + + +class InvoicesBookedDocument(HttpSubStream, VismaEconomicStream): + primary_key = 
"bookedInvoiceNumber" + + def __init__(self, **kwargs): + super().__init__(InvoicesBooked(**kwargs), **kwargs) + + def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: + booked_invoice_number = stream_slice["parent"]["bookedInvoiceNumber"] + return f"invoices/booked/{booked_invoice_number}" + + def __is_missing_booked_invoice_number(self, response: requests.Response) -> bool: + try: + response.raise_for_status() + except requests.HTTPError as exc: + response_json = response.json() + if "error_code" in response_json and response_json.get("error_code") == "NO_SUCH_BOOKED_INVOICE_NUMBER": + self.logger.info(response.text) + return True + else: + self.logger.error(response.text) + raise exc + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + if not self.__is_missing_booked_invoice_number(response): + yield response.json() + + +class SourceVismaEconomic(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + """ + :param config: the user-input config object conforming to the connector's spec.yaml + :param logger: logger object + :return Tuple[bool, any]: (True, None) if the input config can be used to connect to the API successfully, (False, error) otherwise. + """ + + try: + stream = Accounts(**config) + stream.page_size = 1 + _ = list(stream.read_records(sync_mode=SyncMode.full_refresh)) + return True, None + except Exception as e: + logger.error(e) + return False, repr(e) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + """ + :param config: A Mapping of the user input configuration as defined in the connector spec. + """ + stream_list = [ + Accounts(**config), + Customers(**config), + InvoicesBooked(**config), + InvoicesPaid(**config), + InvoicesTotal(**config), + Products(**config), + InvoicesBookedDocument(**config), + ] + + return stream_list diff --git a/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/spec.yaml b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/spec.yaml new file mode 100644 index 0000000000000..c0c56c93ba7ac --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/source_visma_economic/spec.yaml @@ -0,0 +1,19 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/visma-economic +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Visma E-conomic Spec + type: object + required: + - app_secret_token + - agreement_grant_token + properties: + app_secret_token: + title: App Secret Token + type: string + description: Identification token for app accessing data + airbyte_secret: true + agreement_grant_token: + title: Agreement Grant Token + type: string + description: Identifier for the grant issued by an agreement + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-visma-economic/unit_tests/__init__.py b/airbyte-integrations/connectors/source-visma-economic/unit_tests/__init__.py new file mode 100644 index 0000000000000..46b7376756ec6 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-visma-economic/unit_tests/test_source.py b/airbyte-integrations/connectors/source-visma-economic/unit_tests/test_source.py new file mode 100644 index 0000000000000..7dc1bbc4945b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/unit_tests/test_source.py @@ -0,0 +1,26 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +import responses +from source_visma_economic.source import SourceVismaEconomic + + +@responses.activate +def test_check_connection(mocker): + responses.add(responses.GET, "https://restapi.e-conomic.com/accounts?skippages=0&pagesize=1", json={"collection": []}) + + source = SourceVismaEconomic() + logger_mock, config_mock = MagicMock(), MagicMock() + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceVismaEconomic() + config_mock = MagicMock() + streams = source.streams(config_mock) + # TODO: replace this with your streams number + expected_streams_number = 7 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-visma-economic/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-visma-economic/unit_tests/test_streams.py new file mode 100644 index 0000000000000..b9c815da10a99 --- /dev/null +++ b/airbyte-integrations/connectors/source-visma-economic/unit_tests/test_streams.py @@ -0,0 +1,92 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +import requests +from source_visma_economic.source import VismaEconomicStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(VismaEconomicStream, "path", "v0/example_endpoint") + mocker.patch.object(VismaEconomicStream, "primary_key", "test_primary_key") + mocker.patch.object(VismaEconomicStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = VismaEconomicStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"pagesize": 1000, "skippages": 0} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = VismaEconomicStream() + response = MagicMock(requests.Response) + json = { + "pagination": { + "maxPageSizeAllowed": 1000, + "skipPages": 0, + "pageSize": 100, + "results": 200, + "resultsWithoutFilter": 200, + "firstPage": "https://restapi.e-conomic.com/stream?skippages=0&pagesize=100", + "nextPage": "https://restapi.e-conomic.com/stream?skippages=1&pagesize=100", + "lastPage": "https://restapi.e-conomic.com/stream?skippages=1&pagesize=100", + } + } + response.json = MagicMock(return_value=json) + inputs = {"response": response} + + expected_token = {"skippages": ["1"], "pagesize": ["100"]} + assert stream.next_page_token(**inputs) == expected_token + + +def test_no_next_page_token(patch_base_class): + stream = VismaEconomicStream() + response = MagicMock(requests.Response) + response.json = MagicMock(return_value={}) + inputs = {"response": response} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + +def test_request_headers(patch_base_class): + stream = VismaEconomicStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = 
{"X-AgreementGrantToken": None, "X-AppSecretToken": None} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = VismaEconomicStream() + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = VismaEconomicStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = VismaEconomicStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-vitally/.dockerignore b/airbyte-integrations/connectors/source-vitally/.dockerignore new file mode 100644 index 0000000000000..068cea629ed89 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_vitally +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-vitally/Dockerfile b/airbyte-integrations/connectors/source-vitally/Dockerfile new file mode 100644 index 0000000000000..1fafb01130811 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_vitally ./source_vitally + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-vitally diff --git a/airbyte-integrations/connectors/source-vitally/README.md b/airbyte-integrations/connectors/source-vitally/README.md new file mode 100644 index 0000000000000..d55eeffce48f9 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/README.md @@ -0,0 +1,79 @@ +# Vitally Source + +This is the repository for the Vitally configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/vitally). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-vitally:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/vitally) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_vitally/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source vitally test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-vitally:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-vitally:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-vitally:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-vitally:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-vitally:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-vitally:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. + +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-vitally:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-vitally:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. 
Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-vitally/__init__.py b/airbyte-integrations/connectors/source-vitally/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-vitally/acceptance-test-config.yml b/airbyte-integrations/connectors/source-vitally/acceptance-test-config.yml new file mode 100644 index 0000000000000..55feae87911d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/acceptance-test-config.yml @@ -0,0 +1,25 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-vitally:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_vitally/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-vitally/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-vitally/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-vitally/build.gradle b/airbyte-integrations/connectors/source-vitally/build.gradle new file mode 100644 index 0000000000000..840a10c657192 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_vitally' +} diff --git a/airbyte-integrations/connectors/source-vitally/integration_tests/__init__.py b/airbyte-integrations/connectors/source-vitally/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
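For reference, the `connector_image` that `acceptance-test-docker.sh` extracts with `grep`/`cut` can be read just as easily with a YAML parser. A small sketch, assuming PyYAML is available; the inline document mirrors the `acceptance-test-config.yml` added above.
```python
# Sketch only: the connector_image lookup from acceptance-test-docker.sh,
# done with PyYAML instead of grep/cut. Assumes `pip install pyyaml`.
import yaml

config_text = """
connector_image: airbyte/source-vitally:dev
acceptance_tests:
  spec:
    tests:
      - spec_path: "source_vitally/spec.yaml"
"""

config = yaml.safe_load(config_text)
print(config["connector_image"])  # airbyte/source-vitally:dev
```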
+# diff --git a/airbyte-integrations/connectors/source-vitally/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-vitally/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-vitally/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-vitally/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..0c6c1ffde33bc --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/integration_tests/configured_catalog.json @@ -0,0 +1,67 @@ +{ + "streams": [ + { + "stream": { + "name": "accounts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "admins", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "conversations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "notes", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "nps_responses", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "tasks", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-vitally/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-vitally/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..7ecd678016d65 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "", + "status": "not-a-goog-status" +} diff --git a/airbyte-integrations/connectors/source-vitally/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-vitally/integration_tests/sample_config.json new file mode 100644 index 0000000000000..05a2b165dcbd8 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "", + "status": "active" +} diff --git a/airbyte-integrations/connectors/source-vitally/integration_tests/simple_catalog.json b/airbyte-integrations/connectors/source-vitally/integration_tests/simple_catalog.json new file mode 100644 index 0000000000000..3fdb9270c00d3 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-vitally/integration_tests/simple_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "accounts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-vitally/main.py b/airbyte-integrations/connectors/source-vitally/main.py new file mode 100644 index 0000000000000..a95b707667ad4 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_vitally import SourceVitally + +if __name__ == "__main__": + source = SourceVitally() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-vitally/requirements.txt b/airbyte-integrations/connectors/source-vitally/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-vitally/setup.py b/airbyte-integrations/connectors/source-vitally/setup.py new file mode 100644 index 0000000000000..10ee0bbfcbb41 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_vitally", + description="Source implementation for Vitally.", + author="Elliot Trabac", + author_email="elliot.trabac1@gmail.com", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/__init__.py b/airbyte-integrations/connectors/source-vitally/source_vitally/__init__.py new file mode 100644 index 0000000000000..e788d215f0500 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
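The `main.py` and `setup.py` above wire this package into the Airbyte CDK entrypoint. A minimal sketch of exercising it in-process, assuming `source_vitally` and `airbyte-cdk` are installed (for example via `pip install -r requirements.txt`); passing `["spec"]` is equivalent to running `python main.py spec`.
```python
# Sketch: drive the connector entrypoint in-process instead of via the CLI.
# Assumes source_vitally and airbyte-cdk are installed in the current environment.
from airbyte_cdk.entrypoint import launch
from source_vitally import SourceVitally

if __name__ == "__main__":
    launch(SourceVitally(), ["spec"])  # same as: python main.py spec
```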
+# + + +from .source import SourceVitally + +__all__ = ["SourceVitally"] diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/accounts.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/accounts.json new file mode 100644 index 0000000000000..28ee630e24f13 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/accounts.json @@ -0,0 +1,90 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "externalId": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "traits": { + "type": ["null", "object"] + }, + "organizationId": { + "type": ["null", "string"] + }, + "accountOwnerId": { + "type": ["null", "string"] + }, + "churnedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "firstSeenTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "lastSeenTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "lastInboundMessageTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "lastOutboundMessageTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "mrr": { + "type": ["null", "number"] + }, + "nextRenewalDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "trialEndDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "usersCount": { + "type": ["null", "integer"] + }, + "npsDetractorCount": { + "type": ["null", "integer"] + }, + "npsPassiveCount": { + "type": ["null", "integer"] + }, + "npsPromoterCount": { + "type": ["null", "integer"] + }, + "npsScore": { + "type": ["null", "number"] + }, + "healthScore": { + "type": ["null", "number"] + }, + "csmId": { + "type": ["null", "string"] + }, + "accountExecutiveId": { + "type": ["null", "string"] + }, + "segments": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/admins.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/admins.json new file mode 100644 index 0000000000000..28e76582d6d5a --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/admins.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "licenseStatus": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/conversations.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/conversations.json new file mode 100644 index 0000000000000..0e6d804bd0403 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/conversations.json @@ -0,0 +1,23 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "externalId": { + "type": ["null", "string"] + }, + "subject": { + "type": ["null", "string"] + } + } +} diff --git 
a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/notes.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/notes.json new file mode 100644 index 0000000000000..802b5d6738741 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/notes.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "externalId": { + "type": ["null", "string"] + }, + "organizationId": { + "type": ["null", "string"] + }, + "categoryId": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "noteDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "note": { + "type": ["null", "string"] + }, + "account": { + "type": ["null", "object"] + }, + "organization": { + "type": ["null", "string"] + }, + "author": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "tag": { + "type": ["null", "array"] + }, + "accountId": { + "type": ["null", "string"] + }, + "authorId": { + "type": ["null", "string"] + }, + "traits": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/nps_responses.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/nps_responses.json new file mode 100644 index 0000000000000..e263835aaa8eb --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/nps_responses.json @@ -0,0 +1,40 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "externalId": { + "type": ["null", "string"] + }, + "userId": { + "type": ["null", "string"] + }, + "user": { + "type": ["null", "object"] + }, + "dismissedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "respondedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "score": { + "type": ["null", "number"] + }, + "feedback": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/organizations.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/organizations.json new file mode 100644 index 0000000000000..849ce45c36bc0 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/organizations.json @@ -0,0 +1,56 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "externalId": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "traits": { + "type": ["null", "object"] + }, + "accountOwnerId": { + "type": ["null", "string"] + }, + "churnedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "mrr": { + "type": ["null", "number"] + }, + "nextRenewalDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "trialEndDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "usersCount": 
{ + "type": ["null", "integer"] + }, + "csmId": { + "type": ["null", "string"] + }, + "keyRoles": { + "type": ["null", "array"] + }, + "accountExecutiveId": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/tasks.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/tasks.json new file mode 100644 index 0000000000000..30721425a8e0e --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/tasks.json @@ -0,0 +1,73 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "externalId": { + "type": ["null", "string"] + }, + "organizationId": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "completedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "dueDate": { + "type": ["null", "string"], + "format": "date" + }, + "account": { + "type": ["null", "object"] + }, + "organization": { + "type": ["null", "string"] + }, + "tag": { + "type": ["null", "array"] + }, + "accountId": { + "type": ["null", "string"] + }, + "categoryId": { + "type": ["null", "string"] + }, + "assignedToId": { + "type": ["null", "string"] + }, + "completedById": { + "type": ["null", "string"] + }, + "assignedTo": { + "type": ["null", "object"] + }, + "completedBy": { + "type": ["null", "string"] + }, + "category": { + "type": ["null", "string"] + }, + "projects": { + "type": ["null", "array"] + }, + "traits": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/users.json b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/users.json new file mode 100644 index 0000000000000..cac343e62f555 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/schemas/users.json @@ -0,0 +1,78 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "createdAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "updatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "externalId": { + "type": ["null", "string"] + }, + "accounts": { + "type": ["null", "array"] + }, + "organizations": { + "type": ["null", "array"] + }, + "name": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "avatar": { + "type": ["null", "string"] + }, + "traits": { + "type": ["null", "object"] + }, + "firstKnown": { + "type": ["null", "string"], + "format": "date-time" + }, + "lastSeenTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "lastInboundMessageTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "lastOutboundMessageTimestamp": { + "type": ["null", "string"], + "format": "date-time" + }, + "npsLastScore": { + "type": ["null", "number"] + }, + "npsLastFeedback": { + "type": ["null", "string"] + }, + "npsLastRespondedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "unsubscribedFromConversations": { + "type": ["null", "boolean"] + }, + "deactivatedAt": { + "type": ["null", "string"], + "format": "date-time" + }, + "segments": { + "type": ["null", "array"] + }, + 
"joinDate": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/source.py b/airbyte-integrations/connectors/source-vitally/source_vitally/source.py new file mode 100644 index 0000000000000..1cb69b35a7799 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. +""" + + +# Declarative Source +class SourceVitally(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "vitally.yaml"}) diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/spec.yaml b/airbyte-integrations/connectors/source-vitally/source_vitally/spec.yaml new file mode 100644 index 0000000000000..57c75bd02c129 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/spec.yaml @@ -0,0 +1,23 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/vitally +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Vitally Spec + type: object + required: + - api_key + - status + additionalProperties: true + properties: + api_key: + type: string + title: API Token + description: The API Token for a Vitally account. + airbyte_secret: true + status: + type: string + title: Status + description: Status of the Vitally accounts. One of the following values; active, churned, activeOrChurned. + enum: + - active + - churned + - activeOrChurned diff --git a/airbyte-integrations/connectors/source-vitally/source_vitally/vitally.yaml b/airbyte-integrations/connectors/source-vitally/source_vitally/vitally.yaml new file mode 100644 index 0000000000000..4b073c5b80d47 --- /dev/null +++ b/airbyte-integrations/connectors/source-vitally/source_vitally/vitally.yaml @@ -0,0 +1,107 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["results"] + requester: + url_base: "https://rest.vitally.io/resources/" + http_method: "GET" + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['api_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response.next }}" + page_size: 100 + page_size_option: + field_name: "limit" + inject_into: "request_parameter" + page_token_option: + field_name: "from" + inject_into: "request_parameter" + url_base: + $ref: "*ref(definitions.requester.url_base)" + requester: + $ref: "*ref(definitions.requester)" + + # base stream + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + # stream definitions + accounts_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "accounts" + primary_key: "id" + path: "/accounts" + retriever: + $ref: "*ref(definitions.base_stream.retriever)" + requester: + $ref: "*ref(definitions.requester)" + request_options_provider: + request_parameters: + status: "{{ config['status'] }}" + admins_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "admins" + primary_key: "id" + path: "/admins" + conversations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "conversations" + primary_key: "id" + path: 
"/conversations" + notes_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "notes" + primary_key: "id" + path: "/notes" + nps_responses_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "nps_responses" + primary_key: "id" + path: "/npsResponses" + organizations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "organizations" + primary_key: "id" + path: "/organizations" + tasks_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "tasks" + primary_key: "id" + path: "/tasks" + users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "users" + primary_key: "id" + path: "/users" + +streams: + - "*ref(definitions.accounts_stream)" + - "*ref(definitions.admins_stream)" + - "*ref(definitions.conversations_stream)" + - "*ref(definitions.notes_stream)" + - "*ref(definitions.nps_responses_stream)" + - "*ref(definitions.organizations_stream)" + - "*ref(definitions.tasks_stream)" + - "*ref(definitions.users_stream)" + +check: + stream_names: + - "accounts" diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json index 7b8975e6ceac8..a7bb57bc48f64 100644 --- a/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/configured_catalog.json @@ -4,7 +4,7 @@ "stream": { "name": "booking", "json_schema": {}, - "supported_sync_modes": ["full_refresh","incremental"] + "supported_sync_modes": ["full_refresh", "incremental"] }, "sync_mode": "incremental", "destination_sync_mode": "overwrite" diff --git a/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json index 1f97957187475..fe190a578f297 100644 --- a/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json +++ b/airbyte-integrations/connectors/source-waiteraid/integration_tests/invalid_config.json @@ -1 +1 @@ -{"start_date": "2022-09-01", "auth_hash": "1nval1dk3y", "restid": "666"} +{ "start_date": "2022-09-01", "auth_hash": "1nval1dk3y", "restid": "666" } diff --git a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json index ec922ebe34dd4..e446ac1db69c9 100644 --- a/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json +++ b/airbyte-integrations/connectors/source-waiteraid/source_waiteraid/schemas/booking.json @@ -1,151 +1,151 @@ { "type": "object", - "properties": { + "properties": { "id": { - "type": ["null", "number"] + "type": ["null", "number"] }, "amount": { - "type": ["null", "number"] + "type": ["null", "number"] }, "children_amount": { - "type": ["null", "number"] + "type": ["null", "number"] }, "placed": { - "type": ["null", "number"] + "type": ["null", "number"] }, "placed_manually": { - "type": ["null", "number"] + "type": ["null", "number"] }, "start": { - "type": ["null", "number"] + "type": ["null", "number"] }, "end": { - "type": ["null", "number"] + "type": ["null", "number"] }, "length": { - "type": ["null", "number"] + "type": ["null", "number"] }, "status": { - "type": ["null", "string"] + "type": ["null", "string"] }, "arrived": { - "type": ["null", "number"] + "type": ["null", "number"] }, "all_seated": 
{ - "type": ["null", "number"] + "type": ["null", "number"] }, "guest_left": { - "type": ["null", "number"] + "type": ["null", "number"] }, "comment": { - "type": ["null", "string"] + "type": ["null", "string"] }, "confirmed": { - "type": ["null", "number"] + "type": ["null", "number"] }, "waitinbar": { - "type": ["null", "number"] + "type": ["null", "number"] }, "internet_booking": { - "type": ["null", "number"] + "type": ["null", "number"] }, "internet_booking_confirmed": { - "type": ["null", "number"] + "type": ["null", "number"] }, "paid": { - "type": ["null", "number"] + "type": ["null", "number"] }, "langid": { - "type": ["null", "number"] + "type": ["null", "number"] }, "meal": { - "type": ["null", "string"] + "type": ["null", "string"] }, "tables": { - "type": ["null", "number"] + "type": ["null", "number"] }, "meal_abbr": { - "type": ["null", "string"] + "type": ["null", "string"] }, "table_ids": { - "type": ["null", "number"] + "type": ["null", "number"] }, "products": { - "type": ["null", "number"] + "type": ["null", "number"] }, "waitinlist": { - "type": ["null", "number"] + "type": ["null", "number"] }, "date": { - "type": ["null", "string"] + "type": ["null", "string"] }, "time": { - "type": ["null", "string"] + "type": ["null", "string"] }, "guest": { - "type": "object", - "properties": { - "id": { - "type": ["null", "number"] - }, - "firstname": { - "type": ["null", "string"] - }, - "lastname": { - "type": ["null", "string"] - }, - "name": { - "type": ["null", "string"] - }, - "address": { - "type": ["null", "string"] - }, - "postalcode": { - "type": ["null", "string"] - }, - "city": { - "type": ["null", "string"] - }, - "company": { - "type": ["null", "string"] - }, - "telephone": { - "type": ["null", "string"] - }, - "mobile": { - "type": ["null", "string"] - }, - "email": { - "type": ["null", "string"] - }, - "comment": { - "type": ["null", "string"] - }, - "other_contact": { - "type": ["null", "string"] - }, - "restaurant_newsletter": { - "type": ["null", "boolean"] - } - } + "type": "object", + "properties": { + "id": { + "type": ["null", "number"] + }, + "firstname": { + "type": ["null", "string"] + }, + "lastname": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "address": { + "type": ["null", "string"] + }, + "postalcode": { + "type": ["null", "string"] + }, + "city": { + "type": ["null", "string"] + }, + "company": { + "type": ["null", "string"] + }, + "telephone": { + "type": ["null", "string"] + }, + "mobile": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "comment": { + "type": ["null", "string"] + }, + "other_contact": { + "type": ["null", "string"] + }, + "restaurant_newsletter": { + "type": ["null", "boolean"] + } + } }, "booking_date": { - "type": ["null", "number"] + "type": ["null", "number"] }, "payStarted": { - "type": ["null", "boolean"] + "type": ["null", "boolean"] }, "payClosed": { - "type": ["null", "boolean"] + "type": ["null", "boolean"] }, "payCharged": { - "type": ["null", "boolean"] + "type": ["null", "boolean"] }, "payActivated": { - "type": ["null", "boolean"] + "type": ["null", "boolean"] }, "has_message": { - "type": ["null", "number"] + "type": ["null", "number"] } } } diff --git a/airbyte-integrations/connectors/source-weatherstack/.dockerignore b/airbyte-integrations/connectors/source-weatherstack/.dockerignore new file mode 100644 index 0000000000000..722b987368f43 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/.dockerignore @@ -0,0 +1,6 
@@ +* +!Dockerfile +!main.py +!source_weatherstack +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-weatherstack/Dockerfile b/airbyte-integrations/connectors/source-weatherstack/Dockerfile new file mode 100644 index 0000000000000..ab35ad6bb8f51 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_weatherstack ./source_weatherstack + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-weatherstack diff --git a/airbyte-integrations/connectors/source-weatherstack/README.md b/airbyte-integrations/connectors/source-weatherstack/README.md new file mode 100644 index 0000000000000..767a218e6e142 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/README.md @@ -0,0 +1,132 @@ +# Weatherstack Source + +This is the repository for the Weatherstack source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/weatherstack). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-weatherstack:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/weatherstack) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_weatherstack/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source weatherstack test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-weatherstack:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-weatherstack:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-weatherstack:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-weatherstack:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-weatherstack:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-weatherstack:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
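If the connector did need test resources, the `connector_setup` fixture in `integration_tests/acceptance.py` is where they would be created and torn down. A hypothetical sketch; the resource calls are placeholders, since this connector's shipped fixture only yields.
```python
# Hypothetical integration_tests/acceptance.py with real setup/teardown.
# The "sandbox record" below is a placeholder, not part of this connector.
import pytest

pytest_plugins = ("source_acceptance_test.plugin",)


@pytest.fixture(scope="session", autouse=True)
def connector_setup():
    sandbox = {"record_id": "example"}  # e.g. create a throwaway record via the API
    yield
    sandbox.clear()                     # e.g. delete it once the whole suite finishes
```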
+To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-weatherstack:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-weatherstack:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-weatherstack/acceptance-test-config.yml b/airbyte-integrations/connectors/source-weatherstack/acceptance-test-config.yml new file mode 100644 index 0000000000000..76c9d6502efb3 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/acceptance-test-config.yml @@ -0,0 +1,20 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-weatherstack:dev +tests: + spec: + - spec_path: "source_weatherstack/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-weatherstack/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-weatherstack/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-weatherstack/build.gradle b/airbyte-integrations/connectors/source-weatherstack/build.gradle new file mode 100644 index 0000000000000..9bc51f8f770c9 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/build.gradle @@ -0,0 +1,13 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_weatherstack' +} + +dependencies { + implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs) +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/__init__.py b/airbyte-integrations/connectors/source-weatherstack/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-weatherstack/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..453c3e569e626 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/abnormal_state.json @@ -0,0 +1,14 @@ +{ + "current_weather": { + "historical_date": "9999-01-01" + }, + "forecast": { + "historical_date": "9999-01-01" + }, + "historical": { + "historical_date": "9999-01-01" + }, + "location_lookup": { + "historical_date": "9999-01-01" + } +} diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-weatherstack/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
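The `9999-01-01` cursors in `abnormal_state.json` are deliberately far in the future: the acceptance suite typically feeds such a state into an incremental read to confirm that nothing older than the cursor is re-emitted. A toy illustration of why that value is "abnormal" (not connector code):
```python
# Toy illustration only: a cursor of 9999-01-01 is later than any real record,
# so an incremental filter against it keeps nothing.
state = {"historical": {"historical_date": "9999-01-01"}}
records = [{"historical_date": "2000-01-01"}, {"historical_date": "2022-06-15"}]

cursor = state["historical"]["historical_date"]
newer = [r for r in records if r["historical_date"] > cursor]  # ISO dates compare lexicographically
print(newer)  # []
```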
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/catalog.json b/airbyte-integrations/connectors/source-weatherstack/integration_tests/catalog.json new file mode 100644 index 0000000000000..ab05028fab1f8 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/catalog.json @@ -0,0 +1,400 @@ +{ + "streams": [ + { + "stream": { + "name": "current_weather", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "type": { + "type": "string" + }, + "query": { + "type": "string" + }, + "language": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + "location": { + "type": ["null", "object"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "localtime": { + "type": "string" + }, + "localtime_epoch": { + "type": "number" + }, + "utc_offset": { + "type": "string" + } + } + }, + "current": { + "type": ["null", "object"], + "properties": { + "observation_time": { + "type": "string" + }, + "temperature": { + "type": "number" + }, + "weather_code": { + "type": "number" + }, + "weather_icons": { + "type": "array" + }, + "weather_descriptions": { + "type": "array" + }, + "wind_speed": { + "type": "number" + }, + "wind_degree": { + "type": "number" + }, + "wind_dir": { + "type": "string" + }, + "pressure": { + "type": "number" + }, + "precip": { + "type": "number" + }, + "humidity": { + "type": "number" + }, + "cloudcover": { + "type": "number" + }, + "feelslike": { + "type": "number" + }, + "uv_index": { + "type": "number" + }, + "visibility": { + "type": "number" + } + } + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "forecast", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "type": { + "type": "string" + }, + "query": { + "type": "string" + }, + "language": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + "location": { + "type": ["null", "object"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "localtime": { + "type": "string" + }, + "localtime_epoch": { + "type": "number" + }, + "utc_offset": { + "type": "string" + } + } + }, + "current": { + "type": ["null", "object"], + "properties": { + "observation_time": { + "type": "string" + }, + "temperature": { + "type": "number" + }, + "weather_code": { + "type": "number" + }, + "weather_icons": { + "type": "array" + }, + "weather_descriptions": { + "type": "array" + }, + "wind_speed": { + "type": "number" + }, + "wind_degree": { + "type": "number" + }, + "wind_dir": { + "type": "string" + }, + "pressure": { + "type": "number" + }, + "precip": { 
+ "type": "number" + }, + "humidity": { + "type": "number" + }, + "cloudcover": { + "type": "number" + }, + "feelslike": { + "type": "number" + }, + "uv_index": { + "type": "number" + }, + "visibility": { + "type": "number" + } + } + }, + "forecast": { + "type": ["null", "object"] + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "historical", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "type": { + "type": "string" + }, + "query": { + "type": "string" + }, + "language": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + "location": { + "type": ["null", "object"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "localtime": { + "type": "string" + }, + "localtime_epoch": { + "type": "number" + }, + "utc_offset": { + "type": "string" + } + } + }, + "current": { + "type": ["null", "object"], + "properties": { + "observation_time": { + "type": "string" + }, + "temperature": { + "type": "number" + }, + "weather_code": { + "type": "number" + }, + "weather_icons": { + "type": "array" + }, + "weather_descriptions": { + "type": "array" + }, + "wind_speed": { + "type": "number" + }, + "wind_degree": { + "type": "number" + }, + "wind_dir": { + "type": "string" + }, + "pressure": { + "type": "number" + }, + "precip": { + "type": "number" + }, + "humidity": { + "type": "number" + }, + "cloudcover": { + "type": "number" + }, + "feelslike": { + "type": "number" + }, + "uv_index": { + "type": "number" + }, + "visibility": { + "type": "number" + } + } + }, + "historical": { + "type": ["null", "object"] + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "location_lookup", + "json_schema": { + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "query": { + "type": "string" + }, + "results": { + "type": "number" + } + } + }, + "results": { + "type": ["null", "array"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "utc_offset": { + "type": "string" + } + } + } + } + }, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-weatherstack/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..e783187d3e78c --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/configured_catalog.json @@ -0,0 +1,40 @@ +{ + "streams": [ + { + "stream": { + "name": "current_weather", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "forecast", + "json_schema": {}, 
+ "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "historical", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "location_lookup", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-weatherstack/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..cebdb4b3d5620 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "query": "datto", + "access_key": "abc", + "historical_date": "9999-99-99", + "is_paid_account": 0 +} diff --git a/airbyte-integrations/connectors/source-weatherstack/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-weatherstack/integration_tests/sample_config.json new file mode 100644 index 0000000000000..7c9ae0f5f2a77 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "query": "London", + "access_key": "abc", + "historical_date": "2000-01-01", + "is_paid_account": true +} diff --git a/airbyte-integrations/connectors/source-weatherstack/main.py b/airbyte-integrations/connectors/source-weatherstack/main.py new file mode 100644 index 0000000000000..df41898a18eab --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_weatherstack import SourceWeatherstack + +if __name__ == "__main__": + source = SourceWeatherstack() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-weatherstack/requirements.txt b/airbyte-integrations/connectors/source-weatherstack/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-weatherstack/setup.py b/airbyte-integrations/connectors/source-weatherstack/setup.py new file mode 100644 index 0000000000000..a751044e2679a --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_weatherstack", + description="Source implementation for Weatherstack.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/__init__.py b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/__init__.py new file mode 100644 index 0000000000000..306e9c7b022b9 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceWeatherstack + +__all__ = ["SourceWeatherstack"] diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/constants.py b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/constants.py new file mode 100644 index 0000000000000..b5970968f5040 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/constants.py @@ -0,0 +1,5 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +url_base = "http://api.weatherstack.com/" diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/current_weather.json b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/current_weather.json new file mode 100644 index 0000000000000..051330b35fd65 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/current_weather.json @@ -0,0 +1,105 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "type": { + "type": "string" + }, + "query": { + "type": "string" + }, + "language": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + "location": { + "type": ["null", "object"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "localtime": { + "type": "string" + }, + "localtime_epoch": { + "type": "number" + }, + "utc_offset": { + "type": "string" + } + } + }, + "current": { + "type": ["null", "object"], + "properties": { + "observation_time": { + "type": "string" + }, + "temperature": { + "type": "number" + }, + "weather_code": { + "type": "number" + }, + "weather_icons": { + "type": "array" + }, + "weather_descriptions": { + "type": "array" + }, + "wind_speed": { + "type": "number" + }, + "wind_degree": { + "type": "number" + }, + "wind_dir": { + "type": "string" + }, + "pressure": { + "type": "number" + }, + "precip": { + "type": "number" + }, + "humidity": { + "type": "number" + }, + "cloudcover": { + "type": "number" + }, + "feelslike": { + "type": "number" + }, + "uv_index": { + "type": "number" + }, + "visibility": { + "type": "number" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/forecast.json 
b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/forecast.json new file mode 100644 index 0000000000000..fd54728b70f9f --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/forecast.json @@ -0,0 +1,108 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "type": { + "type": "string" + }, + "query": { + "type": "string" + }, + "language": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + "location": { + "type": ["null", "object"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "localtime": { + "type": "string" + }, + "localtime_epoch": { + "type": "number" + }, + "utc_offset": { + "type": "string" + } + } + }, + "current": { + "type": ["null", "object"], + "properties": { + "observation_time": { + "type": "string" + }, + "temperature": { + "type": "number" + }, + "weather_code": { + "type": "number" + }, + "weather_icons": { + "type": "array" + }, + "weather_descriptions": { + "type": "array" + }, + "wind_speed": { + "type": "number" + }, + "wind_degree": { + "type": "number" + }, + "wind_dir": { + "type": "string" + }, + "pressure": { + "type": "number" + }, + "precip": { + "type": "number" + }, + "humidity": { + "type": "number" + }, + "cloudcover": { + "type": "number" + }, + "feelslike": { + "type": "number" + }, + "uv_index": { + "type": "number" + }, + "visibility": { + "type": "number" + } + } + }, + "forecast": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/historical.json b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/historical.json new file mode 100644 index 0000000000000..53bcf944e60e3 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/historical.json @@ -0,0 +1,108 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "type": { + "type": "string" + }, + "query": { + "type": "string" + }, + "language": { + "type": "string" + }, + "unit": { + "type": "string" + } + } + }, + "location": { + "type": ["null", "object"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "localtime": { + "type": "string" + }, + "localtime_epoch": { + "type": "number" + }, + "utc_offset": { + "type": "string" + } + } + }, + "current": { + "type": ["null", "object"], + "properties": { + "observation_time": { + "type": "string" + }, + "temperature": { + "type": "number" + }, + "weather_code": { + "type": "number" + }, + "weather_icons": { + "type": "array" + }, + "weather_descriptions": { + "type": "array" + }, + "wind_speed": { + "type": "number" + }, + "wind_degree": { + "type": "number" + }, + "wind_dir": { + "type": "string" + }, + "pressure": { + "type": "number" + }, + "precip": { + "type": "number" + }, + "humidity": { + "type": "number" + }, + "cloudcover": { + "type": "number" + }, + "feelslike": { + "type": "number" + }, 
+ "uv_index": { + "type": "number" + }, + "visibility": { + "type": "number" + } + } + }, + "historical": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/location_lookup.json b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/location_lookup.json new file mode 100644 index 0000000000000..949c47c479a4b --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/schemas/location_lookup.json @@ -0,0 +1,43 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "request": { + "type": ["null", "object"], + "properties": { + "query": { + "type": "string" + }, + "results": { + "type": "number" + } + } + }, + "results": { + "type": ["null", "array"], + "properties": { + "name": { + "type": "string" + }, + "country": { + "type": "string" + }, + "region": { + "type": "string" + }, + "lon": { + "type": "string" + }, + "lat": { + "type": "string" + }, + "timezone_id": { + "type": "string" + }, + "utc_offset": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/source.py b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/source.py new file mode 100644 index 0000000000000..714108ac8bf9b --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/source.py @@ -0,0 +1,334 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import NoAuth + +from .constants import url_base + + +class CurrentWeather(HttpStream): + url_base = "http://api.weatherstack.com/" + + # Set this as a noop. 
+ primary_key = None + + def __init__(self, config: Mapping[str, Any], **kwargs): + super().__init__() + self.query = config["query"] + self.access_key = config["access_key"] + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + # The "/current" path gives us the latest current city weather + return "current" + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + # The api requires that we include api_key as a query param so we do that in this method + return {"access_key": self.access_key, "query": self.query} + + def backoff_time(self, response: requests.Response) -> Optional[float]: + delay_time = response.headers.get("Retry-After") + if delay_time: + return int(delay_time) + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + # The response is a simple JSON whose schema matches our stream's schema exactly, + # so we just return a list containing the response + return [response.json()] + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # The API does not offer pagination, + # so we return None to indicate there are no more pages in the response + return None + + +class Forecast(HttpStream): + url_base = "http://api.weatherstack.com/" + + # Set this as a noop. + primary_key = None + + def __init__(self, config: Mapping[str, Any], **kwargs): + super().__init__() + self.query = config["query"] + self.access_key = config["access_key"] + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + # The "/forecast" path returns the weather forecast for the queried location + return "forecast" + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + # The api requires that we include api_key as a query param so we do that in this method + return {"access_key": self.access_key, "query": self.query} + + def backoff_time(self, response: requests.Response) -> Optional[float]: + delay_time = response.headers.get("Retry-After") + if delay_time: + return int(delay_time) + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + # The response is a simple JSON whose schema matches our stream's schema exactly, + # so we just return a list containing the response + return [response.json()] + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # The API does not offer pagination, + # so we return None to indicate there are no more pages in the response + return None + + +class Historical(HttpStream): + url_base = "http://api.weatherstack.com/" + + # Set this as a noop.
+ primary_key = None + + def __init__(self, config: Mapping[str, Any], **kwargs): + super().__init__() + self.query = config["query"] + self.access_key = config["access_key"] + self.historical_date = config["historical_date"] + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "historical" + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + # The api requires that we include api_key as a query param so we do that in this method + return {"access_key": self.access_key, "query": self.query, "historical_date": self.historical_date} + + def backoff_time(self, response: requests.Response) -> Optional[float]: + delay_time = response.headers.get("Retry-After") + if delay_time: + return int(delay_time) + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + # The response is a simple JSON whose schema matches our stream's schema exactly, + # so we just return a list containing the response + return [response.json()] + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # The API does not offer pagination, + # so we return None to indicate there are no more pages in the response + return None + + +class LocationLookup(HttpStream): + url_base = "http://api.weatherstack.com/" + + # Set this as a noop. + primary_key = None + + def __init__(self, config: Mapping[str, Any], **kwargs): + super().__init__() + self.query = config["query"] + self.access_key = config["access_key"] + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "autocomplete" + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + # The api requires that we include api_key as a query param so we do that in this method + return {"access_key": self.access_key, "query": self.query} + + def backoff_time(self, response: requests.Response) -> Optional[float]: + delay_time = response.headers.get("Retry-After") + if delay_time: + return int(delay_time) + + def parse_response( + self, + response: requests.Response, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + # The response is a simple JSON whose schema matches our stream's schema exactly, + # so we just return a list containing the response + return [response.json()] + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + # The API does not offer pagination, + # so we return None to indicate there are no more pages in the response + return None + + +# Source +class SourceWeatherstack(AbstractSource): + def check_connection(self, logger, config) -> Tuple[bool, any]: + + try: + query = config["query"] + access_key = config["access_key"] + + response = requests.get(f"{url_base}/current?access_key={access_key}&query={query}") + response = response.text + + if response.find('"success": false') != -1: + return False, "Check Query and Access Key" + else: + return True, None + except 
requests.exceptions.RequestException as e: + return False, repr(e) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + auth = NoAuth() + streams = [ + CurrentWeather(authenticator=auth, config=config), + Forecast(authenticator=auth, config=config), + ] + + # The location lookup and historical streams are only available to paid accounts + if config["is_paid_account"] is not False: + streams.append(LocationLookup(authenticator=auth, config=config)) + streams.append(Historical(authenticator=auth, config=config)) + + return streams diff --git a/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/spec.yaml b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/spec.yaml new file mode 100644 index 0000000000000..3a278b05adae2 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/source_weatherstack/spec.yaml @@ -0,0 +1,42 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/weatherstack +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Weatherstack Spec + type: object + required: + - access_key + - query + - historical_date + properties: + is_paid_account: + order: 0 + title: Is Paid Account + description: >- + Toggle on if you are using a paid Weatherstack subscription + type: boolean + default: false + access_key: + order: 1 + type: string + description: API access key used to retrieve data from the Weatherstack API. (https://weatherstack.com/product) + airbyte_secret: true + query: + order: 2 + type: string + description: + A location to query such as a city, IP address, latitude/longitude coordinates, or ZIP code. + Multiple semicolon-separated locations are supported on the Professional plan or higher. + For more info, see https://weatherstack.com/documentation#query_parameter + examples: + - New York + - London + - "98101" + historical_date: + order: 3 + type: string + description: Required to enable the Historical Weather API. The format must be YYYY-MM-DD. Note that this endpoint is only supported by paid accounts. + examples: + - "2015-01-21" + default: "2000-01-01" + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2}" diff --git a/airbyte-integrations/connectors/source-weatherstack/unit_tests/__init__.py b/airbyte-integrations/connectors/source-weatherstack/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-weatherstack/unit_tests/test_source.py b/airbyte-integrations/connectors/source-weatherstack/unit_tests/test_source.py new file mode 100644 index 0000000000000..fff35d48daa50 --- /dev/null +++ b/airbyte-integrations/connectors/source-weatherstack/unit_tests/test_source.py @@ -0,0 +1,15 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# + +from unittest.mock import MagicMock + +from source_weatherstack.source import SourceWeatherstack + + +def test_streams(mocker): + source = SourceWeatherstack() + config_mock = MagicMock() + streams = source.streams(config_mock) + expected_streams_number = 4 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/per-article.json b/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/per-article.json index 6f1907a4713c6..7d095d498a747 100644 --- a/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/per-article.json +++ b/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/per-article.json @@ -25,4 +25,4 @@ } }, "$schema": "http://json-schema.org/schema#" -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/top.json b/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/top.json index bb3641384605e..5b9b9b03749e1 100644 --- a/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/top.json +++ b/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/schemas/top.json @@ -37,4 +37,4 @@ } }, "$schema": "http://json-schema.org/schema#" -} \ No newline at end of file +} diff --git a/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/wikipedia_pageviews.yaml b/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/wikipedia_pageviews.yaml index 03a81cb06bf7e..91e0e9dcd83fd 100755 --- a/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/wikipedia_pageviews.yaml +++ b/airbyte-integrations/connectors/source-wikipedia-pageviews/source_wikipedia_pageviews/wikipedia_pageviews.yaml @@ -3,7 +3,7 @@ version: "0.1.0" definitions: selector: extractor: - field_pointer: [ "items" ] + field_pointer: ["items"] requester: url_base: "https://wikimedia.org/api/rest_v1/metrics/pageviews" http_method: "GET" @@ -12,10 +12,10 @@ definitions: "User-Agent": "AirbyteWikipediaPageviewsConnector/1.0 (https://github.com/airbytehq/airbyte)" top_stream_slicer: type: DatetimeStreamSlicer - start_datetime: + start_datetime: datetime: "{{config.start}}" datetime_format: "%Y%m%d" - end_datetime: + end_datetime: datetime: "{{config.start}}" datetime_format: "%Y%m%d" step: "1d" @@ -73,11 +73,11 @@ definitions: $options: name: "top" -streams: +streams: - "*ref(definitions.per_article_stream)" - "*ref(definitions.top_stream)" check: - stream_names: + stream_names: - "per-article" - "top" diff --git a/airbyte-integrations/connectors/source-workramp/.dockerignore b/airbyte-integrations/connectors/source-workramp/.dockerignore new file mode 100644 index 0000000000000..9491a29588113 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_workramp +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-workramp/Dockerfile b/airbyte-integrations/connectors/source-workramp/Dockerfile new file mode 100644 index 0000000000000..480186d2268f7 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder 
+WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_workramp ./source_workramp + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-workramp diff --git a/airbyte-integrations/connectors/source-workramp/README.md b/airbyte-integrations/connectors/source-workramp/README.md new file mode 100644 index 0000000000000..a8449bb3a7a1d --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/README.md @@ -0,0 +1,79 @@ +# Workramp Source + +This is the repository for the Workramp configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/workramp). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-workramp:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/workramp) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_workramp/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source workramp test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-workramp:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-workramp:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
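+
+The `check`, `discover`, and `read` commands below expect a `secrets/config.json`. As a rough sketch (placeholder values only; the two field names come from `source_workramp/spec.yaml`), that file could look like:
+```
+{
+  "api_key": "<your WorkRamp API token>",
+  "academy_id": "<your academy id>"
+}
+```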
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-workramp:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-workramp:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-workramp:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-workramp:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure the acceptance tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + +To run the acceptance tests in a Docker container, run `./acceptance-test-docker.sh` from this connector's directory. + +### Using gradle to run tests +All commands should be run from the airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-workramp:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-workramp:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies into two groups: +* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* dependencies required for testing go in the `TEST_REQUIREMENTS` list. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-workramp/__init__.py b/airbyte-integrations/connectors/source-workramp/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-workramp/acceptance-test-config.yml b/airbyte-integrations/connectors/source-workramp/acceptance-test-config.yml new file mode 100644 index 0000000000000..3a53f45f45980 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/acceptance-test-config.yml @@ -0,0 +1,25 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-workramp:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_workramp/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: ["paths_users"] + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-workramp/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-workramp/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-workramp/build.gradle b/airbyte-integrations/connectors/source-workramp/build.gradle new file mode 100644 index 0000000000000..6e0241d036777 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_workramp' +} diff --git a/airbyte-integrations/connectors/source-workramp/integration_tests/__init__.py b/airbyte-integrations/connectors/source-workramp/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-workramp/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-workramp/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-workramp/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-workramp/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..18f9d4fbe4040 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/integration_tests/configured_catalog.json @@ -0,0 +1,58 @@ +{ + "streams": [ + { + "stream": { + "name": "awarded_certifications", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "certifications", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "paths_users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "registrations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "trainings", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "users", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-workramp/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-workramp/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..c6b359c0daf89 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "", + "academy_id": "" +} diff --git a/airbyte-integrations/connectors/source-workramp/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-workramp/integration_tests/sample_config.json new file mode 100644 index 0000000000000..045b8353527dd --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "", + "academy_id": "" +} diff --git a/airbyte-integrations/connectors/source-workramp/integration_tests/simple_catalog.json b/airbyte-integrations/connectors/source-workramp/integration_tests/simple_catalog.json new file mode 100644 index 0000000000000..d2348f3efcdbb --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/integration_tests/simple_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "registrations", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-workramp/main.py b/airbyte-integrations/connectors/source-workramp/main.py new file mode 100644 index 0000000000000..37eec5276a796 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_workramp import SourceWorkramp + +if __name__ == "__main__": + source = SourceWorkramp() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-workramp/requirements.txt b/airbyte-integrations/connectors/source-workramp/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-workramp/setup.py b/airbyte-integrations/connectors/source-workramp/setup.py new file mode 100644 index 0000000000000..518d31655d278 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_workramp", + description="Source implementation for Workramp.", + author="Elliot Trabac", + author_email="elliot.trabac1@gmail.com", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/__init__.py b/airbyte-integrations/connectors/source-workramp/source_workramp/__init__.py new file mode 100644 index 0000000000000..6f29f8fb92c3d --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceWorkramp + +__all__ = ["SourceWorkramp"] diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/awarded_certifications.json b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/awarded_certifications.json new file mode 100644 index 0000000000000..be57887230a5b --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/awarded_certifications.json @@ -0,0 +1,27 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "user_id": { + "type": ["null", "string"] + }, + "user_name": { + "type": ["null", "string"] + }, + "user_email": { + "type": ["null", "string"] + }, + "awarded_at": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/certifications.json b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/certifications.json new file mode 100644 index 0000000000000..0dd9a03c78d40 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/certifications.json @@ -0,0 +1,25 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "description": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"], + "format": "date-time" + }, + "awarded_by_name": { + "type": ["null", "string"] + }, + "awarded_by_title": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/paths_users.json b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/paths_users.json new file mode 100644 index 0000000000000..28ce586990ec4 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/paths_users.json @@ -0,0 +1,9 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/registrations.json b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/registrations.json new file mode 100644 index 0000000000000..2395749ab7702 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/registrations.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "completed_at": { + "type": ["null", "number"] + }, + "is_completed": { + "type": ["null", "boolean"] + }, + "completion_percentage": { + "type": ["null", "number"] + }, + "score": { + "type": ["null", "number"] + }, + "due_at": { + "type": ["null", "string"] + }, + "due_date": { + "type": ["null", "string"] + }, + "pass_status": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "number"] + }, + "user": { + "type": ["null", "object"] + }, + "content": { + "type": ["null", "object"] + } + } +} diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/trainings.json b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/trainings.json new file mode 100644 index 
0000000000000..2fda1fa07af2b --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/trainings.json @@ -0,0 +1,18 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "type": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/users.json b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/users.json new file mode 100644 index 0000000000000..a323620d94d65 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/schemas/users.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "email": { + "type": ["null", "string"] + }, + "segments": { + "type": ["null", "array"] + }, + "display_name": { + "type": ["null", "string"] + }, + "first_name": { + "type": ["null", "string"] + }, + "last_name": { + "type": ["null", "string"] + }, + "created_at": { + "type": ["null", "string"] + }, + "custom_registration_field_values": { + "type": ["null", "array"] + } + } +} diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/source.py b/airbyte-integrations/connectors/source-workramp/source_workramp/source.py new file mode 100644 index 0000000000000..999cd736e1b12 --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
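+
+The connector's streams, authentication, and pagination are all declared in workramp.yaml in this package; the SourceWorkramp class below simply loads that file through YamlDeclarativeSource.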
+""" + + +# Declarative Source +class SourceWorkramp(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "workramp.yaml"}) diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/spec.yaml b/airbyte-integrations/connectors/source-workramp/source_workramp/spec.yaml new file mode 100644 index 0000000000000..30ced9b01086e --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/spec.yaml @@ -0,0 +1,19 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/workramp +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Workramp Spec + type: object + required: + - api_key + - academy_id + additionalProperties: true + properties: + api_key: + type: string + title: API Token + description: The API Token for Workramp + airbyte_secret: true + academy_id: + type: string + title: Academy ID + description: The id of the Academy diff --git a/airbyte-integrations/connectors/source-workramp/source_workramp/workramp.yaml b/airbyte-integrations/connectors/source-workramp/source_workramp/workramp.yaml new file mode 100644 index 0000000000000..a3d130694786b --- /dev/null +++ b/airbyte-integrations/connectors/source-workramp/source_workramp/workramp.yaml @@ -0,0 +1,83 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: ["data", "{{ options.name }}"] + requester: + url_base: "https://app.workramp.com/api/v1/academies/{{ config['academy_id'] }}" + http_method: "GET" + authenticator: + type: BearerAuthenticator + api_token: "{{ config['api_key'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: "DefaultPaginator" + page_size_option: + inject_into: "request_parameter" + field_name: "per_page" + pagination_strategy: + type: "PageIncrement" + page_size: 100 + page_token_option: + inject_into: "request_parameter" + field_name: "page" + url_base: + $ref: "*ref(definitions.requester.url_base)" + requester: + $ref: "*ref(definitions.requester)" + + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + + awarded_certifications_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "awarded_certifications" + primary_key: "id" + path: "/awarded_certifications" + certifications_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "certifications" + primary_key: "id" + path: "/certifications" + paths_users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "paths_users" + primary_key: "id" + path: "/paths_users" + registrations_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "registrations" + primary_key: "id" + path: "/registrations" + trainings_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "trainings" + primary_key: "id" + path: "/trainings" + users_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "users" + primary_key: "id" + path: "/users" + +streams: + - "*ref(definitions.awarded_certifications_stream)" + - "*ref(definitions.certifications_stream)" + - "*ref(definitions.paths_users_stream)" + - "*ref(definitions.registrations_stream)" + - "*ref(definitions.trainings_stream)" + - "*ref(definitions.users_stream)" + +check: + stream_names: + - "users" diff --git a/airbyte-integrations/connectors/source-xero/.dockerignore b/airbyte-integrations/connectors/source-xero/.dockerignore new file mode 100644 index 0000000000000..190409539bdc9 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/.dockerignore @@ 
-0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_xero +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-xero/Dockerfile b/airbyte-integrations/connectors/source-xero/Dockerfile new file mode 100644 index 0000000000000..b9bb395fd1501 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_xero ./source_xero + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-xero diff --git a/airbyte-integrations/connectors/source-xero/README.md b/airbyte-integrations/connectors/source-xero/README.md new file mode 100644 index 0000000000000..464447527fcf3 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/README.md @@ -0,0 +1,132 @@ +# Xero Source + +This is the repository for the Xero source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/xero). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. 
+ +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-xero:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/xero) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_xero/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source xero test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-xero:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-xero:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-xero:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-xero:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-xero:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-xero:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. 
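+
+For example, a hypothetical session-scoped fixture in `integration_tests/acceptance.py` could look like the sketch below (the seed/cleanup helpers named in the comments are illustrative placeholders, not part of this connector):
+```
+import pytest
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+    # create any external resources the acceptance tests depend on
+    # e.g. resource_id = create_sandbox_record()
+    yield
+    # tear those resources down once the test session finishes
+    # e.g. delete_sandbox_record(resource_id)
+```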
+To run your integration tests with acceptance tests, from the connector root, run
+```
+python -m pytest integration_tests -p integration_tests.acceptance
+```
+To run the acceptance tests against the connector's Docker image instead, run the `acceptance-test-docker.sh` script from the connector root.
+
+### Using Gradle to run tests
+All commands should be run from the Airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-xero:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-xero:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies into two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required only for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml b/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml
new file mode 100644
index 0000000000000..561ba6e64d519
--- /dev/null
+++ b/airbyte-integrations/connectors/source-xero/acceptance-test-config.yml
@@ -0,0 +1,24 @@
+# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-xero:dev
+tests:
+  spec:
+    - spec_path: "source_xero/spec.yaml"
+  connection:
+    - config_path: "integration_tests/sample_config.json"
+      status: "succeed"
+    - config_path: "integration_tests/invalid_config.json"
+      status: "failed"
+  discovery:
+    - config_path: "integration_tests/sample_config.json"
+  basic_read:
+    - config_path: "integration_tests/sample_config.json"
+      configured_catalog_path: "integration_tests/configured_catalog.json"
+      empty_streams: ["manual_journals", "credit_notes", "overpayments", "bank_transfers", "purchase_orders", "prepayments"]
+  incremental:
+    - config_path: "integration_tests/sample_config.json"
+      configured_catalog_path: "integration_tests/configured_catalog.json"
+      future_state_path: "integration_tests/abnormal_state.json"
+  full_refresh:
+    - config_path: "integration_tests/sample_config.json"
+      configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-xero/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-xero/acceptance-test-docker.sh
new file mode 100644
index 0000000000000..c51577d10690c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-xero/acceptance-test-docker.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build .
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-xero/build.gradle b/airbyte-integrations/connectors/source-xero/build.gradle new file mode 100644 index 0000000000000..53f3682cb2d2e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_xero' +} diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/__init__.py b/airbyte-integrations/connectors/source-xero/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-xero/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..bc92812067cbe --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/abnormal_state.json @@ -0,0 +1,44 @@ +{ + "bank_transactions": { + "date": "2033-10-28T11:08:03+00:00" + }, + "contacts": { + "date": "2033-10-30T10:47:59+00:00" + }, + "credit_notes": { + "date": "2033-10-28T11:06:20+00:00" + }, + "invoices": { + "date": "2033-10-28T16:36:42+00:00" + }, + "manual_journals": { + "date": "2033-10-28T12:08:59+00:00" + }, + "overpayments": { + "date": "2033-10-28T12:30:01+00:00" + }, + "prepayments": { + "date": "2033-10-18T11:51:45+00:00" + }, + "purchase_orders": { + "date": "2033-10-18T11:51:45+00:00" + }, + "accounts": { + "date": "2033-10-18T11:51:45+00:00" + }, + "bank_transfers": { + "date": "2033-10-28T07:28:42+00:00" + }, + "employees": { + "date": "2033-10-18T11:51:45+00:00" + }, + "items": { + "date": "2033-10-18T11:51:45+00:00" + }, + "payments": { + "date": "2033-10-28T16:36:42+00:00" + }, + "users": { + "date": "2033-08-30T09:53:59+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-xero/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-xero/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..d9858746b1fb1 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/configured_catalog.json @@ -0,0 +1,16646 @@ +{ + "streams": [ + { + "stream": { + "name": "bank_transactions", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + 
"type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", 
"string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "BankAccount": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", 
"string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + }, + "Reference": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Url": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "BankTransactionID": { + "type": ["string"] + }, + "PrepaymentID": { + "type": ["null", "string"] + }, + "OverpaymentID": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ExternalLinkProviderName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["BankTransactionID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "contacts", + "json_schema": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + 
"AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + 
"PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["ContactID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "credit_notes", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + 
"EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": 
["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + 
"additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "CreditNoteID": { + "type": ["string"] + }, + "CreditNoteNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + 
"type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + 
"additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + 
"HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, 
+ "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["CreditNoteID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "invoices", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", 
"boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + 
"type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + 
"type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "ExpectedPaymentDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Payments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + 
"type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + 
"TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + 
}, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + 
"InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "CreditNotes": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": 
["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", 
"object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", 
"number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "CreditNoteID": { + "type": ["string"] + }, + "CreditNoteNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", 
"string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": 
["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": 
{ + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + 
"type": ["null", "array"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", 
"string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + 
"AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "PrepaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + 
}, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", 
"string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + 
"type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + 
"UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": 
["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { 
+ "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", 
"array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 
1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "Overpayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + 
"type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": 
["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Status": { + "type": ["null", "string"] + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + 
"TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "OverpaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", 
"string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": 
true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + 
"type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "type": 
["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + 
"additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": 
["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + 
"additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "Reference": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true + } + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + 
"DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["InvoiceID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "manual_journals", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "Narration": { + "type": ["null", "string"] + }, + "JournalLines": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Description": { + "type": ["null", "string"] + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountID": { + "type": ["null", "string"] + }, + "AccountCode": { + "type": ["null", "string"] + }, + "IsBlank": { + "type": ["null", "boolean"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "Url": { + "type": ["null", "string"] + }, + "ShowOnCashBasisReports": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ManualJournalID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["ManualJournalID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "overpayments", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": 
["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { 
+ "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", 
"array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Status": { + "type": ["null", "string"] + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "OverpaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + 
"AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", 
"array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + 
"HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": 
["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": 
["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + 
"type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + 
"type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + 
"additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "Reference": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["OverpaymentID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "prepayments", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", 
"string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + 
"HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { 
+ "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "PrepaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + 
"UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, 
+ "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", 
"object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] 
+ }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + 
"type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", 
"string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + 
"type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["PrepaymentID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "purchase_orders", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, 
+ "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + 
"type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } 
+ }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DeliveryDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "PurchaseOrderNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "DeliveryAddress": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "Telephone": { + "type": ["null", "string"] + }, + "DeliveryInstructions": { + "type": ["null", "string"] + }, + "ExpectedArrivalDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "PurchaseOrderID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "DeliveryDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + 
"DateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "ExpectedArrivalDateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["PurchaseOrderID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "accounts", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["AccountID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "bank_transfers", + "json_schema": { + "type": ["null", "object"], + "properties": { + "FromBankAccount": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "ToBankAccount": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + 
"type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + }, + "BankTransferID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FromBankTransactionID": { + "type": ["null", "string"] + }, + "ToBankTransactionID": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CreatedDateUTCString": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["CreatedDateUTC"], + "source_defined_primary_key": [["BankTransferID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "employees", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "ExternalLink": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "EmployeeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["EmployeeID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "items", + "json_schema": { + "type": ["null", "object"], + "properties": { + "ItemID": { + "type": ["string"] + }, + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "IsSold": { + "type": ["null", "boolean"] + }, + "IsPurchased": { + "type": ["null", "boolean"] + }, + "Description": { + "type": ["null", "string"] + }, + "PurchaseDescription": { + "type": ["null", "string"] + }, + "PurchaseDetails": { + "properties": { + "TaxType": { + "type": ["null", "string"] + }, + "COGSAccountCode": { + "type": ["null", "string"] + }, + "UnitPrice": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"], + "additionalProperties": true + }, + "SalesDetails": { + "properties": { + "TaxType": { + "type": ["null", "string"] + }, + "UnitPrice": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + } + }, + "type": 
["null", "object"], + "additionalProperties": true + }, + "IsTrackedAsInventory": { + "type": ["null", "boolean"] + }, + "InventoryAssetAccountCode": { + "type": ["null", "string"] + }, + "TotalCostPool": { + "type": ["null", "number"] + }, + "QuantityOnHand": { + "type": ["null", "number"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["ItemID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "payments", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "Invoice": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": 
["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + 
"additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": 
-1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": 
["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["PaymentID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "users", + "json_schema": { + "type": ["null", "object"], + "properties": { + "UserID": { + "type": ["string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsSubscriber": { + "type": ["null", "boolean"] + }, + "OrganisationRole": { + "type": ["null", "string"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh", "incremental"], + "source_defined_cursor": true, + "default_cursor_field": ["UpdatedDateUTC"], + "source_defined_primary_key": [["UserID"]] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "branding_themes", + "json_schema": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["BrandingThemeID"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "contact_groups", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["ContactGroupID"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "currencies", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Code": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["Code"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "organisations", + "json_schema": { + "type": ["null", 
"object"], + "properties": { + "APIKey": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "LegalName": { + "type": ["null", "string"] + }, + "PaysTax": { + "type": ["null", "boolean"] + }, + "Version": { + "type": ["null", "string"] + }, + "OrganisationType": { + "type": ["null", "string"] + }, + "BaseCurrency": { + "type": ["null", "string"] + }, + "CountryCode": { + "type": ["null", "string"] + }, + "IsDemoCompany": { + "type": ["null", "boolean"] + }, + "OrganisationStatus": { + "type": ["null", "string"] + }, + "RegistrationNumber": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "FinancialYearEndDay": { + "type": ["null", "integer"] + }, + "FinancialYearEndMonth": { + "type": ["null", "integer"] + }, + "SalesTaxBasis": { + "type": ["null", "string"] + }, + "SalesTaxPeriod": { + "type": ["null", "string"] + }, + "DefaultSalesTax": { + "type": ["null", "string"] + }, + "DefaultPurchasesTax": { + "type": ["null", "string"] + }, + "PeriodLockDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "EndOfYearLockDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Timezone": { + "type": ["null", "string"] + }, + "OrganisationEntityType": { + "type": ["null", "string"] + }, + "ShortCode": { + "type": ["null", "string"] + }, + "OrganisationID": { + "type": ["string"] + }, + "LineOfBusiness": { + "type": ["null", "string"] + }, + "Addresses": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "Phones": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "ExternalLinks": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "LinkType": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["OrganisationID"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": 
"repeating_invoices", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": 
["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "ContactGroups": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, 
+ "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + } + }, + "additionalProperties": true + }, + "Schedule": { + "type": ["null", "object"], + "properties": { + "Unit": { + "type": ["null", "string"] + }, + "DueDateType": { + "type": ["null", "string"] + }, + "StartDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "DueDate": { + "type": ["null", "integer"] + }, + "EndDate": { + "type": ["null", "string"] + }, + "NextScheduledDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "Period": { + "type": ["null", "integer"] + } + }, + "additionalProperties": true + }, + "LineItems": { + "items": { + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + 
"type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RepeatingInvoiceID": { + "type": ["string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["RepeatingInvoiceID"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "tax_rates", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Name": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["string"] + }, + "TaxComponents": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Name": { + "type": ["null", "string"] + }, + "IsCompound": { + "type": ["null", "boolean"] + }, + "IsNonRecoverable": { + "type": ["null", "boolean"] + }, + "Rate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + } + }, + "additionalProperties": true + } + }, + "Status": { + "type": ["null", "string"] + }, + "ReportTaxType": { + "type": ["null", "string"] + }, + "CanApplyToAssets": { + "type": ["null", "boolean"] + }, + "CanApplyToEquity": { + "type": ["null", "boolean"] + }, + "CanApplyToExpenses": { + "type": ["null", "boolean"] + }, + "CanApplyToLiabilities": { + "type": ["null", "boolean"] + }, + "CanApplyToRevenue": { + "type": ["null", "boolean"] + }, + "DisplayTaxRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "EffectiveRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["Name"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "tracking_categories", + "json_schema": { + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "supported_sync_modes": ["full_refresh"], + "source_defined_primary_key": [["TrackingCategoryID"]] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-xero/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..a575a5624d089 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/invalid_config.json @@ -0,0 +1,10 @@ +{ + "client_id": "client_id", + "client_secret": "client_secret", + "tenant_id": "tenant_id", + "scopes": "scope1, scope2", + 
"authentication": { + "auth_type": null + }, + "start_date": "2020-01-01T00:00:00Z" +} diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-xero/integration_tests/sample_config.json new file mode 100644 index 0000000000000..f9edb2855001a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/sample_config.json @@ -0,0 +1,10 @@ +{ + "client_id": "389D15E4EE7D4EC09F91F2926EDE5B9B", + "client_secret": "rumSF2vR4wGY-bhNOTqo1lttMgi0h_XobxyrKYBzrRUxj9nR", + "tenant_id": "22b6dfc8-a47d-4e3d-a4ab-a30f22a5681d", + "scopes": "accounting.attachments accounting.contacts accounting.transactions assets.read accounting.attachments.read accounting.contacts.read accounting.settings accounting.settings.read accounting.journals.read accounting.budgets.read accounting.reports.tenninetynine.read accounting.reports.read assets accounting.transactions.read", + "authentication": { + "auth_type": "custom_connection" + }, + "start_date": "2021-01-01T00:00:00Z" +} diff --git a/airbyte-integrations/connectors/source-xero/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-xero/integration_tests/sample_state.json new file mode 100644 index 0000000000000..5272dc96ea38a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/integration_tests/sample_state.json @@ -0,0 +1,36 @@ +{ + "bank_transactions": { + "date": "2021-10-28T11:08:03+00:00" + }, + "contacts": { + "date": "2021-10-30T10:47:59+00:00" + }, + "credit_notes": { + "date": "2021-10-28T11:06:20+00:00" + }, + "invoices": { + "date": "2021-10-28T16:36:42+00:00" + }, + "manual_journals": { + "date": "2021-10-28T12:08:59+00:00" + }, + "overpayments": { + "date": "2021-10-29T12:30:01+00:00" + }, + "prepayments": {}, + "purchase_orders": {}, + "accounts": { + "date": "2021-10-18T11:51:45+00:00" + }, + "bank_transfers": { + "date": "2021-10-28T07:28:42+00:00" + }, + "employees": {}, + "items": {}, + "payments": { + "date": "2021-10-28T16:36:42+00:00" + }, + "users": { + "date": "2021-08-30T09:53:59+00:00" + } +} diff --git a/airbyte-integrations/connectors/source-xero/main.py b/airbyte-integrations/connectors/source-xero/main.py new file mode 100644 index 0000000000000..621db168b0f05 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_xero import SourceXero + +if __name__ == "__main__": + source = SourceXero() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-xero/requirements.txt b/airbyte-integrations/connectors/source-xero/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-xero/setup.py b/airbyte-integrations/connectors/source-xero/setup.py new file mode 100644 index 0000000000000..b1169baf208ea --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+#
+
+
+from setuptools import find_packages, setup
+
+MAIN_REQUIREMENTS = [
+    "airbyte-cdk~=0.2",
+]
+
+TEST_REQUIREMENTS = [
+    "pytest~=6.1",
+    "pytest-mock~=3.6.1",
+    "source-acceptance-test",
+]
+
+setup(
+    name="source_xero",
+    description="Source implementation for Xero.",
+    author="Airbyte",
+    author_email="contact@airbyte.io",
+    packages=find_packages(),
+    install_requires=MAIN_REQUIREMENTS,
+    package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]},
+    extras_require={
+        "tests": TEST_REQUIREMENTS,
+    },
+)
diff --git a/airbyte-integrations/connectors/source-xero/source_xero/__init__.py b/airbyte-integrations/connectors/source-xero/source_xero/__init__.py
new file mode 100644
index 0000000000000..0acbe42f5ac6f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-xero/source_xero/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+
+from .source import SourceXero
+
+__all__ = ["SourceXero"]
diff --git a/airbyte-integrations/connectors/source-xero/source_xero/bootstrap.md b/airbyte-integrations/connectors/source-xero/source_xero/bootstrap.md
new file mode 100644
index 0000000000000..b5cf4bcf4a2c1
--- /dev/null
+++ b/airbyte-integrations/connectors/source-xero/source_xero/bootstrap.md
@@ -0,0 +1,7 @@
+# Xero
+
+The Xero source connector interacts with the [Xero Accounting API](https://developer.xero.com/documentation/api/accounting/overview), which provides access to accounting data such as invoices, contacts, and bank transactions.
+
+Unfortunately, the connector requires a [Xero Custom Connections](https://developer.xero.com/documentation/guides/oauth2/custom-connections/) subscription, because the default Xero OAuth2 authentication only issues short-lived access_tokens with frequently rotated refresh_tokens. The client_credentials flow used by Custom Connections is implemented in `oauth.py` below.
+
+For testing and development you can use the [Xero demo company](https://developer.xero.com/documentation/development-accounts/#accessing-the-xero-demo-company) with Xero Custom Connections free of charge.
diff --git a/airbyte-integrations/connectors/source-xero/source_xero/oauth.py b/airbyte-integrations/connectors/source-xero/source_xero/oauth.py
new file mode 100644
index 0000000000000..32444ded4418d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-xero/source_xero/oauth.py
@@ -0,0 +1,75 @@
+#
+# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+#
+
+import base64
+import logging
+from typing import Any, List, Mapping, MutableMapping, Tuple
+
+import pendulum
+import requests
+from airbyte_cdk.sources.streams.http.requests_native_auth import Oauth2Authenticator
+
+logger = logging.getLogger("airbyte")
+
+
+class XeroCustomConnectionsOauth2Authenticator(Oauth2Authenticator):
+    """
+    Generates OAuth2.0 access tokens via the client_credentials grant, using the connector's client id and secret.
+    The generated access token is attached to each request via the Authorization header.
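+
+    Illustrative wiring (a documentation sketch only; see source.py for the connector's actual
+    setup). ``config`` stands for the connector configuration mapping, and the token endpoint
+    shown is Xero's identity service:
+
+        authenticator = XeroCustomConnectionsOauth2Authenticator(
+            token_refresh_endpoint="https://identity.xero.com/connect/token",
+            client_id=config["client_id"],
+            client_secret=config["client_secret"],
+            scopes=config["scopes"].split(),
+        )
+        session = requests.Session()
+        session.auth = authenticator  # __call__ attaches the Bearer token to every request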
+ """ + + def __init__( + self, + token_refresh_endpoint: str, + client_id: str, + client_secret: str, + scopes: List[str] = None, + token_expiry_date: pendulum.DateTime = None, + access_token_name: str = "access_token", + expires_in_name: str = "expires_in", + ): + self.token_refresh_endpoint = token_refresh_endpoint + self.client_secret = client_secret + self.client_id = client_id + self.scopes = scopes + self.access_token_name = access_token_name + self.expires_in_name = expires_in_name + + self._token_expiry_date = token_expiry_date or pendulum.now().subtract(days=1) + self._access_token = None + + def __call__(self, request): + request.headers.update(self.get_auth_header()) + return request + + def get_auth_header(self) -> Mapping[str, Any]: + return {"Authorization": f"Bearer {self.get_access_token()}"} + + def token_has_expired(self) -> bool: + return pendulum.now() > self._token_expiry_date + + def get_refresh_request_body(self) -> Mapping[str, Any]: + payload: MutableMapping[str, Any] = { + "grant_type": "client_credentials", + } + + if self.scopes: + payload["scopes"] = self.scopes + + return payload + + def get_refresh_request_headers(self) -> Mapping[str, Any]: + headers: MutableMapping[str, Any] = { + "Authorization": "Basic " + str(base64.b64encode(bytes(self.client_id + ":" + self.client_secret, "utf-8")), "utf-8") + } + + return headers + + def refresh_access_token(self) -> Tuple[str, int]: + response = requests.request( + method="POST", url=self.token_refresh_endpoint, data=self.get_refresh_request_body(), headers=self.get_refresh_request_headers() + ) + response.raise_for_status() + response_json = response.json() + return response_json[self.access_token_name], response_json[self.expires_in_name] diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/accounts.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/accounts.json new file mode 100644 index 0000000000000..489e12a4f1775 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/accounts.json @@ -0,0 +1,61 @@ +{ + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/addresses.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/addresses.json new file mode 100644 index 0000000000000..8562d43038bbd --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/addresses.json @@ -0,0 +1,36 @@ +{ + "type": ["null", "object"], + "properties": { 
+ "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/allocations.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/allocations.json new file mode 100644 index 0000000000000..c64fbd72fa6da --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/allocations.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "$ref": "nested_invoice.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/attachments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/attachments.json new file mode 100644 index 0000000000000..76395ac27ec79 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/attachments.json @@ -0,0 +1,8 @@ +{ + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/bank_transactions.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/bank_transactions.json new file mode 100644 index 0000000000000..1fe6783b4fb77 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/bank_transactions.json @@ -0,0 +1,86 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "BankAccount": { + "$ref": "accounts.json" + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + }, + "Reference": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Url": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "BankTransactionID": { + "type": ["string"] + }, + "PrepaymentID": { + "type": ["null", "string"] + }, + "OverpaymentID": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ExternalLinkProviderName": { + "type": ["null", "string"] + } + }, + "additionalProperties": 
true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/bank_transfers.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/bank_transfers.json new file mode 100644 index 0000000000000..96f5bbd2de03d --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/bank_transfers.json @@ -0,0 +1,50 @@ +{ + "type": ["null", "object"], + "properties": { + "FromBankAccount": { + "$ref": "accounts.json" + }, + "ToBankAccount": { + "$ref": "accounts.json" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + }, + "BankTransferID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FromBankTransactionID": { + "type": ["null", "string"] + }, + "ToBankTransactionID": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CreatedDateUTCString": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/branding_themes.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/branding_themes.json new file mode 100644 index 0000000000000..2f050d55ded29 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/branding_themes.json @@ -0,0 +1,19 @@ +{ + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/contact_groups.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/contact_groups.json new file mode 100644 index 0000000000000..44e93c435b3c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/contact_groups.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/contacts.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/contacts.json new file mode 100644 index 0000000000000..07fddcf746077 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/contacts.json @@ -0,0 +1,199 @@ +{ + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": 
{ + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "$ref": "addresses.json" + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "$ref": "phones.json" + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + "XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "$ref": "tracking_categories.json" + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "$ref": "tracking_categories.json" + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "$ref": "payment_terms.json" + }, + "ContactGroups": { + "items": { + "$ref": "contact_groups.json" + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "$ref": "branding_themes.json" + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "$ref": "attachments.json" + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "$ref": "validation_errors.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/credit_notes.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/credit_notes.json new file mode 100644 index 0000000000000..b1a28bc7b43d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/credit_notes.json @@ -0,0 +1,108 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + 
"type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "CreditNoteID": { + "type": ["string"] + }, + "CreditNoteNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "items": { + "$ref": "allocations.json" + }, + "type": ["null", "array"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/currencies.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/currencies.json new file mode 100644 index 0000000000000..a72704a7bbc5e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/currencies.json @@ -0,0 +1,12 @@ +{ + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Code": { + "type": ["string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/customers.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/customers.json new file mode 100644 index 0000000000000..9a4b134858363 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/customers.json @@ -0,0 +1,16 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "id": { + "type": ["null", "string"] + }, + "name": { + "type": ["null", "string"] + }, + "signup_date": { + "type": ["null", "string"], + "format": "date-time" + } + } +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/employees.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/employees.json new file mode 100644 index 0000000000000..4716d1b42767c --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/employees.json @@ -0,0 +1,25 @@ +{ + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "ExternalLink": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + 
}, + "EmployeeID": { + "type": ["string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/expense_claims.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/expense_claims.json new file mode 100644 index 0000000000000..94fd3506444fe --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/expense_claims.json @@ -0,0 +1,54 @@ +{ + "type": ["null", "object"], + "properties": { + "User": { + "$ref": "users.json" + }, + "Receipts": { + "type": ["null", "array"], + "items": { + "$ref": "receipts.json" + } + }, + "ExpenseClaimID": { + "type": ["string"] + }, + "Payments": { + "type": ["null", "array"], + "items": { + "$ref": "payments.json" + } + }, + "Status": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "PaymentDueDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingDate": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/invoices.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/invoices.json new file mode 100644 index 0000000000000..6b7f06eb63b72 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/invoices.json @@ -0,0 +1,158 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "ExpectedPaymentDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Payments": { + "type": ["null", "array"], + 
"items": { + "$ref": "payments.json" + } + }, + "CreditNotes": { + "type": ["null", "array"], + "items": { + "$ref": "credit_notes.json" + } + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "$ref": "prepayments.json" + } + }, + "Overpayments": { + "type": ["null", "array"], + "items": { + "$ref": "overpayments.json" + } + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/items.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/items.json new file mode 100644 index 0000000000000..a7b31c6f1bf93 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/items.json @@ -0,0 +1,80 @@ +{ + "type": ["null", "object"], + "properties": { + "ItemID": { + "type": ["string"] + }, + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "IsSold": { + "type": ["null", "boolean"] + }, + "IsPurchased": { + "type": ["null", "boolean"] + }, + "Description": { + "type": ["null", "string"] + }, + "PurchaseDescription": { + "type": ["null", "string"] + }, + "PurchaseDetails": { + "properties": { + "TaxType": { + "type": ["null", "string"] + }, + "COGSAccountCode": { + "type": ["null", "string"] + }, + "UnitPrice": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"], + "additionalProperties": true + }, + "SalesDetails": { + "properties": { + "TaxType": { + "type": ["null", "string"] + }, + "UnitPrice": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + } + }, + "type": ["null", "object"], + "additionalProperties": true + }, + "IsTrackedAsInventory": { + "type": ["null", "boolean"] + }, + "InventoryAssetAccountCode": { + "type": ["null", "string"] + }, + "TotalCostPool": { + "type": ["null", "number"] + }, + "QuantityOnHand": { + "type": ["null", "number"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/journals.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/journals.json new file mode 100644 index 0000000000000..d0dcb0a18848c --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/journals.json @@ -0,0 +1,77 @@ +{ + "type": ["null", "object"], + "properties": { + "JournalID": { + "type": ["string"] + }, + "JournalDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "JournalNumber": { + "type": ["null", "integer"] + }, + "CreatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "SourceID": { + "type": ["null", "string"] + }, + "SourceType": { + "type": ["null", 
"string"] + }, + "JournalLines": { + "items": { + "properties": { + "JournalLineID": { + "type": ["null", "string"] + }, + "AccountType": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["null", "string"] + }, + "AccountCode": { + "type": ["null", "string"] + }, + "TaxName": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "GrossAmount": { + "type": ["null", "number"] + }, + "NetAmount": { + "type": ["null", "number"] + }, + "AccountName": { + "type": ["null", "string"] + }, + "TaxAmount": { + "type": ["null", "number"] + }, + "TrackingCategories": { + "items": { + "$ref": "tracking_categories.json" + }, + "type": ["null", "array"] + } + }, + "type": ["null", "object"], + "additionalProperties": true + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/line_items.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/line_items.json new file mode 100644 index 0000000000000..fcee6da109f24 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/line_items.json @@ -0,0 +1,52 @@ +{ + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "$ref": "tracking_categories.json" + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/linked_transactions.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/linked_transactions.json new file mode 100644 index 0000000000000..2dd0d5cccc675 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/linked_transactions.json @@ -0,0 +1,37 @@ +{ + "type": ["null", "object"], + "properties": { + "LinkedTransactionID": { + "type": ["string"] + }, + "Status": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "SourceTransactionID": { + "type": ["null", "string"] + }, + "SourceLineItemID": { + "type": ["null", "string"] + }, + "SourceTransactionTypeCode": { + "type": ["null", "string"] + }, + "ContactID": { + "type": ["null", "string"] + }, + "TargetTransactionID": { + "type": ["null", "string"] + }, + "TargetLineItemID": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/manual_journals.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/manual_journals.json new file mode 100644 index 0000000000000..29379acb904ac --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/manual_journals.json @@ -0,0 +1,75 @@ +{ + 
"type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "Narration": { + "type": ["null", "string"] + }, + "JournalLines": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Description": { + "type": ["null", "string"] + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountID": { + "type": ["null", "string"] + }, + "AccountCode": { + "type": ["null", "string"] + }, + "IsBlank": { + "type": ["null", "boolean"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Tracking": { + "items": { + "$ref": "tracking_categories.json" + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true + } + }, + "Url": { + "type": ["null", "string"] + }, + "ShowOnCashBasisReports": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ManualJournalID": { + "type": ["string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/nested_invoice.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/nested_invoice.json new file mode 100644 index 0000000000000..49ec64344d33e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/nested_invoice.json @@ -0,0 +1,126 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": ["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + 
"AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/organisations.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/organisations.json new file mode 100644 index 0000000000000..40f21bc2504a7 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/organisations.json @@ -0,0 +1,117 @@ +{ + "type": ["null", "object"], + "properties": { + "APIKey": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "LegalName": { + "type": ["null", "string"] + }, + "PaysTax": { + "type": ["null", "boolean"] + }, + "Version": { + "type": ["null", "string"] + }, + "OrganisationType": { + "type": ["null", "string"] + }, + "BaseCurrency": { + "type": ["null", "string"] + }, + "CountryCode": { + "type": ["null", "string"] + }, + "IsDemoCompany": { + "type": ["null", "boolean"] + }, + "OrganisationStatus": { + "type": ["null", "string"] + }, + "RegistrationNumber": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "FinancialYearEndDay": { + "type": ["null", "integer"] + }, + "FinancialYearEndMonth": { + "type": ["null", "integer"] + }, + "SalesTaxBasis": { + "type": ["null", "string"] + }, + "SalesTaxPeriod": { + "type": ["null", "string"] + }, + "DefaultSalesTax": { + "type": ["null", "string"] + }, + "DefaultPurchasesTax": { + "type": ["null", "string"] + }, + "PeriodLockDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "EndOfYearLockDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Timezone": { + "type": ["null", "string"] + }, + "OrganisationEntityType": { + "type": ["null", "string"] + }, + "ShortCode": { + "type": ["null", "string"] + }, + "OrganisationID": { + "type": ["string"] + }, + "LineOfBusiness": { + "type": ["null", "string"] + }, + "Addresses": { + "type": ["null", "array"], + "items": { + "$ref": "addresses.json" + } + }, + "Phones": { + "type": ["null", "array"], + "items": { + "$ref": "phones.json" + } + }, + "ExternalLinks": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "LinkType": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "PaymentTerms": { + "$ref": "payment_terms.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/overpayments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/overpayments.json new file mode 100644 index 0000000000000..bd26d990ee81a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/overpayments.json @@ -0,0 +1,93 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Status": { + "type": ["null", "string"] + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + 
"LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "OverpaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "$ref": "allocations.json" + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "$ref": "payments.json" + } + }, + "Reference": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/payment_terms.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/payment_terms.json new file mode 100644 index 0000000000000..65db9168a078a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/payment_terms.json @@ -0,0 +1,30 @@ +{ + "type": ["null", "object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/payments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/payments.json new file mode 100644 index 0000000000000..fd88e94100724 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/payments.json @@ -0,0 +1,92 @@ +{ + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "$ref": "accounts.json" + }, + "Invoice": { + "$ref": "nested_invoice.json" + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + 
"properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/phones.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/phones.json new file mode 100644 index 0000000000000..ecc7bea9ca046 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/phones.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/prepayments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/prepayments.json new file mode 100644 index 0000000000000..050b86367cb89 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/prepayments.json @@ -0,0 +1,93 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "PrepaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "$ref": "allocations.json" + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "$ref": "payments.json" + } + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/purchase_orders.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/purchase_orders.json new file mode 100644 index 0000000000000..57acbea8d802a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/purchase_orders.json @@ -0,0 +1,116 @@ +{ + "type": ["null", "object"], + "properties": { + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": 
["null", "string"] + }, + "DeliveryDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "PurchaseOrderNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "DeliveryAddress": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "Telephone": { + "type": ["null", "string"] + }, + "DeliveryInstructions": { + "type": ["null", "string"] + }, + "ExpectedArrivalDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "PurchaseOrderID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "DeliveryDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "ExpectedArrivalDateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/quotes.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/quotes.json new file mode 100644 index 0000000000000..90748ef5d35fc --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/quotes.json @@ -0,0 +1,88 @@ +{ + "type": ["null", "object"], + "properties": { + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "ExpiryDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "QuoteID": { + "type": ["null", "string"] + }, + "QuoteNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + 
"Title": { + "type": ["null", "string"] + }, + "Summary": { + "type": ["null", "string"] + }, + "Terms": { + "type": ["null", "string"] + }, + "TrackingCategory": { + "type": ["null", "array"], + "items": { + "$ref": "tracking_categories.json" + } + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/receipts.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/receipts.json new file mode 100644 index 0000000000000..e10bbdcaaf520 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/receipts.json @@ -0,0 +1,71 @@ +{ + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Contact": { + "$ref": "contacts.json" + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "User": { + "$ref": "users.json" + }, + "Reference": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "ReceiptID": { + "type": ["string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ReceiptNumber": { + "type": ["null", "integer"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Url": { + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + }, + "ValidationErrors": { + "$ref": "validation_errors.json" + }, + "Attachments": { + "$ref": "attachments.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/repeating_invoices.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/repeating_invoices.json new file mode 100644 index 0000000000000..c09f8110e95b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/repeating_invoices.json @@ -0,0 +1,91 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Schedule": { + "type": ["null", "object"], + "properties": { + "Unit": { + "type": ["null", "string"] + }, + "DueDateType": { + "type": ["null", "string"] + }, + "StartDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "DueDate": { + "type": ["null", "integer"] + }, + "EndDate": { + "type": ["null", "string"] + }, + "NextScheduledDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "Period": { + "type": ["null", "integer"] + } + }, + "additionalProperties": true + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": 
-1e33, + "maximum": 1e33 + }, + "RepeatingInvoiceID": { + "type": ["string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/accounts.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/accounts.json new file mode 100644 index 0000000000000..489e12a4f1775 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/accounts.json @@ -0,0 +1,61 @@ +{ + "type": ["null", "object"], + "properties": { + "Code": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "Type": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "ReportingCodeName": { + "type": ["null", "string"] + }, + "SystemAccount": { + "type": ["null", "string"] + }, + "BankAccountType": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "Description": { + "type": ["null", "string"] + }, + "Class": { + "type": ["null", "string"] + }, + "AccountID": { + "type": ["string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ShowInExpenseClaims": { + "type": ["null", "boolean"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "ReportingCode": { + "type": ["null", "string"] + }, + "EnablePaymentsToAccount": { + "type": ["null", "boolean"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/addresses.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/addresses.json new file mode 100644 index 0000000000000..8562d43038bbd --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/addresses.json @@ -0,0 +1,36 @@ +{ + "type": ["null", "object"], + "properties": { + "Region": { + "type": ["null", "string"] + }, + "AddressLine1": { + "type": ["null", "string"] + }, + "AddressLine2": { + "type": ["null", "string"] + }, + "AddressLine3": { + "type": ["null", "string"] + }, + "AddressLine4": { + "type": ["null", "string"] + }, + "AttentionTo": { + "type": ["null", "string"] + }, + "City": { + "type": ["null", "string"] + }, + "PostalCode": { + "type": ["null", "string"] + }, + "Country": { + "type": ["null", "string"] + }, + "AddressType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/allocations.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/allocations.json new file mode 100644 index 0000000000000..c64fbd72fa6da --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/allocations.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Invoice": { + "$ref": "nested_invoice.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/attachments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/attachments.json new file mode 100644 index 0000000000000..76395ac27ec79 --- 
/dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/attachments.json @@ -0,0 +1,8 @@ +{ + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": {}, + "additionalProperties": true + } +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/branding_themes.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/branding_themes.json new file mode 100644 index 0000000000000..2f050d55ded29 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/branding_themes.json @@ -0,0 +1,19 @@ +{ + "type": ["null", "object"], + "properties": { + "CreatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "SortOrder": { + "type": ["null", "integer"] + }, + "Name": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/contact_groups.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/contact_groups.json new file mode 100644 index 0000000000000..44e93c435b3c3 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/contact_groups.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "ContactGroupID": { + "type": ["string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/contacts.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/contacts.json new file mode 100644 index 0000000000000..07fddcf746077 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/contacts.json @@ -0,0 +1,199 @@ +{ + "type": ["null", "object"], + "properties": { + "ContactID": { + "type": ["string"] + }, + "ContactNumber": { + "type": ["null", "string"] + }, + "AccountNumber": { + "type": ["null", "string"] + }, + "ContactStatus": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "SkypeUserName": { + "type": ["null", "string"] + }, + "BankAccountDetails": { + "type": ["null", "string"] + }, + "TaxNumber": { + "type": ["null", "string"] + }, + "AccountsReceivableTaxType": { + "type": ["null", "string"] + }, + "AccountsPayableTaxType": { + "type": ["null", "string"] + }, + "Addresses": { + "items": { + "$ref": "addresses.json" + }, + "type": ["null", "array"] + }, + "Phones": { + "items": { + "$ref": "phones.json" + }, + "type": ["null", "array"] + }, + "IsSupplier": { + "type": ["null", "boolean"] + }, + "IsCustomer": { + "type": ["null", "boolean"] + }, + "DefaultCurrency": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "ContactPersons": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "IncludeInEmails": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + } + }, + 
"XeroNetworkKey": { + "type": ["null", "string"] + }, + "SalesDefaultAccountCode": { + "type": ["null", "string"] + }, + "PurchasesDefaultAccountCode": { + "type": ["null", "string"] + }, + "SalesTrackingCategories": { + "type": ["null", "array"], + "items": { + "$ref": "tracking_categories.json" + } + }, + "PurchasesTrackingCategories": { + "type": ["null", "array"], + "items": { + "$ref": "tracking_categories.json" + } + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingCategoryOption": { + "type": ["null", "string"] + }, + "PaymentTerms": { + "$ref": "payment_terms.json" + }, + "ContactGroups": { + "items": { + "$ref": "contact_groups.json" + }, + "type": ["null", "array"] + }, + "Website": { + "type": ["null", "string"] + }, + "BrandingTheme": { + "$ref": "branding_themes.json" + }, + "BatchPayments": { + "type": ["null", "object"], + "properties": { + "Details": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "Code": { + "type": ["null", "string"] + }, + "BankAccountNumber": { + "type": ["null", "string"] + }, + "BankAccountName": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Discount": { + "type": ["null", "number"] + }, + "Balances": { + "type": ["null", "object"], + "properties": { + "AccountsReceivable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + }, + "AccountsPayable": { + "type": ["null", "object"], + "properties": { + "Outstanding": { + "type": ["null", "number"] + }, + "Overdue": { + "type": ["null", "number"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Attachments": { + "$ref": "attachments.json" + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "ValidationErrors": { + "$ref": "validation_errors.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/credit_notes.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/credit_notes.json new file mode 100644 index 0000000000000..b1a28bc7b43d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/credit_notes.json @@ -0,0 +1,108 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": 
["null", "string"] + }, + "CreditNoteID": { + "type": ["string"] + }, + "CreditNoteNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "items": { + "$ref": "allocations.json" + }, + "type": ["null", "array"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/line_items.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/line_items.json new file mode 100644 index 0000000000000..fcee6da109f24 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/line_items.json @@ -0,0 +1,52 @@ +{ + "type": ["null", "object"], + "properties": { + "Description": { + "type": ["null", "string"] + }, + "Quantity": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UnitAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AccountCode": { + "type": ["null", "string"] + }, + "ItemCode": { + "type": ["null", "string"] + }, + "LineItemID": { + "type": ["string"] + }, + "TaxType": { + "type": ["null", "string"] + }, + "LineAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TaxAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DiscountRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Tracking": { + "items": { + "$ref": "tracking_categories.json" + }, + "type": ["null", "array"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/nested_invoice.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/nested_invoice.json new file mode 100644 index 0000000000000..49ec64344d33e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/nested_invoice.json @@ -0,0 +1,126 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "DueDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "InvoiceID": { + "type": 
["string"] + }, + "InvoiceNumber": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "Url": { + "type": ["null", "string"] + }, + "SentToContact": { + "type": ["null", "boolean"] + }, + "ExpectedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "PlannedPaymentDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "AmountDue": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AmountPaid": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "FullyPaidOnDate": { + "format": "date-time", + "type": ["null", "string"] + }, + "AmountCredited": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DueDateString": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsDiscounted": { + "type": ["null", "boolean"] + }, + "HasErrors": { + "type": ["null", "boolean"] + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/overpayments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/overpayments.json new file mode 100644 index 0000000000000..bd26d990ee81a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/overpayments.json @@ -0,0 +1,93 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Status": { + "type": ["null", "string"] + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "OverpaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "$ref": "allocations.json" + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "$ref": "payments.json" + } + }, + "Reference": { + "type": ["null", "string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + }, + "DateString": { + "type": ["null", "string"], + "format": "date-time" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/payment_terms.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/payment_terms.json new file mode 100644 index 0000000000000..65db9168a078a --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/payment_terms.json @@ -0,0 +1,30 @@ +{ + "type": ["null", 
"object"], + "properties": { + "Sales": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + }, + "Bills": { + "type": ["null", "object"], + "properties": { + "Day": { + "type": ["null", "integer"] + }, + "Type": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/payments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/payments.json new file mode 100644 index 0000000000000..fd88e94100724 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/payments.json @@ -0,0 +1,92 @@ +{ + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Amount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "IsReconciled": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "PaymentType": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "Account": { + "$ref": "accounts.json" + }, + "Invoice": { + "$ref": "nested_invoice.json" + }, + "CreditNote": { + "type": ["null", "object"], + "properties": { + "CreditNoteNumber": { + "type": ["string"] + } + }, + "additionalProperties": true + }, + "Prepayments": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "PrepaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "Overpayment": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "OverpaymentID": { + "type": ["string"] + } + }, + "additionalProperties": true + } + }, + "BankAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + }, + "PaymentID": { + "type": ["string"] + }, + "HasAccount": { + "type": ["null", "boolean"] + }, + "BatchPaymentID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/phones.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/phones.json new file mode 100644 index 0000000000000..ecc7bea9ca046 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/phones.json @@ -0,0 +1,18 @@ +{ + "type": ["null", "object"], + "properties": { + "PhoneNumber": { + "type": ["null", "string"] + }, + "PhoneAreaCode": { + "type": ["null", "string"] + }, + "PhoneCountryCode": { + "type": ["null", "string"] + }, + "PhoneType": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/prepayments.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/prepayments.json new file mode 100644 index 0000000000000..050b86367cb89 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/prepayments.json @@ -0,0 +1,93 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + "ID": { + "type": 
["null", "string"] + }, + "Contact": { + "$ref": "contacts.json" + }, + "Date": { + "format": "date-time", + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "PrepaymentID": { + "type": ["string"] + }, + "CurrencyRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Reference": { + "type": ["null", "string"] + }, + "RemainingCredit": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "AppliedAmount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Allocations": { + "type": ["null", "array"], + "items": { + "$ref": "allocations.json" + } + }, + "Payments": { + "type": ["null", "array"], + "items": { + "$ref": "payments.json" + } + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "DateString": { + "format": "date-time", + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/receipts.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/receipts.json new file mode 100644 index 0000000000000..e10bbdcaaf520 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/receipts.json @@ -0,0 +1,71 @@ +{ + "type": ["null", "object"], + "properties": { + "Date": { + "type": ["null", "string"], + "format": "date-time" + }, + "Contact": { + "$ref": "contacts.json" + }, + "LineItems": { + "type": ["null", "array"], + "items": { + "$ref": "line_items.json" + } + }, + "User": { + "$ref": "users.json" + }, + "Reference": { + "type": ["null", "string"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "ReceiptID": { + "type": ["string"] + }, + "Status": { + "type": ["null", "string"] + }, + "ReceiptNumber": { + "type": ["null", "integer"] + }, + "UpdatedDateUTC": { + "type": ["null", "string"], + "format": "date-time" + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "Url": { + "type": ["null", "string"] + }, + "ID": { + "type": ["null", "string"] + }, + "ValidationErrors": { + "$ref": "validation_errors.json" + }, + "Attachments": { + "$ref": "attachments.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/repeating_invoices.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/repeating_invoices.json new file mode 100644 index 0000000000000..c09f8110e95b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/repeating_invoices.json @@ -0,0 +1,91 @@ +{ + "type": ["null", "object"], + "properties": { + "Type": { + "type": ["null", "string"] + }, + 
"Contact": { + "$ref": "contacts.json" + }, + "Schedule": { + "type": ["null", "object"], + "properties": { + "Unit": { + "type": ["null", "string"] + }, + "DueDateType": { + "type": ["null", "string"] + }, + "StartDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "DueDate": { + "type": ["null", "integer"] + }, + "EndDate": { + "type": ["null", "string"] + }, + "NextScheduledDate": { + "type": ["null", "string"], + "format": "date-time" + }, + "Period": { + "type": ["null", "integer"] + } + }, + "additionalProperties": true + }, + "LineItems": { + "items": { + "$ref": "line_items.json" + }, + "type": ["null", "array"] + }, + "LineAmountTypes": { + "type": ["null", "string"] + }, + "Reference": { + "type": ["null", "string"] + }, + "BrandingThemeID": { + "type": ["null", "string"] + }, + "CurrencyCode": { + "type": ["null", "string"] + }, + "Status": { + "type": ["null", "string"] + }, + "SubTotal": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalTax": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "TotalDiscount": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "Total": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "RepeatingInvoiceID": { + "type": ["string"] + }, + "HasAttachments": { + "type": ["null", "boolean"] + }, + "ID": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/tracking_categories.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/tracking_categories.json new file mode 100644 index 0000000000000..71051059f86b7 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/tracking_categories.json @@ -0,0 +1,57 @@ +{ + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, + "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/users.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/users.json new file mode 100644 index 0000000000000..78b642c8f586e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/users.json @@ -0,0 +1,31 @@ +{ + "type": ["null", "object"], + "properties": { + "UserID": { + "type": ["string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsSubscriber": { + 
"type": ["null", "boolean"] + }, + "OrganisationRole": { + "type": ["null", "string"] + }, + "ValidationErrors": { + "$ref": "validation_errors.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/validation_errors.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/validation_errors.json new file mode 100644 index 0000000000000..eb1bd87988f92 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/shared/validation_errors.json @@ -0,0 +1,12 @@ +{ + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/tax_rates.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/tax_rates.json new file mode 100644 index 0000000000000..943a2b328751e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/tax_rates.json @@ -0,0 +1,66 @@ +{ + "type": ["null", "object"], + "properties": { + "Name": { + "type": ["null", "string"] + }, + "TaxType": { + "type": ["string"] + }, + "TaxComponents": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Name": { + "type": ["null", "string"] + }, + "IsCompound": { + "type": ["null", "boolean"] + }, + "IsNonRecoverable": { + "type": ["null", "boolean"] + }, + "Rate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + } + }, + "additionalProperties": true + } + }, + "Status": { + "type": ["null", "string"] + }, + "ReportTaxType": { + "type": ["null", "string"] + }, + "CanApplyToAssets": { + "type": ["null", "boolean"] + }, + "CanApplyToEquity": { + "type": ["null", "boolean"] + }, + "CanApplyToExpenses": { + "type": ["null", "boolean"] + }, + "CanApplyToLiabilities": { + "type": ["null", "boolean"] + }, + "CanApplyToRevenue": { + "type": ["null", "boolean"] + }, + "DisplayTaxRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + }, + "EffectiveRate": { + "type": ["null", "number"], + "minimum": -1e33, + "maximum": 1e33 + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/tracking_categories.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/tracking_categories.json new file mode 100644 index 0000000000000..71051059f86b7 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/tracking_categories.json @@ -0,0 +1,57 @@ +{ + "type": ["null", "object"], + "properties": { + "Status": { + "type": ["null", "string"] + }, + "TrackingCategoryID": { + "type": ["string"] + }, + "Option": { + "type": ["null", "string"] + }, + "TrackingCategoryName": { + "type": ["null", "string"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "TrackingOptionName": { + "type": ["null", "string"] + }, + "Options": { + "items": { + "type": ["null", "object"], + "properties": { + "IsActive": { + "type": ["null", "boolean"] + }, + "IsDeleted": { + "type": ["null", "boolean"] + }, + "TrackingOptionID": { + "type": ["null", "string"] + }, + "IsArchived": { + "type": ["null", "boolean"] + }, + "Status": { + "type": ["null", "string"] + }, + "Name": { + "type": ["null", "string"] + }, + "HasValidationErrors": { + "type": ["null", "boolean"] + } + }, + "additionalProperties": true + }, + "type": ["null", "array"] + }, 
+ "Name": { + "type": ["null", "string"] + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/users.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/users.json new file mode 100644 index 0000000000000..78b642c8f586e --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/users.json @@ -0,0 +1,31 @@ +{ + "type": ["null", "object"], + "properties": { + "UserID": { + "type": ["string"] + }, + "EmailAddress": { + "type": ["null", "string"] + }, + "FirstName": { + "type": ["null", "string"] + }, + "LastName": { + "type": ["null", "string"] + }, + "UpdatedDateUTC": { + "format": "date-time", + "type": ["null", "string"] + }, + "IsSubscriber": { + "type": ["null", "boolean"] + }, + "OrganisationRole": { + "type": ["null", "string"] + }, + "ValidationErrors": { + "$ref": "validation_errors.json" + } + }, + "additionalProperties": true +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/schemas/validation_errors.json b/airbyte-integrations/connectors/source-xero/source_xero/schemas/validation_errors.json new file mode 100644 index 0000000000000..eb1bd87988f92 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/schemas/validation_errors.json @@ -0,0 +1,12 @@ +{ + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "Message": { + "type": ["null", "string"] + } + }, + "additionalProperties": true + } +} diff --git a/airbyte-integrations/connectors/source-xero/source_xero/source.py b/airbyte-integrations/connectors/source-xero/source_xero/source.py new file mode 100644 index 0000000000000..eeaf6b44e757f --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/source.py @@ -0,0 +1,95 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + +from typing import Any, List, Mapping, Tuple + +import pendulum +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream + +from .oauth import XeroCustomConnectionsOauth2Authenticator +from .streams import ( + Accounts, + BankTransactions, + BankTransfers, + BrandingThemes, + ContactGroups, + Contacts, + CreditNotes, + Currencies, + Employees, + Invoices, + Items, + ManualJournals, + Organisations, + Overpayments, + Payments, + Prepayments, + PurchaseOrders, + RepeatingInvoices, + TaxRates, + TrackingCategories, + Users, +) + + +class SourceXero(AbstractSource): + config = None + + def check_connection(self, logger, config) -> Tuple[bool, any]: + self.config = config + stream_kwargs = self.get_stream_kwargs(config) + + organisations_stream = Organisations(**stream_kwargs) + organisations_gen = organisations_stream.read_records(sync_mode=SyncMode.full_refresh) + + organisation = next(organisations_gen) + + return organisation["OrganisationID"] == config.get("tenant_id"), None + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + self.config = config + stream_kwargs = self.get_stream_kwargs(config) + incremental_kwargs = {**stream_kwargs, "start_date": pendulum.parse(config.get("start_date"))} + streams = [ + BankTransactions(**incremental_kwargs), + Contacts(**incremental_kwargs), + CreditNotes(**incremental_kwargs), + Invoices(**incremental_kwargs), + ManualJournals(**incremental_kwargs), + Overpayments(**incremental_kwargs), + Prepayments(**incremental_kwargs), + PurchaseOrders(**incremental_kwargs), + Accounts(**incremental_kwargs), + BankTransfers(**incremental_kwargs), + Employees(**incremental_kwargs), + Items(**incremental_kwargs), + Payments(**incremental_kwargs), + Users(**incremental_kwargs), + BrandingThemes(**stream_kwargs), + ContactGroups(**stream_kwargs), + Currencies(**stream_kwargs), + Organisations(**stream_kwargs), + RepeatingInvoices(**stream_kwargs), + TaxRates(**stream_kwargs), + TrackingCategories(**stream_kwargs), + ] + return streams + + @staticmethod + def get_stream_kwargs(config: Mapping[str, Any]) -> Mapping[str, Any]: + authentication = config.get("authentication") + stream_kwargs = dict() + if authentication.get("auth_type") == "custom_connection": + stream_kwargs["authenticator"] = XeroCustomConnectionsOauth2Authenticator( + token_refresh_endpoint="https://identity.xero.com/connect/token", + client_secret=config.get("client_secret"), + client_id=config.get("client_id"), + scopes=config.get("scopes"), + ) + elif authentication.get("auth_type") == "oauth": + raise Exception("Config validation error. 
OAuth connection is not supported yet.") + + return stream_kwargs diff --git a/airbyte-integrations/connectors/source-xero/source_xero/spec.yaml b/airbyte-integrations/connectors/source-xero/source_xero/spec.yaml new file mode 100644 index 0000000000000..1eeab8b1e795d --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/spec.yaml @@ -0,0 +1,67 @@ +documentationUrl: https://docs.airbyte.io/integrations/sources/xero +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Xero Spec + type: object + required: + - authentication + - start_date + - scopes + - tenant_id + - client_id + - client_secret + additionalProperties: true + + properties: + client_id: + title: Client ID + type: string + description: "Enter your Xero application's Client ID" + client_secret: + title: Client Secret + type: string + description: "Enter your Xero application's Client Secret" + airbyte_secret: true + tenant_id: + title: Tenant ID + type: string + description: "Enter your Xero organization's Tenant ID" + scopes: + title: Scopes + type: string + description: "Enter your required list of scopes (delimited by comma)" + authentication: + type: object + title: Authentication + description: >- + Type and additional credentials of the Xero API connection + oneOf: + - title: Authenticate via Xero (OAuth) (unsupported yet) + type: object + required: + - auth_type + - refresh_token + properties: + auth_type: + type: string + const: oauth + refresh_token: + title: Refresh Token + type: string + description: "Enter your Xero application's refresh token" + airbyte_secret: true + - title: Custom Connections Authentication + type: object + required: + - auth_type + properties: + auth_type: + type: string + const: custom_connection + + start_date: + type: string + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: "UTC date and time in the format YYYY-MM-DDTHH:mm:ssZ. Any data with created_at before this data will not be synced." + examples: + - "2022-03-01T00:00:00Z" diff --git a/airbyte-integrations/connectors/source-xero/source_xero/streams.py b/airbyte-integrations/connectors/source-xero/source_xero/streams.py new file mode 100644 index 0000000000000..e092bda4479db --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/source_xero/streams.py @@ -0,0 +1,242 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
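The spec above only accepts the custom-connection flow today. As a quick, hypothetical illustration (every value below is a placeholder; a real run needs valid Xero credentials and makes a live request), a config matching that spec can be passed straight to `SourceXero.check_connection`:

```python
# Hypothetical config conforming to spec.yaml above; all values are placeholders.
from source_xero.source import SourceXero

config = {
    "client_id": "your-client-id",
    "client_secret": "your-client-secret",
    "tenant_id": "your-xero-tenant-id",
    "scopes": "accounting.settings.read, accounting.transactions.read",
    "authentication": {"auth_type": "custom_connection"},
    "start_date": "2022-03-01T00:00:00Z",
}

source = SourceXero()
# check_connection reads the Organisation endpoint and compares its
# OrganisationID with the configured tenant_id (see source.py above).
ok, error = source.check_connection(logger=None, config=config)
print(ok, error)
```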
+# + +import decimal +import re +from abc import ABC +from datetime import date, datetime, time, timedelta, timezone +from typing import Any, Iterable, Mapping, MutableMapping, Optional + +import pendulum +import requests +from airbyte_cdk.sources.streams.http import HttpStream + + +def parse_date(value): + # Xero datetimes can be .NET JSON date strings which look like + # "/Date(1419937200000+0000)/" + # https://developer.xero.com/documentation/api/requests-and-responses + pattern = r"Date\((\-?\d+)([-+])?(\d+)?\)" + match = re.search(pattern, value) + + iso8601pattern = r"((\d{4})-([0-2]\d)-0?([0-3]\d)T([0-5]\d):([0-5]\d):([0-6]\d))" + + if not match: + iso8601match = re.search(iso8601pattern, value) + if iso8601match: + try: + return datetime.strptime(value) + except Exception: + return None + else: + return None + + millis_timestamp, offset_sign, offset = match.groups() + if offset: + if offset_sign == "+": + offset_sign = 1 + else: + offset_sign = -1 + offset_hours = offset_sign * int(offset[:2]) + offset_minutes = offset_sign * int(offset[2:]) + else: + offset_hours = 0 + offset_minutes = 0 + + return datetime.fromtimestamp((int(millis_timestamp) / 1000), tz=timezone.utc) + timedelta(hours=offset_hours, minutes=offset_minutes) + + +def _json_load_object_hook(_dict): + """Hook for json.parse(...) to parse Xero date formats.""" + # This was taken from the pyxero library and modified + # to format the dates according to RFC3339 + for key, value in _dict.items(): + if isinstance(value, str): + value = parse_date(value) + if value: + if type(value) is date: + value = datetime.combine(value, time.min) + value = value.replace(tzinfo=timezone.utc) + _dict[key] = datetime.isoformat(value, timespec="seconds") + return _dict + + +class XeroStream(HttpStream, ABC): + url_base = "https://api.xero.com/api.xro/2.0/" + page_size = 100 + current_page = 1 + pagination = False + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + records = response.json().get(self.data_field) or [] + if not self.pagination: + return None + if len(records) == self.page_size: + self.current_page += 1 + return {"has_next_page": True} + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + params = {} + if self.pagination: + params["page"] = self.current_page + return params + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + headers = {"Accept": "application/json"} + return headers + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + records = response.json(object_hook=_json_load_object_hook, parse_float=decimal.Decimal).get(self.data_field) or [] + for record in records: + record = record.get(self.data_field) or record + if self.primary_key in record and record[self.primary_key] is None: + record[self.primary_key] = 0 + yield record + + def path(self, **kwargs) -> str: + class_name = self.__class__.__name__ + return f"{class_name[0].lower()}{class_name[1:]}" + + @property + def data_field(self, **kwargs) -> str: + class_name = self.__class__.__name__ + re.sub(r"(? 
<!^)(?=[A-Z])", "_", class_name) + return class_name + + +class IncrementalXeroStream(XeroStream, ABC): + state_checkpoint_interval = 100 + + def __init__(self, start_date: datetime, **kwargs): + super().__init__(**kwargs) + self.start_date = start_date + + @property + def cursor_field(self) ->
str: + return "UpdatedDateUTC" + + def request_headers( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> Mapping[str, Any]: + request_headers = super().request_headers(stream_state, stream_slice, next_page_token) + stream_date = stream_state.get("date") or self.start_date + if isinstance(stream_date, str): + stream_date = pendulum.parse(stream_date) + request_headers["If-Modified-Since"] = stream_date.strftime("%Y-%m-%dT%H:%M:%S") + + return request_headers + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + latest_state = latest_record.get(self.cursor_field) + current_state = current_stream_state.get(self.cursor_field) or latest_state + if current_state: + return {"date": max(latest_state, current_state)} + return {} + + +class BankTransactions(IncrementalXeroStream): + primary_key = "BankTransactionID" + pagination = True + + +class Contacts(IncrementalXeroStream): + primary_key = "ContactID" + pagination = True + + +class CreditNotes(IncrementalXeroStream): + primary_key = "CreditNoteID" + pagination = True + + +class Invoices(IncrementalXeroStream): + primary_key = "InvoiceID" + pagination = True + + +class ManualJournals(IncrementalXeroStream): + primary_key = "ManualJournalID" + pagination = True + + +class Overpayments(IncrementalXeroStream): + primary_key = "OverpaymentID" + pagination = True + + +class Prepayments(IncrementalXeroStream): + primary_key = "PrepaymentID" + pagination = True + + +class PurchaseOrders(IncrementalXeroStream): + primary_key = "PurchaseOrderID" + pagination = True + + +class Accounts(IncrementalXeroStream): + primary_key = "AccountID" + + +class BankTransfers(IncrementalXeroStream): + primary_key = "BankTransferID" + pagination = True + + @property + def cursor_field(self) -> str: + return "CreatedDateUTC" + + +class Employees(IncrementalXeroStream): + primary_key = "EmployeeID" + pagination = True + + +class Items(IncrementalXeroStream): + primary_key = "ItemID" + + +class Payments(IncrementalXeroStream): + primary_key = "PaymentID" + pagination = True + + +class Users(IncrementalXeroStream): + primary_key = "UserID" + + +class BrandingThemes(XeroStream): + primary_key = "BrandingThemeID" + + +class ContactGroups(XeroStream): + primary_key = "ContactGroupID" + + +class Currencies(XeroStream): + primary_key = "Code" + + +class Organisations(XeroStream): + primary_key = "OrganisationID" + + def path(self, **kwargs) -> str: + return "Organisation" + + +class RepeatingInvoices(XeroStream): + primary_key = "RepeatingInvoiceID" + + +class TaxRates(XeroStream): + primary_key = "Name" + + +class TrackingCategories(XeroStream): + primary_key = "TrackingCategoryID" diff --git a/airbyte-integrations/connectors/source-xero/unit_tests/__init__.py b/airbyte-integrations/connectors/source-xero/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-xero/unit_tests/conftest.py b/airbyte-integrations/connectors/source-xero/unit_tests/conftest.py new file mode 100644 index 0000000000000..ddcc4bad4d8f9 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/unit_tests/conftest.py @@ -0,0 +1,48 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
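Each stream class above maps to a Xero endpoint purely by naming convention: `path()` lower-cases the first character of the class name (with `Organisations` overriding it explicitly), and `parse_response` pulls records out of the matching key in the payload. A small sketch of what that convention produces, assuming the constructors shown above and no authenticator for brevity:

```python
# Illustration of the class-name -> endpoint convention used by XeroStream.path().
import pendulum

from source_xero.streams import BankTransactions, Organisations

incremental = BankTransactions(start_date=pendulum.parse("2022-03-01T00:00:00Z"))
print(incremental.url_base + incremental.path())    # https://api.xero.com/api.xro/2.0/bankTransactions

full_refresh = Organisations()
print(full_refresh.url_base + full_refresh.path())  # .../Organisation (explicit override above)
```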
+# + +from pytest import fixture + + +@fixture(name="config") +def config_fixture(): + return { + "client_id": "client_id", + "client_secret": "client_secret", + "tenant_id": "tenant_id", + "scopes": "scope1, scope2", + "authentication": {"auth_type": "custom_connection"}, + "start_date": "2020-01-01T00:00:00Z", + } + + +@fixture(name="mock_response") +def mock_response(): + return { + "data": [{"gid": "gid", "resource_type": "resource_type", "name": "name"}], + "next_page": {"offset": "offset", "path": "path", "uri": "uri"}, + } + + +@fixture(name="mock_stream") +def mock_stream_fixture(requests_mock): + def _mock_stream(path, response=None): + if response is None: + response = {} + + url = f"https://api.xero.com/api.xro/2.0/{path}" + requests_mock.get(url, json=response) + requests_mock.get("https://identity.xero.com/connect/token", json={}) + + return _mock_stream + + +@fixture(name="mock_auth") +def mock_auth_fixture(requests_mock): + def _mock_auth(response=None): + if response is None: + response = {} + requests_mock.post("https://identity.xero.com/connect/token", json=response) + + return _mock_auth diff --git a/airbyte-integrations/connectors/source-xero/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-xero/unit_tests/test_incremental_streams.py new file mode 100644 index 0000000000000..adf8361290d18 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/unit_tests/test_incremental_streams.py @@ -0,0 +1,55 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import datetime + +from airbyte_cdk.models import SyncMode +from pytest import fixture +from source_xero.streams import IncrementalXeroStream + + +@fixture +def patch_incremental_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(IncrementalXeroStream, "path", "v0/example_endpoint") + mocker.patch.object(IncrementalXeroStream, "primary_key", "test_primary_key") + mocker.patch.object(IncrementalXeroStream, "__abstractmethods__", set()) + + +def test_cursor_field(patch_incremental_base_class): + stream = IncrementalXeroStream(start_date=datetime.datetime.now()) + expected_cursor_field = "UpdatedDateUTC" + assert stream.cursor_field == expected_cursor_field + + +def test_get_updated_state(patch_incremental_base_class): + stream = IncrementalXeroStream(start_date=datetime.datetime.now()) + date = datetime.datetime.now().replace(microsecond=0) + inputs = {"current_stream_state": {"date": "2022-01-01"}, "latest_record": {"UpdatedDateUTC": date.isoformat()}} + expected_state = {"date": date.isoformat()} + assert stream.get_updated_state(**inputs) == expected_state + + +def test_stream_slices(patch_incremental_base_class): + stream = IncrementalXeroStream(start_date=datetime.datetime.now()) + inputs = {"sync_mode": SyncMode.incremental, "cursor_field": [], "stream_state": {}} + expected_stream_slice = [None] + assert stream.stream_slices(**inputs) == expected_stream_slice + + +def test_supports_incremental(patch_incremental_base_class, mocker): + mocker.patch.object(IncrementalXeroStream, "cursor_field", "dummy_field") + stream = IncrementalXeroStream(start_date=datetime.datetime.now()) + assert stream.supports_incremental + + +def test_source_defined_cursor(patch_incremental_base_class): + stream = IncrementalXeroStream(start_date=datetime.datetime.now()) + assert stream.source_defined_cursor + + +def test_stream_checkpoint_interval(patch_incremental_base_class): + stream = 
IncrementalXeroStream(start_date=datetime.datetime.now()) + expected_checkpoint_interval = 100 + assert stream.state_checkpoint_interval == expected_checkpoint_interval diff --git a/airbyte-integrations/connectors/source-xero/unit_tests/test_source.py b/airbyte-integrations/connectors/source-xero/unit_tests/test_source.py new file mode 100644 index 0000000000000..1601871e7b9c1 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/unit_tests/test_source.py @@ -0,0 +1,22 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from unittest.mock import MagicMock + +from source_xero.source import SourceXero + + +def test_check_connection(mock_auth, mock_stream, mock_response, config): + mock_stream("Organisation", response={"Organisations": [{"OrganisationID": "tenant_id"}]}) + mock_auth({"access_token": "TOKEN", "expires_in": 123}) + source = SourceXero() + logger_mock, config_mock = MagicMock(), config + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(config): + source = SourceXero() + streams = source.streams(config) + expected_streams_number = 21 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-xero/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-xero/unit_tests/test_streams.py new file mode 100644 index 0000000000000..936bc53efeaa4 --- /dev/null +++ b/airbyte-integrations/connectors/source-xero/unit_tests/test_streams.py @@ -0,0 +1,95 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +import datetime +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_xero.streams import XeroStream, parse_date + + +@pytest.fixture +def patch_base_class(mocker): + mocker.patch.object(XeroStream, "path", "v0/example_endpoint") + mocker.patch.object(XeroStream, "primary_key", "test_primary_key") + mocker.patch.object(XeroStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = XeroStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {} + assert stream.request_params(**inputs) == expected_params + + +def test_next_page_token(patch_base_class): + stream = XeroStream() + inputs = {"response": MagicMock()} + expected_token = None + assert stream.next_page_token(**inputs) == expected_token + + stream.page_size = 1 + stream.pagination = True + response = MagicMock() + response.json.return_value = {"XeroStream": [{}]} + inputs = {"response": response} + expected_token = {"has_next_page": True} + assert stream.next_page_token(**inputs) == expected_token + + +def test_parse_response(patch_base_class): + stream = XeroStream() + response = MagicMock() + response.json.return_value = {"XeroStream": [{"key": "value"}]} + inputs = {"response": response} + expected_parsed_object = {"key": "value"} + assert next(stream.parse_response(**inputs)) == expected_parsed_object + + +def test_request_headers(patch_base_class): + stream = XeroStream() + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_headers = {"Accept": "application/json"} + assert stream.request_headers(**inputs) == expected_headers + + +def test_http_method(patch_base_class): + stream = XeroStream() + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + 
(HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = XeroStream() + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = XeroStream() + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time + + +def test_parse_date(): + # 11/10/2020 00:00:00 +3 (11/10/2020 21:00:00 GMT/UTC) + assert parse_date("/Date(1602363600000+0300)/") == datetime.datetime(2020, 10, 11, 0, 0, tzinfo=datetime.timezone.utc) + # 02/02/2020 10:31:51.5 +3 (02/02/2020 07:31:51.5 GMT/UTC) + assert parse_date("/Date(1580628711500+0300)/") == datetime.datetime(2020, 2, 2, 10, 31, 51, 500000, tzinfo=datetime.timezone.utc) + # 07/02/2022 20:12:55 GMT/UTC + assert parse_date("/Date(1656792775000)/") == datetime.datetime(2022, 7, 2, 20, 12, 55, tzinfo=datetime.timezone.utc) + # Not a date + assert parse_date("not a date") is None diff --git a/airbyte-integrations/connectors/source-younium/.dockerignore b/airbyte-integrations/connectors/source-younium/.dockerignore new file mode 100644 index 0000000000000..cebe350aeec30 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_younium +!setup.py +!secrets \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/Dockerfile b/airbyte-integrations/connectors/source-younium/Dockerfile new file mode 100644 index 0000000000000..92fb46be3954d --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.13-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_younium ./source_younium + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-younium \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/README.md b/airbyte-integrations/connectors/source-younium/README.md new file mode 100644 index 0000000000000..0c8f7fa797d05 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/README.md @@ -0,0 +1,132 @@ +# Younium Source + +This is the repository for the Younium source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/younium). 
+ +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-younium:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/younium) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_younium/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source younium test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-younium:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-younium:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. 
+ +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-younium:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-younium:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-younium:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-younium:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-younium:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-younium:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
\ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml b/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml new file mode 100644 index 0000000000000..56827d2bf7c3d --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/acceptance-test-config.yml @@ -0,0 +1,26 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-younium:dev +tests: + spec: + - spec_path: "source_younium/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-younium/acceptance-test-docker.sh new file mode 100755 index 0000000000000..fa680528f222c --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/acceptance-test-docker.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/build.gradle b/airbyte-integrations/connectors/source-younium/build.gradle new file mode 100644 index 0000000000000..81baf943b54e1 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_younium' +} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/__init__.py b/airbyte-integrations/connectors/source-younium/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-younium/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..52b0f2c2118f4 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-younium/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-younium/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..a9a071c44c442 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/configured_catalog.json @@ -0,0 +1,31 @@ +{ + "streams": [ + { + "stream": { + "name": "invoice", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "product", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + }, + { + "stream": { + "name": "subscription", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-younium/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..7a61623e3ac98 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/invalid_config.json @@ -0,0 +1,6 @@ +{ + "username": "invalid username", + "password": "invalid password", + "legal_entity": "invalid legal entity", + "playground": "invalid flag" +} diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-younium/integration_tests/sample_config.json new file mode 100644 index 0000000000000..2935f03552707 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/sample_config.json @@ -0,0 +1,6 @@ +{ + "username": "user@user.com", + "password": "password", + "legal_entity": "XX co", + "playground": true +} diff --git a/airbyte-integrations/connectors/source-younium/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-younium/integration_tests/sample_state.json new file mode 100644 index 0000000000000..3587e579822d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + 
"todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-younium/main.py b/airbyte-integrations/connectors/source-younium/main.py new file mode 100644 index 0000000000000..d638d3d94c3fc --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_younium import SourceYounium + +if __name__ == "__main__": + source = SourceYounium() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-younium/requirements.txt b/airbyte-integrations/connectors/source-younium/requirements.txt new file mode 100644 index 0000000000000..78140e52009f5 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-younium/setup.py b/airbyte-integrations/connectors/source-younium/setup.py new file mode 100644 index 0000000000000..9c1c848e788ca --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/setup.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.2", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "responses~=0.22.0", + "source-acceptance-test", +] + +setup( + name="source_younium", + description="Source implementation for Younium.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-younium/source_younium/__init__.py b/airbyte-integrations/connectors/source-younium/source_younium/__init__.py new file mode 100644 index 0000000000000..e51d99b37ef65 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/source_younium/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceYounium + +__all__ = ["SourceYounium"] diff --git a/airbyte-integrations/connectors/source-younium/source_younium/schemas/invoice.json b/airbyte-integrations/connectors/source-younium/source_younium/schemas/invoice.json new file mode 100644 index 0000000000000..96c65fe57d0cc --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/source_younium/schemas/invoice.json @@ -0,0 +1,386 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "invoiceNumber": { + "type": "string" + }, + "status": { + "type": "string" + }, + "account": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "accountNumber": { + "type": "string" + }, + "id": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "notes": { + "type": "string" + }, + "invoiceDate": { + "type": "string" + }, + "dueDate": { + "type": "string" + }, + "daysPastDue": { + "type": "number" + }, + "nrOfReminders": { + "type": "number" + }, + "paymentTerm": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "days": { + "type": "number" + }, + "name": { + "type": "string" + } + } + }, + "currency": { + "type": "string" + }, + "subtotal": { + "type": "number" + }, + "tax": { + "type": "number" + }, + "totalAmount": { + "type": "number" + }, + "totalRoundingAmount": { + "type": "number" + }, + "settledAmount": { + "type": "number" + }, + "balancedAmount": { + "type": "number" + }, + "taxIncluded": { + "type": "boolean" + }, + "invoiceAddress": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "street": { + "type": "string" + }, + "street2": { + "type": "string" + }, + "city": { + "type": "string" + }, + "county": { + "type": "string" + }, + "state": { + "type": "string" + }, + "zip": { + "type": "string" + }, + "country": { + "type": "string" + } + } + }, + "deliveryAddress": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "street": { + "type": "string" + }, + "street2": { + "type": "string" + }, + "city": { + "type": "string" + }, + "county": { + "type": "string" + }, + "state": { + "type": "string" + }, + "zip": { + "type": "string" + }, + "country": { + "type": "string" + } + } + }, + "invoiceBatchId": { + "type": "string" + }, + "invoiceLines": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "invoiceLineNumber": { + "type": "number" + }, + "productNumber": { + "type": "string" + }, + "productName": { + "type": "string" + }, + "chargeDescription": { + "type": "string" + }, + "chargeNumber": { + "type": "string" + }, + "quantity": { + "type": "number" + }, + "unitOfMeasure": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "unitCode": { + "type": "string" + }, + "name": { + "type": "string" + }, + "displayName": { + "type": "string" + } + } + }, + "price": { + "type": "number" + }, + "subtotal": { + "type": "number" + }, + "total": { + "type": "number" + }, + "tax": { + "type": "number" + }, + "servicePeriodStartDate": { + "type": "string" + }, + "servicePeriodEndDate": { + "type": "string" + }, + "notes": { + "type": "string" + }, + "orderChargeId": { + "type": "string" + }, + "orderId": { + "type": "string" + 
}, + "accountId": { + "type": "string" + }, + "customFields": { + "type": "object" + }, + "accountsReceivable": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "code": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "deferredRevenue": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "code": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "recognizedRevenue": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "code": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + }, + "taxCategoryName": { + "type": "string" + }, + "taxRate": { + "type": "number" + } + } + } + }, + "yourReference": { + "type": "string" + }, + "ourReference": { + "type": "string" + }, + "yourOrderNumber": { + "type": "string" + }, + "buyerReference": { + "type": "string" + }, + "invoiceType": { + "type": "string" + }, + "sendMethod": { + "type": "string" + }, + "exchangeRate": { + "type": "number" + }, + "settledNotes": { + "type": "string" + }, + "invoiceTemplateId": { + "type": "string" + }, + "disableAutomaticInvoiceReminder": { + "type": "boolean" + }, + "onlinePaymentLink": { + "type": "string" + }, + "accountsReceivable": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "code": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "customFields": { + "type": "object" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } +} diff --git a/airbyte-integrations/connectors/source-younium/source_younium/schemas/product.json b/airbyte-integrations/connectors/source-younium/source_younium/schemas/product.json new file mode 100644 index 0000000000000..296946ccd16d9 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/source_younium/schemas/product.json @@ -0,0 +1,174 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "productNumber": { + "type": "string" + }, + "name": { + "type": "string" + }, + "productType": { + "type": "string" + }, + "category": { + "type": "string" + }, + "activationDate": { + "type": "string" + }, + "endOfNewSalesDate": { + "type": "string" + }, + "endOfRenewalDate": { + "type": "string" + }, + "endOfLifeDate": { + "type": "string" + }, + "isFrameworkProduct": { + "type": "boolean" + }, + "chargePlans": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "chargePlanNumber": { + "type": "string" + }, + "name": { + "type": "string" + }, + "effectiveStartDate": { + "type": "string" + }, + "endOfNewSalesDate": { + "type": "string" + }, + "effectiveEndDate": { + "type": "string" + }, + "charges": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + 
"chargeNumber": { + "type": "string" + }, + "name": { + "type": "string" + }, + "model": { + "type": "string" + }, + "chargeType": { + "type": "string" + }, + "unitCode": { + "type": "string" + }, + "defaultQuantity": { + "type": "number" + }, + "pricePeriod": { + "type": "string" + }, + "usageRating": { + "type": "string" + }, + "createInvoiceLinesPerTier": { + "type": "boolean" + }, + "billingDay": { + "type": "string" + }, + "specificBillingDay": { + "type": "number" + }, + "billingPeriod": { + "type": "string" + }, + "periodAlignment": { + "type": "string" + }, + "billingTiming": { + "type": "string" + }, + "taxTemplate": { + "type": "string" + }, + "taxIncluded": { + "type": "boolean" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + }, + "deferredRevenueAccount": { + "type": "string" + }, + "recognizedRevenueAccount": { + "type": "string" + }, + "customFields": { + "type": "object" + }, + "priceDetails": { + "type": "array", + "items": { + "type": "object", + "properties": { + "currency": { + "type": "string" + }, + "price": { + "type": "number" + }, + "tier": { + "type": "number" + }, + "description": { + "type": "string" + }, + "fromQuantity": { + "type": "number" + }, + "toQuantity": { + "type": "number" + }, + "priceBase": { + "type": "string" + } + } + } + } + } + } + } + } + } + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + }, + "customFields": { + "type": "object" + } + } +} diff --git a/airbyte-integrations/connectors/source-younium/source_younium/schemas/subscription.json b/airbyte-integrations/connectors/source-younium/source_younium/schemas/subscription.json new file mode 100644 index 0000000000000..c4ea6eeca3ff2 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/source_younium/schemas/subscription.json @@ -0,0 +1,836 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "orderNumber": { + "type": "string" + }, + "version": { + "type": "number" + }, + "isLastVersion": { + "type": "boolean" + }, + "status": { + "type": "string" + }, + "description": { + "type": "string" + }, + "remarks": { + "type": "string" + }, + "effectiveStartDate": { + "type": "string" + }, + "effectiveEndDate": { + "type": "string" + }, + "cancellationDate": { + "type": "string" + }, + "effectiveChangeDate": { + "type": "string" + }, + "orderDate": { + "type": "string" + }, + "noticePeriodDate": { + "type": "string" + }, + "lastRenewalDate": { + "type": "string" + }, + "noticePeriod": { + "type": "number" + }, + "term": { + "type": "number" + }, + "renewalTerm": { + "type": "number" + }, + "isAutoRenewed": { + "type": "boolean" + }, + "orderType": { + "type": "string" + }, + "termType": { + "type": "string" + }, + "orderPaymentMethod": { + "type": "string" + }, + "invoiceSeparatly": { + "type": "boolean" + }, + "yourReference": { + "type": "string" + }, + "ourReference": { + "type": "string" + }, + "yourOrderNumber": { + "type": "string" + }, + "invoiceAddress": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "street": { + "type": "string" + }, + "street2": { + "type": "string" + }, + "city": { + "type": "string" + }, + "county": { + "type": "string" + }, + "state": { + "type": "string" + }, + "zip": { + "type": "string" + }, + "country": { + "type": "string" + } + } + }, + "deliveryAddress": { + "type": "object", + 
"properties": { + "id": { + "type": "string" + }, + "description": { + "type": "string" + }, + "name": { + "type": "string" + }, + "street": { + "type": "string" + }, + "street2": { + "type": "string" + }, + "city": { + "type": "string" + }, + "county": { + "type": "string" + }, + "state": { + "type": "string" + }, + "zip": { + "type": "string" + }, + "country": { + "type": "string" + } + } + }, + "invoiceBatchGroup": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "code": { + "type": "string" + }, + "description": { + "type": "string" + } + } + }, + "paymentTerm": { + "type": "string" + }, + "useAccountInvoiceBatchGroup": { + "type": "boolean" + }, + "account": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "accountNumber": { + "type": "string" + }, + "id": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "invoiceAccount": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "accountNumber": { + "type": "string" + }, + "id": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + } + } + }, + "products": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "productNumber": { + "type": "string" + }, + "chargePlanId": { + "type": "string" + }, + "chargePlanName": { + "type": "string" + }, + "chargePlanNumber": { + "type": "string" + }, + "productLineNumber": { + "type": "number" + }, + "name": { + "type": "string" + }, + "charges": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "chargeNumber": { + "type": "string" + }, + "version": { + "type": "number" + }, + "isLastVersion": { + "type": "boolean" + }, + "name": { + "type": "string" + }, + "chargeType": { + "type": "string" + }, + "priceModel": { + "type": "string" + }, + "effectiveStartDate": { + "type": "string" + }, + "effectiveEndDate": { + "type": "string" + }, + "quantity": { + "type": "number" + }, + "unitCode": { + "type": "string" + }, + "startOn": { + "type": "string" + }, + "endOn": { + "type": "string" + }, + "chargedThroughDate": { + "type": "string" + }, + "lastRenewalDate": { + "type": "string" + }, + "lastPriceAdjustmentDate": { + "type": "string" + }, + "pricePeriod": { + "type": "string" + }, + "usageRating": { + "type": "string" + }, + "revenueRecognitionRule": { + "type": "string" + }, + "billingDay": { + "type": "string" + }, + "specificBillingDay": { + "type": "number" + }, + "billingPeriod": { + "type": "string" + }, + "billingTiming": { + "type": "string" + }, + "periodAlignment": { + "type": "string" + }, + "taxTemplate": { + "type": "string" + }, + "taxIncluded": { + "type": "boolean" + }, + "createInvoiceLinesPerTier": { + "type": "boolean" + }, + "estimatedUsage": { + "type": "number" + }, + "estimatedQuantity": { + "type": "number" + }, + "remarks": { + "type": "string" + }, + "accountsReceivableAccount": { + "type": "string" + }, + "deferredRevenueAccount": { + "type": "string" + }, + "recognizedRevenueAccount": { + "type": "string" + }, + "changeState": { + "type": "string" + }, + "displayPrice": { + "type": "number" + }, + "customFields": { + "type": "object" + }, + "priceDetails": { + "type": "array", + "items": { + "type": "object", + "properties": { + "tier": { + "type": "number" + }, + "price": { + "type": "number" + }, + "listPrice": { + "type": "number" + }, + "description": 
{ + "type": "string" + }, + "fromQuantity": { + "type": "number" + }, + "toQuantity": { + "type": "number" + }, + "priceBase": { + "type": "string" + }, + "lineDiscountPercent": { + "type": "number" + }, + "lineDiscountAmount": { + "type": "number" + } + } + } + }, + "recurringMonthlyAmount": { + "type": "number" + }, + "recurringMonthlyAmountBase": { + "type": "number" + }, + "features": { + "type": "array", + "items": { + "type": "object", + "properties": { + "code": { + "type": "string" + }, + "description": { + "type": "string" + } + } + } + }, + "orderDiscounts": { + "type": "array", + "items": { + "type": "object", + "properties": { + "orderDiscountId": { + "type": "string" + }, + "chargeId": { + "type": "string" + } + } + } + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + }, + "cmrr": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "acv": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "tcv": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "emrr": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "oneTimeFees": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "orderProductId": { + "type": "string" + }, + "orderId": { + "type": "string" + } + } + } + }, + "customFields": { + "type": "object" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + }, + "cmrr": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "acv": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "emrr": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "oneTimeFees": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + 
"currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "tcv": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + } + } + } + }, + "milestones": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "orderId": { + "type": "string" + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "milestoneDate": { + "type": "string" + }, + "plannedDate": { + "type": "string" + } + } + } + }, + "orderDiscounts": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "orderId": { + "type": "string" + }, + "startOn": { + "type": "string" + }, + "endOn": { + "type": "string" + }, + "startDate": { + "type": "string" + }, + "endDate": { + "type": "string" + }, + "percent": { + "type": "number" + }, + "discountType": { + "type": "string" + }, + "orderProductCharges": { + "type": "array", + "items": { + "type": "object", + "properties": { + "orderDiscountId": { + "type": "string" + }, + "chargeId": { + "type": "string" + } + } + } + }, + "onSpecificCharges": { + "type": "boolean" + } + } + } + }, + "currency": { + "type": "string" + }, + "externalERPId": { + "type": "string" + }, + "externalCRMId": { + "type": "string" + }, + "currencyCodeToUseWhenInvoice": { + "type": "string" + }, + "customFields": { + "type": "object" + }, + "cmrr": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "acv": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "emrr": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "oneTimeFees": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + }, + "tcv": { + "type": "object", + "properties": { + "amount": { + "type": "number" + }, + "currencyCode": { + "type": "string" + }, + "currencyConversionDate": { + "type": "string" + }, + "baseCurrencyAmount": { + "type": "number" + }, + "baseCurrencyCode": { + "type": "string" + } + } + } + } +} diff --git a/airbyte-integrations/connectors/source-younium/source_younium/source.py b/airbyte-integrations/connectors/source-younium/source_younium/source.py new file mode 100644 index 0000000000000..ac4c5be6a6327 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/source_younium/source.py @@ -0,0 +1,131 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from abc import ABC +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import requests +from airbyte_cdk.models import SyncMode +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream +from airbyte_cdk.sources.streams.http.auth import TokenAuthenticator + + +# Basic full refresh stream +class YouniumStream(HttpStream, ABC): + # url_base = "https://apisandbox.younium.com" + + # https://api.younium.com + def __init__(self, authenticator=TokenAuthenticator, playground: bool = False, *args, **kwargs): + super().__init__(authenticator=authenticator) + self.page_size = 100 + self.playground: bool = playground + + @property + def url_base(self) -> str: + if self.playground: + endpoint = "https://apisandbox.younium.com" + else: + endpoint = "https://api.younium.com" + return endpoint + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + response = response.json() + current_page = response.get("pageNumber", 1) + total_rows = response.get("totalCount", 0) + + total_pages = total_rows // self.page_size + + if current_page <= total_pages: + return {"pageNumber": current_page + 1} + else: + return None + + def request_params( + self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None + ) -> MutableMapping[str, Any]: + if next_page_token: + return {"pageNumber": next_page_token["pageNumber"], "PageSize": self.page_size} + else: + return {"PageSize": self.page_size} + + def parse_response( + self, + response: requests.Response, + *, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> Iterable[Mapping]: + response_results = response.json() + yield from response_results.get("data", []) + + +class Invoice(YouniumStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "Invoices" + + +class Product(YouniumStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "Products" + + +class Subscription(YouniumStream): + primary_key = "id" + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return "Subscriptions" + + +class SourceYounium(AbstractSource): + def get_auth(self, config): + scope = "openid youniumapi profile" + + if config.get("playground"): + url = "https://younium-identity-server-sandbox.azurewebsites.net/connect/token" + else: + url = "https://younium-identity-server.azurewebsites.net/connect/token" + + payload = f"grant_type=password&client_id=apiclient&username={config['username']}&password={config['password']}&scope={scope}" + headers = {"Content-Type": "application/x-www-form-urlencoded"} + response = requests.request("POST", url, headers=headers, data=payload) + response.raise_for_status() + access_token = response.json()["access_token"] + + auth = TokenAuthenticator(token=access_token) + return auth + + def check_connection(self, logger, config) -> Tuple[bool, any]: + + try: + stream = Invoice(authenticator=self.get_auth(config), **config) + stream.next_page_token = lambda response: None + stream.page_size = 
1 + # auth = self.get_auth(config) + _ = list(stream.read_records(sync_mode=SyncMode.full_refresh)) + return True, None + except Exception as e: + logger.error(e) + return False, repr(e) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + """ + + :param config: A Mapping of the user input configuration as defined in the connector spec. + """ + auth = self.get_auth(config) + return [Invoice(authenticator=auth, **config), Product(authenticator=auth, **config), Subscription(authenticator=auth, **config)] diff --git a/airbyte-integrations/connectors/source-younium/source_younium/spec.yaml b/airbyte-integrations/connectors/source-younium/source_younium/spec.yaml new file mode 100644 index 0000000000000..0cf38ff69a034 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/source_younium/spec.yaml @@ -0,0 +1,28 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/younium +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Younium Spec + type: object + required: + - username + - password + - legal_entity + properties: + username: + title: Username + type: string + description: Username for Younium account + password: + title: Password + type: string + description: Account password for younium account API key + airbyte_secret: true + legal_entity: + title: Legal Entity + type: string + description: Legal Entity that data should be pulled from + playground: + title: Playground environment + type: boolean + description: Property defining if connector is used against playground or production environment + default: false diff --git a/airbyte-integrations/connectors/source-younium/unit_tests/__init__.py b/airbyte-integrations/connectors/source-younium/unit_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/unit_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-younium/unit_tests/test_source.py b/airbyte-integrations/connectors/source-younium/unit_tests/test_source.py new file mode 100644 index 0000000000000..61f8dc242c896 --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/unit_tests/test_source.py @@ -0,0 +1,51 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
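+#
+# check_connection() is exercised against a stubbed token endpoint and a stubbed
+# first Invoices page (via the `responses` library), so no real Younium
+# credentials or network access are needed; test_streams() patches get_auth
+# instead of going through the token flow.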
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import responses +from source_younium.source import SourceYounium + + +@responses.activate +def test_check_connection(mocker): + sandbox = False + + source = SourceYounium() + # mock the post request + + if sandbox: + mock_url1 = "https://younium-identity-server-sandbox.azurewebsites.net/connect/token" + mock_url2 = "https://apisandbox.younium.com/Invoices?PageSize=1" + else: + mock_url1 = "https://younium-identity-server.azurewebsites.net/connect/token" + mock_url2 = "https://api.younium.com/Invoices?PageSize=1" + # Mock the POST to get the access token + responses.add( + responses.POST, + mock_url1, + json={ + "access_token": "dummy_token", + }, + status=HTTPStatus.OK, + ) + + # Mock the GET to get the first page of the stream + responses.add(responses.GET, mock_url2, json={}, status=HTTPStatus.OK) + + logger_mock = MagicMock() + config_mock = {"playground": sandbox, "username": "dummy_username", "password": "dummy_password"} + + assert source.check_connection(logger_mock, config_mock) == (True, None) + + +def test_streams(mocker): + source = SourceYounium() + mocker.patch.object(source, "get_auth", return_value="dummy_token") + config_mock = {"playground": False, "username": "dummy_username", "password": "dummy_password"} + streams = source.streams(config_mock) + + expected_streams_number = 3 + assert len(streams) == expected_streams_number diff --git a/airbyte-integrations/connectors/source-younium/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-younium/unit_tests/test_streams.py new file mode 100644 index 0000000000000..39d4621a927bc --- /dev/null +++ b/airbyte-integrations/connectors/source-younium/unit_tests/test_streams.py @@ -0,0 +1,73 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
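+#
+# Unit tests for the shared YouniumStream base class. The patch_base_class
+# fixture stubs the abstract members (path, primary_key, __abstractmethods__)
+# so the base class can be instantiated directly with authenticator=None.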
+# + +from http import HTTPStatus +from unittest.mock import MagicMock + +import pytest +from source_younium.source import YouniumStream + + +@pytest.fixture +def patch_base_class(mocker): + # Mock abstract methods to enable instantiating abstract class + mocker.patch.object(YouniumStream, "path", "v0/example_endpoint") + mocker.patch.object(YouniumStream, "primary_key", "test_primary_key") + mocker.patch.object(YouniumStream, "__abstractmethods__", set()) + + +def test_request_params(patch_base_class): + stream = YouniumStream(authenticator=None) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None} + expected_params = {"PageSize": 100} + assert stream.request_params(**inputs) == expected_params + + +def test_request_params_with_next_page_token(patch_base_class): + stream = YouniumStream(authenticator=None) + inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"pageNumber": 2}} + expected_params = {"PageSize": 100, "pageNumber": 2} + assert stream.request_params(**inputs) == expected_params + + +def test_playground_url_base(patch_base_class): + stream = YouniumStream(authenticator=None, playground=True) + expected_url_base = "https://apisandbox.younium.com" + assert stream.url_base == expected_url_base + + +def test_use_playground_url_base(patch_base_class): + stream = YouniumStream(authenticator=None, playground=True) + expected_url_base = "https://apisandbox.younium.com" + assert stream.url_base == expected_url_base + + +def test_http_method(patch_base_class): + stream = YouniumStream(authenticator=None) + # TODO: replace this with your expected http request method + expected_method = "GET" + assert stream.http_method == expected_method + + +@pytest.mark.parametrize( + ("http_status", "should_retry"), + [ + (HTTPStatus.OK, False), + (HTTPStatus.BAD_REQUEST, False), + (HTTPStatus.TOO_MANY_REQUESTS, True), + (HTTPStatus.INTERNAL_SERVER_ERROR, True), + ], +) +def test_should_retry(patch_base_class, http_status, should_retry): + response_mock = MagicMock() + response_mock.status_code = http_status + stream = YouniumStream(authenticator=None) + assert stream.should_retry(response_mock) == should_retry + + +def test_backoff_time(patch_base_class): + response_mock = MagicMock() + stream = YouniumStream(authenticator=None) + expected_backoff_time = None + assert stream.backoff_time(response_mock) == expected_backoff_time diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/.dockerignore b/airbyte-integrations/connectors/source-youtube-analytics-business/.dockerignore new file mode 100644 index 0000000000000..17db0a5103005 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_youtube_analytics_business +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/Dockerfile b/airbyte-integrations/connectors/source-youtube-analytics-business/Dockerfile new file mode 100644 index 0000000000000..5e5778e35cfcd --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/Dockerfile @@ -0,0 +1,13 @@ +FROM airbyte/source-youtube-analytics:0.1.3 + +WORKDIR /airbyte/integration_code +COPY source_youtube_analytics_business ./source_youtube_analytics_business +COPY main.py ./ +COPY setup.py ./ +RUN pip install . 
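+
+# Note: unlike connectors built from a bare Python base image, this image layers
+# the business-channel package on top of the published
+# airbyte/source-youtube-analytics image (see FROM above), so the Python CDK and
+# base connector code already present there are reused and only this package is added.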
+ +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-youtube-analytics-business diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/README.md b/airbyte-integrations/connectors/source-youtube-analytics-business/README.md new file mode 100644 index 0000000000000..9d7430d8a7896 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/README.md @@ -0,0 +1,132 @@ +# Youtube Analytics Business Source + +This is the repository for the Youtube Analytics Business source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/youtube-analytics-business). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.9.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +pip install '.[tests]' +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. + +Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is +used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`. +If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything +should work as you expect. + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-youtube-analytics-business:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/youtube-analytics-business) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_youtube_analytics_business/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source youtube-analytics-business test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +python main.py spec +python main.py check --config secrets/config.json +python main.py discover --config secrets/config.json +python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json +``` + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . 
--no-cache -t airbyte/source-youtube-analytics-business:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-youtube-analytics-business:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-youtube-analytics-business:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-youtube-analytics-business:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-youtube-analytics-business:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-youtube-analytics-business:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing +Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named. +First install test dependencies into your virtual environment: +``` +pip install .[tests] +``` +### Unit Tests +To run unit tests locally, from the connector directory run: +``` +python -m pytest unit_tests +``` + +### Integration Tests +There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector). +#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-youtube-analytics-business:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-youtube-analytics-business:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. 
Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/acceptance-test-config.yml b/airbyte-integrations/connectors/source-youtube-analytics-business/acceptance-test-config.yml new file mode 100644 index 0000000000000..1d98f49a26ba4 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/acceptance-test-config.yml @@ -0,0 +1,13 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-youtube-analytics-business:dev +tests: + spec: + - spec_path: "source_youtube_analytics_business/spec.json" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-youtube-analytics-business/acceptance-test-docker.sh new file mode 100755 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/build.gradle b/airbyte-integrations/connectors/source-youtube-analytics-business/build.gradle new file mode 100644 index 0000000000000..8d2cce25a4f1b --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_youtube_analytics_business' +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/__init__.py b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/__init__.py new file mode 100644 index 0000000000000..46b7376756ec6 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2021 Airbyte, Inc., all rights reserved. 
+# diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..bb4ef1bc801b6 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/abnormal_state.json @@ -0,0 +1,56 @@ +{ + "channel_annotations_a1": { + "date": 99999999 + }, + "channel_basic_a2": { + "date": 99999999 + }, + "channel_cards_a1": { + "date": 99999999 + }, + "channel_combined_a2": { + "date": 99999999 + }, + "channel_demographics_a1": { + "date": 99999999 + }, + "channel_device_os_a2": { + "date": 99999999 + }, + "channel_end_screens_a1": { + "date": 99999999 + }, + "channel_playback_location_a2": { + "date": 99999999 + }, + "channel_province_a2": { + "date": 99999999 + }, + "channel_sharing_service_a1": { + "date": 99999999 + }, + "channel_subtitles_a2": { + "date": 99999999 + }, + "channel_traffic_source_a2": { + "date": 99999999 + }, + "playlist_basic_a1": { + "date": 99999999 + }, + "playlist_combined_a1": { + "date": 99999999 + }, + "playlist_device_os_a1": { + "date": 99999999 + }, + "playlist_playback_location_a1": { + "date": 99999999 + }, + "playlist_province_a1": { + "date": 99999999 + }, + "playlist_traffic_source_a1": { + "date": 99999999 + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/acceptance.py new file mode 100644 index 0000000000000..950b53b59d416 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/acceptance.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + yield diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..93caeb3b3e3e4 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/configured_catalog.json @@ -0,0 +1,166 @@ +{ + "streams": [ + { + "stream": { + "name": "channel_annotations_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_basic_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_cards_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_combined_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_demographics_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_device_os_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_end_screens_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_playback_location_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_province_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_sharing_service_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_subtitles_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "channel_traffic_source_a2", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "playlist_basic_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "playlist_combined_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + 
"sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "playlist_device_os_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "playlist_playback_location_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "playlist_province_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "playlist_traffic_source_a1", + "json_schema": {}, + "supported_sync_modes": ["full_refresh", "incremental"] + }, + "sync_mode": "incremental", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..e0e0fbb602cd3 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/integration_tests/invalid_config.json @@ -0,0 +1,7 @@ +{ + "credentials": { + "client_id": "client_id", + "client_secret": "client_secret", + "refresh_token": "refresh_token" + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/main.py b/airbyte-integrations/connectors/source-youtube-analytics-business/main.py new file mode 100644 index 0000000000000..87e33ed3052ce --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_youtube_analytics_business import SourceYoutubeAnalyticsBusiness + +if __name__ == "__main__": + source = SourceYoutubeAnalyticsBusiness() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/requirements.txt b/airbyte-integrations/connectors/source-youtube-analytics-business/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/setup.py b/airbyte-integrations/connectors/source-youtube-analytics-business/setup.py new file mode 100644 index 0000000000000..db5cdbcd36f68 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/setup.py @@ -0,0 +1,30 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "requests-mock==1.9.3", + "source-acceptance-test", +] + +setup( + name="source_youtube_analytics_business", + description="Source implementation for Youtube Analytics Business.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/__init__.py b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/__init__.py new file mode 100644 index 0000000000000..7cf712520a69b --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceYoutubeAnalyticsBusiness + +__all__ = ["SourceYoutubeAnalyticsBusiness"] diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/defaults/channel_reports.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/defaults/channel_reports.json new file mode 100644 index 0000000000000..23a8c483d6f96 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/defaults/channel_reports.json @@ -0,0 +1,274 @@ +[ + { + "id": "channel_annotations_a1", + "name": "Annotations", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "annotation_type", + "annotation_id" + ], + "metrics": [] + }, + { + "id": "channel_basic_a2", + "name": "User activity", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code" + ], + "metrics": [] + }, + { + "id": "channel_cards_a1", + "name": "Cards", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "card_type", + "card_id" + ], + "metrics": [] + }, + { + "id": "channel_combined_a2", + "name": "Combined", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "playback_location_type", + "traffic_source_type", + "device_type", + "operating_system" + ], + "metrics": [] + }, + { + "id": "channel_demographics_a1", + "name": "Demographics", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "age_group", + "gender" + ], + "metrics": [] + }, + { + "id": "channel_device_os_a2", + "name": "Device and OS", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "device_type", + "operating_system" + ], + "metrics": [] + }, + { + "id": "channel_end_screens_a1", + "name": "End screens", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "end_screen_element_type", + "end_screen_element_id" + ], + "metrics": [] + }, + { + "id": "channel_playback_location_a2", + "name": "Playback locations", + "dimensions": [ + "date", + 
"channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "playback_location_type", + "playback_location_detail" + ], + "metrics": [] + }, + { + "id": "channel_province_a2", + "name": "Province", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "province_code" + ], + "metrics": [] + }, + { + "id": "channel_sharing_service_a1", + "name": "Sharing service", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "sharing_service" + ], + "metrics": [] + }, + { + "id": "channel_subtitles_a2", + "name": "Subtitles", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "subtitle_language" + ], + "metrics": [] + }, + { + "id": "channel_traffic_source_a2", + "name": "Traffic sources", + "dimensions": [ + "date", + "channel_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "traffic_source_type", + "traffic_source_detail" + ], + "metrics": [] + }, + { + "id": "playlist_basic_a1", + "name": "Playlist user activity", + "dimensions": [ + "date", + "channel_id", + "playlist_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code" + ], + "metrics": [] + }, + { + "id": "playlist_combined_a1", + "name": "Playlist combined", + "dimensions": [ + "date", + "channel_id", + "playlist_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "playback_location_type", + "traffic_source_type", + "device_type", + "operating_system" + ], + "metrics": [] + }, + { + "id": "playlist_device_os_a1", + "name": "Playlist device and OS", + "dimensions": [ + "date", + "channel_id", + "playlist_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "device_type", + "operating_system" + ], + "metrics": [] + }, + { + "id": "playlist_playback_location_a1", + "name": "Playlist playback locations", + "dimensions": [ + "date", + "channel_id", + "playlist_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "playback_location_type", + "playback_location_detail" + ], + "metrics": [] + }, + { + "id": "playlist_province_a1", + "name": "Playlist province", + "dimensions": [ + "date", + "channel_id", + "playlist_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "province_code" + ], + "metrics": [] + }, + { + "id": "playlist_traffic_source_a1", + "name": "Playlist traffic sources", + "dimensions": [ + "date", + "channel_id", + "playlist_id", + "video_id", + "live_or_on_demand", + "subscribed_status", + "country_code", + "traffic_source_type", + "traffic_source_detail" + ], + "metrics": [] + } +] diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_annotations_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_annotations_a1.json new file mode 100644 index 0000000000000..7d44a378504c0 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_annotations_a1.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": 
["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "annotation_type": { + "type": ["null", "string"] + }, + "annotation_id": { + "type": ["null", "string"] + }, + "annotation_impressions": { + "type": ["null", "string"] + }, + "annotation_clickable_impressions": { + "type": ["null", "string"] + }, + "annotation_clicks": { + "type": ["null", "string"] + }, + "annotation_click_through_rate": { + "type": ["null", "string"] + }, + "annotation_closable_impressions": { + "type": ["null", "string"] + }, + "annotation_closes": { + "type": ["null", "string"] + }, + "annotation_close_rate": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_basic_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_basic_a2.json new file mode 100644 index 0000000000000..98e09cf869b3d --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_basic_a2.json @@ -0,0 +1,105 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "comments": { + "type": ["null", "integer"] + }, + "likes": { + "type": ["null", "integer"] + }, + "dislikes": { + "type": ["null", "integer"] + }, + "shares": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "annotation_impressions": { + "type": ["null", "integer"] + }, + "annotation_clickable_impressions": { + "type": ["null", "integer"] + }, + "annotation_clicks": { + "type": ["null", "integer"] + }, + "annotation_click_through_rate": { + "type": ["null", "number"] + }, + "annotation_closable_impressions": { + "type": ["null", "integer"] + }, + "annotation_closes": { + "type": ["null", "integer"] + }, + "annotation_close_rate": { + "type": ["null", "number"] + }, + "card_teaser_impressions": { + "type": ["null", "integer"] + }, + "card_teaser_clicks": { + "type": ["null", "integer"] + }, + "card_teaser_click_rate": { + "type": ["null", "number"] + }, + "card_impressions": { + "type": ["null", "integer"] + }, + "card_clicks": { + "type": ["null", "integer"] + }, + "card_click_rate": { + "type": ["null", "number"] + }, + "subscribers_gained": { + "type": ["null", "integer"] + }, + "subscribers_lost": { + "type": ["null", "integer"] + }, + "videos_added_to_playlists": { + "type": ["null", "integer"] + }, + "videos_removed_from_playlists": { + "type": ["null", "integer"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_cards_a1.json 
b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_cards_a1.json new file mode 100644 index 0000000000000..25a8fae12135a --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_cards_a1.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "card_type": { + "type": ["null", "string"] + }, + "card_id": { + "type": ["null", "string"] + }, + "card_teaser_impressions": { + "type": ["null", "string"] + }, + "card_teaser_clicks": { + "type": ["null", "string"] + }, + "card_teaser_click_rate": { + "type": ["null", "string"] + }, + "card_impressions": { + "type": ["null", "string"] + }, + "card_clicks": { + "type": ["null", "string"] + }, + "card_click_rate": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_combined_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_combined_a2.json new file mode 100644 index 0000000000000..08b81fee2eb1b --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_combined_a2.json @@ -0,0 +1,54 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "playback_location_type": { + "type": ["null", "integer"] + }, + "traffic_source_type": { + "type": ["null", "integer"] + }, + "device_type": { + "type": ["null", "integer"] + }, + "operating_system": { + "type": ["null", "integer"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_demographics_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_demographics_a1.json new file mode 100644 index 0000000000000..78ed6aacc97d1 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_demographics_a1.json @@ -0,0 +1,33 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", 
"string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "age_group": { + "type": ["null", "string"] + }, + "gender": { + "type": ["null", "string"] + }, + "views_percentage": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_device_os_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_device_os_a2.json new file mode 100644 index 0000000000000..c589e6ade85e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_device_os_a2.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "device_type": { + "type": ["null", "integer"] + }, + "operating_system": { + "type": ["null", "integer"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_end_screens_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_end_screens_a1.json new file mode 100644 index 0000000000000..398b81005f872 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_end_screens_a1.json @@ -0,0 +1,39 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "end_screen_element_type": { + "type": ["null", "string"] + }, + "end_screen_element_id": { + "type": ["null", "string"] + }, + "end_screen_element_clicks": { + "type": ["null", "string"] + }, + "end_screen_element_impressions": { + "type": ["null", "string"] + }, + "end_screen_element_click_rate": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_playback_location_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_playback_location_a2.json new file mode 100644 index 0000000000000..f3ec4a981964b --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_playback_location_a2.json @@ -0,0 +1,48 @@ +{ + "$schema": 
"http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "playback_location_type": { + "type": ["null", "integer"] + }, + "playback_location_detail": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_province_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_province_a2.json new file mode 100644 index 0000000000000..31b1164852043 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_province_a2.json @@ -0,0 +1,84 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "province_code": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "annotation_click_through_rate": { + "type": ["null", "integer"] + }, + "annotation_close_rate": { + "type": ["null", "integer"] + }, + "annotation_impressions": { + "type": ["null", "integer"] + }, + "annotation_clickable_impressions": { + "type": ["null", "integer"] + }, + "annotation_closable_impressions": { + "type": ["null", "integer"] + }, + "annotation_clicks": { + "type": ["null", "integer"] + }, + "annotation_closes": { + "type": ["null", "integer"] + }, + "card_click_rate": { + "type": ["null", "integer"] + }, + "card_teaser_click_rate": { + "type": ["null", "integer"] + }, + "card_impressions": { + "type": ["null", "integer"] + }, + "card_teaser_impressions": { + "type": ["null", "integer"] + }, + "card_clicks": { + "type": ["null", "integer"] + }, + "card_teaser_clicks": { + "type": ["null", "integer"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_sharing_service_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_sharing_service_a1.json new file mode 100644 index 0000000000000..70b9bd150b7fd --- /dev/null +++ 
b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_sharing_service_a1.json @@ -0,0 +1,30 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "sharing_service": { + "type": ["null", "string"] + }, + "shares": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_subtitles_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_subtitles_a2.json new file mode 100644 index 0000000000000..d189c49b2e8c6 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_subtitles_a2.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "subtitle_language": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_traffic_source_a2.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_traffic_source_a2.json new file mode 100644 index 0000000000000..077e0585c5302 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/channel_traffic_source_a2.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "traffic_source_type": { + "type": ["null", "integer"] + }, + "traffic_source_detail": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "average_view_duration_percentage": { + "type": ["null", "number"] + }, + "red_views": { + "type": ["null", "integer"] + }, + "red_watch_time_minutes": { + "type": ["null", "number"] + } + } +} diff --git 
a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_basic_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_basic_a1.json new file mode 100644 index 0000000000000..d2fc5834aaceb --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_basic_a1.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "playlist_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "playlist_starts": { + "type": ["null", "integer"] + }, + "playlist_saves_added": { + "type": ["null", "integer"] + }, + "playlist_saves_removed": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_combined_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_combined_a1.json new file mode 100644 index 0000000000000..cdd3292cd9812 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_combined_a1.json @@ -0,0 +1,57 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "playlist_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "playback_location_type": { + "type": ["null", "integer"] + }, + "traffic_source_type": { + "type": ["null", "integer"] + }, + "device_type": { + "type": ["null", "integer"] + }, + "operating_system": { + "type": ["null", "integer"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "playlist_starts": { + "type": ["null", "integer"] + }, + "playlist_saves_added": { + "type": ["null", "integer"] + }, + "playlist_saves_removed": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_device_os_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_device_os_a1.json new file mode 100644 index 0000000000000..383957aa165de --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_device_os_a1.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + 
"channel_id": { + "type": ["null", "string"] + }, + "playlist_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "device_type": { + "type": ["null", "integer"] + }, + "operating_system": { + "type": ["null", "integer"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "playlist_starts": { + "type": ["null", "integer"] + }, + "playlist_saves_added": { + "type": ["null", "integer"] + }, + "playlist_saves_removed": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_playback_location_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_playback_location_a1.json new file mode 100644 index 0000000000000..5ce825fab94b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_playback_location_a1.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "playlist_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "playback_location_type": { + "type": ["null", "integer"] + }, + "playback_location_detail": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "playlist_starts": { + "type": ["null", "integer"] + }, + "playlist_saves_added": { + "type": ["null", "integer"] + }, + "playlist_saves_removed": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_province_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_province_a1.json new file mode 100644 index 0000000000000..2d77f0d13fd12 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_province_a1.json @@ -0,0 +1,48 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "playlist_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "province_code": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "string"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "playlist_starts": { + "type": ["null", 
"string"] + }, + "playlist_saves_added": { + "type": ["null", "string"] + }, + "playlist_saves_removed": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_traffic_source_a1.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_traffic_source_a1.json new file mode 100644 index 0000000000000..3c6a1ab476ed7 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/schemas/playlist_traffic_source_a1.json @@ -0,0 +1,51 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "date": { + "type": "integer" + }, + "channel_id": { + "type": ["null", "string"] + }, + "playlist_id": { + "type": ["null", "string"] + }, + "video_id": { + "type": ["null", "string"] + }, + "live_or_on_demand": { + "type": ["null", "string"] + }, + "subscribed_status": { + "type": ["null", "string"] + }, + "country_code": { + "type": ["null", "string"] + }, + "traffic_source_type": { + "type": ["null", "integer"] + }, + "traffic_source_detail": { + "type": ["null", "string"] + }, + "views": { + "type": ["null", "integer"] + }, + "watch_time_minutes": { + "type": ["null", "number"] + }, + "average_view_duration_seconds": { + "type": ["null", "number"] + }, + "playlist_starts": { + "type": ["null", "integer"] + }, + "playlist_saves_added": { + "type": ["null", "integer"] + }, + "playlist_saves_removed": { + "type": ["null", "integer"] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/source.py b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/source.py new file mode 100644 index 0000000000000..4b7d5b6276fa0 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/source.py @@ -0,0 +1,308 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +import csv +import datetime +import io +import json +import pkgutil +from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple + +import pendulum +import requests +from airbyte_cdk.sources import AbstractSource +from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.streams.http import HttpStream, HttpSubStream +from airbyte_cdk.sources.streams.http.requests_native_auth import Oauth2Authenticator +from airbyte_cdk.sources.utils.transform import TransformConfig, TypeTransformer + + +class CustomBackoffMixin: + def daily_quota_exceeded(self, response: requests.Response) -> bool: + """Response example: + { + "error": { + "code": 429, + "message": "Quota exceeded for quota metric 'Free requests' and limit 'Free requests per minute' of service 'youtubereporting.googleapis.com' for consumer 'project_number:863188056127'.", + "status": "RESOURCE_EXHAUSTED", + "details": [ + { + "reason": "RATE_LIMIT_EXCEEDED", + "metadata": { + "consumer": "projects/863188056127", + "quota_limit": "FreeQuotaRequestsPerMinutePerProject", + "quota_limit_value": "60", + "quota_metric": "youtubereporting.googleapis.com/free_quota_requests", + "service": "youtubereporting.googleapis.com", + } + }, + ] + } + } + + :param response: + :return: + """ + details = response.json().get("error", {}).get("details", []) + for detail in details: + if detail.get("reason") == "RATE_LIMIT_EXCEEDED": + if detail.get("metadata", {}).get("quota_limit") == "FreeQuotaRequestsPerDayPerProject": + self.logger.error(f"Exceeded daily quota: {detail.get('metadata', {}).get('quota_limit_value')} reqs/day") + return True + break + return False + + def should_retry(self, response: requests.Response) -> bool: + """ + Override to set different conditions for backoff based on the response from the server. + + By default, back off on the following HTTP response statuses: + - 500s to handle transient server errors + - 429 (Too Many Requests) indicating rate limiting: + Different behavior in case of 'RATE_LIMIT_EXCEEDED': + + Requests Per Minute: + "message": "Quota exceeded for quota metric 'Free requests' and limit 'Free requests per minute' of service 'youtubereporting.googleapis.com' for consumer 'project_number:863188056127'." + "quota_limit": "FreeQuotaRequestsPerMinutePerProject", + "quota_limit_value": "60", + + --> use an increased retry_factor (30 seconds) + + Requests Per Day: + "message": "Quota exceeded for quota metric 'Free requests' and limit 'Free requests per day' of service 'youtubereporting.googleapis.com' for consumer 'project_number:863188056127'." + "quota_limit": "FreeQuotaRequestsPerDayPerProject", + "quota_limit_value": "20000", + + --> just raise an error; the next scan can reasonably start only in 1 day. + """ + if 500 <= response.status_code < 600: + return True + + if response.status_code == 429 and not self.daily_quota_exceeded(response): + return True + + return False + + @property + def retry_factor(self) -> float: + """ + The default FreeQuotaRequestsPerMinutePerProject is 60 reqs/min, so a reasonable delay is 30 seconds + """ + return 30 + + +class JobsResource(CustomBackoffMixin, HttpStream): + """ + https://developers.google.com/youtube/reporting/v1/reference/rest/v1/jobs + + All YouTube Analytics streams require a created reporting job. + This class allows us to `list` all existing reporting jobs or `create` a new reporting job for a specific stream. One stream can have only one reporting job.
+ By creating a reporting job, you are instructing YouTube to generate stream data on a daily basis. If a reporting job is removed, YouTube removes all stream data. + + On every connector invocation, it gets a list of all running reporting jobs; if the currently processed stream has a reporting job, the connector does nothing, + but if the currently processed stream does not have a job, the connector immediately creates one. This connector does not store IDs of reporting jobs. + If the reporting job was created by the user separately, this connector just uses that job. This connector does not remove reporting jobs; it can only create them. + + After a reporting job is created, the first data can become available only after up to 48 hours. + """ + + name = None + primary_key = None + http_method = None + raise_on_http_errors = True + url_base = "https://youtubereporting.googleapis.com/v1/" + JOB_NAME = "Airbyte reporting job" + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def should_retry(self, response: requests.Response) -> bool: + # if the connected Google account is not bound to the target YouTube account, + # we receive `401: UNAUTHENTICATED` + if response.status_code == 401: + setattr(self, "raise_on_http_errors", False) + return False + else: + return super().should_retry(response) + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + return [response.json()] + + def path(self, **kwargs) -> str: + return "jobs" + + def request_body_json(self, **kwargs) -> Optional[Mapping]: + if self.name: + return {"name": self.JOB_NAME, "reportTypeId": self.name} + + def list(self): + "https://developers.google.com/youtube/reporting/v1/reference/rest/v1/jobs/list" + self.name = None + self.http_method = "GET" + results = list(self.read_records(sync_mode=None)) + result = results[0] + return result.get("jobs", {}) + + def create(self, name): + "https://developers.google.com/youtube/reporting/v1/reference/rest/v1/jobs/create" + self.name = name + self.http_method = "POST" + results = list(self.read_records(sync_mode=None)) + result = results[0] + return result["id"] + + +class ReportResources(CustomBackoffMixin, HttpStream): + "https://developers.google.com/youtube/reporting/v1/reference/rest/v1/jobs.reports/list" + + name = None + primary_key = "id" + url_base = "https://youtubereporting.googleapis.com/v1/" + + def __init__(self, name: str, jobs_resource: JobsResource, job_id: str, start_time: str = None, **kwargs): + self.name = name + self.jobs_resource = jobs_resource + self.job_id = job_id + self.start_time = start_time + super().__init__(**kwargs) + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + if not self.job_id: + self.job_id = self.jobs_resource.create(self.name) + self.logger.info(f"YouTube reporting job is created: '{self.job_id}'") + return "jobs/{}/reports".format(self.job_id) + + def request_params( + self, + stream_state: Mapping[str, Any], + stream_slice: Mapping[str, Any] = None, + next_page_token: Mapping[str, Any] = None, + ) -> MutableMapping[str, Any]: + return {"startTimeAtOrAfter": self.start_time} if self.start_time else {} + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + response_json = response.json() + reports = [] + for report in 
response_json.get("reports", []): + report = {**report} + report["startTime"] = datetime.datetime.strptime(report["startTime"], "%Y-%m-%dT%H:%M:%S%z") + reports.append(report) + reports.sort(key=lambda x: x["startTime"]) + date = kwargs["stream_state"].get("date") + if date: + reports = [r for r in reports if int(r["startTime"].date().strftime("%Y%m%d")) > date] + if not reports: + reports.append(None) + return reports + + +class ChannelReports(CustomBackoffMixin, HttpSubStream): + "https://developers.google.com/youtube/reporting/v1/reports/channel_reports" + + name = None + primary_key = None + cursor_field = "date" + url_base = "" + transformer = TypeTransformer(TransformConfig.DefaultSchemaNormalization) + + def __init__(self, name: str, dimensions: List[str], **kwargs): + self.name = name + self.primary_key = dimensions + super().__init__(**kwargs) + + def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: + return None + + def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: + fp = io.StringIO(response.text) + reader = csv.DictReader(fp) + for record in reader: + yield record + + def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]: + if not current_stream_state: + return {self.cursor_field: latest_record[self.cursor_field]} + return {self.cursor_field: max(current_stream_state[self.cursor_field], latest_record[self.cursor_field])} + + def path( + self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None + ) -> str: + return stream_slice["parent"]["downloadUrl"] + + def read_records(self, *, stream_slice: Mapping[str, Any] = None, **kwargs) -> Iterable[Mapping[str, Any]]: + parent = stream_slice.get("parent") + if parent: + yield from super().read_records(stream_slice=stream_slice, **kwargs) + else: + self.logger.info("no data from parent stream") + yield from [] + + +class SourceYoutubeAnalyticsBusiness(AbstractSource): + @staticmethod + def get_authenticator(config): + credentials = config["credentials"] + client_id = credentials["client_id"] + client_secret = credentials["client_secret"] + refresh_token = credentials["refresh_token"] + + return Oauth2Authenticator( + token_refresh_endpoint="https://oauth2.googleapis.com/token", + client_id=client_id, + client_secret=client_secret, + refresh_token=refresh_token, + ) + + def check_connection(self, logger, config) -> Tuple[bool, any]: + authenticator = self.get_authenticator(config) + jobs_resource = JobsResource(authenticator=authenticator) + jobs = jobs_resource.list() + + if not jobs: + # try to create job report if such has not been created yet + report_streams = ReportResources(name="channel_basic_a2", jobs_resource=jobs_resource, job_id=None, authenticator=authenticator) + jobs = list(report_streams.read_records(sync_mode=None)) + + if jobs: + return True, None + else: + return ( + False, + "The Youtube account is not valid. Please make sure you're trying to use the active Youtube Account connected to your Google Account.", + ) + + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + authenticator = self.get_authenticator(config) + jobs_resource = JobsResource(authenticator=authenticator) + jobs = jobs_resource.list() + report_to_job_id = {j["reportTypeId"]: j["id"] for j in jobs} + + # By default, API returns reports for last 60 days. Report for each day requires a separate request. 
+ # Full scan of all 18 streams requires ~ 1100 requests (18+18*60), so we can hit the 'default' API quota limits: + # - 60 reqs per minute + # - 20000 reqs per day + # For SAT: scan only the last N days ('testing_period' option) to decrease the number of requests and avoid API limits + start_time = None + testing_period = config.get("testing_period") + if testing_period: + start_time = pendulum.today().add(days=-int(testing_period)).to_rfc3339_string() + + channel_reports = json.loads(pkgutil.get_data("source_youtube_analytics_business", "defaults/channel_reports.json")) + + streams = [] + for channel_report in channel_reports: + stream_name = channel_report["id"] + dimensions = channel_report["dimensions"] + job_id = report_to_job_id.get(stream_name) + parent = ReportResources( + name=stream_name, jobs_resource=jobs_resource, job_id=job_id, start_time=start_time, authenticator=authenticator + ) + streams.append(ChannelReports(name=stream_name, dimensions=dimensions, parent=parent, authenticator=authenticator)) + return streams diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/spec.json b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/spec.json new file mode 100644 index 0000000000000..52461e7891533 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/source_youtube_analytics_business/spec.json @@ -0,0 +1,46 @@ +{ + "documentationUrl": "https://docs.airbyte.com/integrations/sources/youtube-analytics-business", + "connectionSpecification": { + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "YouTube Analytics Business Spec", + "type": "object", + "required": ["credentials"], + "additionalProperties": true, + "properties": { + "credentials": { + "title": "Authenticate via OAuth 2.0", + "type": "object", + "required": ["client_id", "client_secret", "refresh_token"], + "additionalProperties": true, + "properties": { + "client_id": { + "title": "Client ID", + "type": "string", + "description": "The Client ID of your developer application", + "airbyte_secret": true + }, + "client_secret": { + "title": "Client Secret", + "type": "string", + "description": "The client secret of your developer application", + "airbyte_secret": true + }, + "refresh_token": { + "title": "Refresh Token", + "type": "string", + "description": "A refresh token generated using the above client ID and secret", + "airbyte_secret": true + } + } + } + } + }, + "authSpecification": { + "auth_type": "oauth2.0", + "oauth2Specification": { + "rootObject": ["credentials"], + "oauthFlowInitParameters": [["client_id"], ["client_secret"]], + "oauthFlowOutputParameters": [["refresh_token"]] + } + } +} diff --git a/airbyte-integrations/connectors/source-youtube-analytics-business/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-youtube-analytics-business/unit_tests/unit_test.py new file mode 100644 index 0000000000000..d770b3dd27eb7 --- /dev/null +++ b/airbyte-integrations/connectors/source-youtube-analytics-business/unit_tests/unit_test.py @@ -0,0 +1,7 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
+# + + +def test_test(): + assert True diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/.dockerignore b/airbyte-integrations/connectors/source-zapier-supported-storage/.dockerignore new file mode 100644 index 0000000000000..bdc101c5044b7 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/.dockerignore @@ -0,0 +1,6 @@ +* +!Dockerfile +!main.py +!source_zapier_supported_storage +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/Dockerfile b/airbyte-integrations/connectors/source-zapier-supported-storage/Dockerfile new file mode 100644 index 0000000000000..15e99274e53bf --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/Dockerfile @@ -0,0 +1,38 @@ +FROM python:3.9.11-alpine3.15 as base + +# build and load all requirements +FROM base as builder +WORKDIR /airbyte/integration_code + +# upgrade pip to the latest version +RUN apk --no-cache upgrade \ + && pip install --upgrade pip \ + && apk --no-cache add tzdata build-base + + +COPY setup.py ./ +# install necessary packages to a temporary folder +RUN pip install --prefix=/install . + +# build a clean environment +FROM base +WORKDIR /airbyte/integration_code + +# copy all loaded and built libraries to a pure basic image +COPY --from=builder /install /usr/local +# add default timezone settings +COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime +RUN echo "Etc/UTC" > /etc/timezone + +# bash is installed for more convenient debugging. +RUN apk --no-cache add bash + +# copy payload code only +COPY main.py ./ +COPY source_zapier_supported_storage ./source_zapier_supported_storage + +ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py" +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-zapier-supported-storage diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/README.md b/airbyte-integrations/connectors/source-zapier-supported-storage/README.md new file mode 100644 index 0000000000000..bc47eb39a78b7 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/README.md @@ -0,0 +1,79 @@ +# Zapier Supported Storage Source + +This is the repository for the Zapier Supported Storage configuration based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/zapier-supported-storage). + +## Local development + +#### Building via Gradle +You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow. + +To build using Gradle, from the Airbyte repository root, run: +``` +./gradlew :airbyte-integrations:connectors:source-zapier-supported-storage:build +``` + +#### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/zapier-supported-storage) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_zapier_supported_storage/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. 
+ +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source zapier-supported-storage test creds` +and place them into `secrets/config.json`. + +### Locally running the connector docker image + +#### Build +First, make sure you build the latest Docker image: +``` +docker build . -t airbyte/source-zapier-supported-storage:dev +``` + +You can also build the connector image via Gradle: +``` +./gradlew :airbyte-integrations:connectors:source-zapier-supported-storage:airbyteDocker +``` +When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in +the Dockerfile. + +#### Run +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-zapier-supported-storage:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zapier-supported-storage:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zapier-supported-storage:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-zapier-supported-storage:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside `integration_tests/acceptance.py`. + +To run your integration tests with Docker, use the `acceptance-test-docker.sh` script in this connector's directory. + +### Using gradle to run tests +All commands should be run from the Airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-zapier-supported-storage:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-zapier-supported-storage:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work go in the `MAIN_REQUIREMENTS` list. +* required for testing go in the `TEST_REQUIREMENTS` list (see the `setup.py` sketch after this README). + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
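For reference on the dependency split described in the Dependency Management section above, here is a minimal sketch of a connector `setup.py` that keeps the two groups separate. It follows the pattern of the `source-youtube-analytics-business` setup.py added earlier in this change; the package name, description, and version pins shown for `source-zapier-supported-storage` are illustrative assumptions, not the connector's actual file.

```
# Sketch only: a connector setup.py with the MAIN_REQUIREMENTS / TEST_REQUIREMENTS split.
# The package name, description, and pins below are illustrative assumptions.
from setuptools import find_packages, setup

MAIN_REQUIREMENTS = [
    "airbyte-cdk",  # runtime dependency: required for the connector to work
]

TEST_REQUIREMENTS = [
    "pytest~=6.1",             # test-only dependencies
    "pytest-mock~=3.6.1",
    "source-acceptance-test",
]

setup(
    name="source_zapier_supported_storage",
    description="Source implementation for Zapier Supported Storage.",
    author="Airbyte",
    author_email="contact@airbyte.io",
    packages=find_packages(),
    install_requires=MAIN_REQUIREMENTS,
    package_data={"": ["*.json", "*.yaml", "schemas/*.json"]},
    extras_require={"tests": TEST_REQUIREMENTS},
)
```

With this layout, `pip install .` installs only the runtime requirements, while `pip install '.[tests]'` additionally pulls in the test group.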
diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/__init__.py b/airbyte-integrations/connectors/source-zapier-supported-storage/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml new file mode 100644 index 0000000000000..166465242842b --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-config.yml @@ -0,0 +1,30 @@ +# See [Source Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/source-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-zapier-supported-storage:dev +tests: + spec: + - spec_path: "source_zapier_supported_storage/spec.yaml" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.txt" + # extra_fields: no + # exact_order: no + # extra_records: yes +# incremental: # TODO if your connector does not implement incremental sync, remove this block +# - config_path: "secrets/config.json" +# configured_catalog_path: "integration_tests/configured_catalog.json" +# future_state_path: "integration_tests/abnormal_state.json" + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-docker.sh new file mode 100644 index 0000000000000..c51577d10690c --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/acceptance-test-docker.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env sh + +# Build latest connector image +docker build . 
-t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2-) + +# Pull latest acctest image +docker pull airbyte/source-acceptance-test:latest + +# Run +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input + diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/build.gradle b/airbyte-integrations/connectors/source-zapier-supported-storage/build.gradle new file mode 100644 index 0000000000000..bfe2f88880a87 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_zapier_supported_storage' +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/__init__.py b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/__init__.py new file mode 100644 index 0000000000000..1100c1c58cf51 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/abnormal_state.json new file mode 100644 index 0000000000000..813bb5ad104d4 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "zapier_supported_storage_stream": { + "some_key_other_than_data": "any_value_being_returned(this should not work)" + } +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/acceptance.py new file mode 100644 index 0000000000000..1302b2f57e10e --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. 
otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/configured_catalog.json new file mode 100644 index 0000000000000..bca6d3d2e6e1f --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/configured_catalog.json @@ -0,0 +1,13 @@ +{ + "streams": [ + { + "stream": { + "name": "zapier_supported_storage", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "overwrite" + } + ] +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/invalid_config.json new file mode 100644 index 0000000000000..85f935431b74d --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/invalid_config.json @@ -0,0 +1,3 @@ +{ + "secret": "invalid_key" +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/sample_config.json new file mode 100644 index 0000000000000..07b7659cf5f74 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/sample_config.json @@ -0,0 +1,3 @@ +{ + "secret": "" +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/sample_state.json new file mode 100644 index 0000000000000..4d99a3885d301 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "zapier_supported_storage_stream": { + "data": "any_value" + } +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/main.py b/airbyte-integrations/connectors/source-zapier-supported-storage/main.py new file mode 100644 index 0000000000000..fa4bf55989d24 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/main.py @@ -0,0 +1,13 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_zapier_supported_storage import SourceZapierSupportedStorage + +if __name__ == "__main__": + source = SourceZapierSupportedStorage() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/requirements.txt b/airbyte-integrations/connectors/source-zapier-supported-storage/requirements.txt new file mode 100644 index 0000000000000..0411042aa0911 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/setup.py b/airbyte-integrations/connectors/source-zapier-supported-storage/setup.py new file mode 100644 index 0000000000000..af33d9906216a --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/setup.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = [ + "airbyte-cdk~=0.1", +] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "pytest-mock~=3.6.1", + "source-acceptance-test", +] + +setup( + name="source_zapier_supported_storage", + description="Source implementation for Zapier Supported Storage.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "*.yaml", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/__init__.py b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/__init__.py new file mode 100644 index 0000000000000..fedb5cd2ecca5 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + + +from .source import SourceZapierSupportedStorage + +__all__ = ["SourceZapierSupportedStorage"] diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/schemas/zapier_supported_storage.json b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/schemas/zapier_supported_storage.json new file mode 100644 index 0000000000000..83b5c3a88b3b4 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/schemas/zapier_supported_storage.json @@ -0,0 +1,5 @@ +{ + "type": "object", + "additionalProperties": true, + "properties": {} +} diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/source.py b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/source.py new file mode 100644 index 0000000000000..4a2e44426b81b --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2022 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceZapierSupportedStorage(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "zapier_supported_storage.yaml"}) diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/spec.yaml b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/spec.yaml new file mode 100644 index 0000000000000..7b585349d33fa --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/spec.yaml @@ -0,0 +1,14 @@ +documentationUrl: https://docs.airbyte.com/integrations/sources/zapier-supported-storage +connectionSpecification: + $schema: http://json-schema.org/draft-07/schema# + title: Zapier Supported Storage Spec + type: object + required: + - secret + additionalProperties: true + properties: + secret: + title: Secret Key + type: string + description: Secret key supplied by zapier + airbyte_secret: true diff --git a/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/zapier_supported_storage.yaml b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/zapier_supported_storage.yaml new file mode 100644 index 0000000000000..97c6bf1cc0bb8 --- /dev/null +++ b/airbyte-integrations/connectors/source-zapier-supported-storage/source_zapier_supported_storage/zapier_supported_storage.yaml @@ -0,0 +1,34 @@ +version: "0.1.0" + +definitions: + selector: + extractor: + field_pointer: [] + requester: + url_base: "https://store.zapier.com/api" + http_method: "GET" + request_options_provider: + request_parameters: + secret: "{{ config['secret'] }}" + retriever: + record_selector: + $ref: "*ref(definitions.selector)" + paginator: + type: NoPagination + requester: + $ref: "*ref(definitions.requester)" + base_stream: + retriever: + $ref: "*ref(definitions.retriever)" + zapier_supported_storage_stream: + $ref: "*ref(definitions.base_stream)" + $options: + name: "zapier_supported_storage" + path: "/records" + +streams: + - "*ref(definitions.zapier_supported_storage_stream)" + +check: + stream_names: + - "zapier_supported_storage" diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java index ae0cd6bd2c5b3..41abede3e79ba 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java @@ -36,6 +36,11 @@ private ApmTraceConstants() {} */ public static final class Tags { + /** + * Name of the APM trace tag that holds the attempt number value associated with the trace. + */ + public static final String ATTEMPT_NUMBER_KEY = "attempt_number"; + /** * Name of the APM trace tag that holds the destination Docker image value associated with the * trace. @@ -58,6 +63,11 @@ public static final class Tags { */ public static final String DOCKER_IMAGE_KEY = "docker_image"; + /** + * Name of the APM trace tag that holds the failure origin(s) associated with the trace. + */ + public static final String FAILURE_ORIGINS_KEY = "failure_origins"; + /** * Name of the APM trace tag that holds the job ID value associated with the trace. 
 */
diff --git a/airbyte-metrics/reporter/Dockerfile b/airbyte-metrics/reporter/Dockerfile
index 2448f3f348bc8..546bbfedc3976 100644
--- a/airbyte-metrics/reporter/Dockerfile
+++ b/airbyte-metrics/reporter/Dockerfile
@@ -1,7 +1,7 @@
 ARG JDK_IMAGE=airbyte/airbyte-base-java-image:1.0
 FROM ${JDK_IMAGE} AS metrics-reporter
 
-ARG VERSION=0.40.18
+ARG VERSION=0.40.19
 
 ENV APPLICATION airbyte-metrics-reporter
 ENV VERSION ${VERSION}
diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java
index 26fae9f07e447..2d458af1a4fec 100644
--- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java
+++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/Emitter.java
@@ -82,7 +82,7 @@ final class OldestPendingJob extends Emitter {
       db.oldestPendingJobAgeSecsByGeography().forEach((geographyType, count) -> client.gauge(
           OssMetricsRegistry.OLDEST_PENDING_JOB_AGE_SECS,
           count,
-          new MetricAttribute(MetricTags.GEOGRAPHY, geographyType.getLiteral())));
+          new MetricAttribute(MetricTags.GEOGRAPHY, geographyType)));
       return null;
     });
   }
diff --git a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java
index d365cf756a41d..319281d022643 100644
--- a/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java
+++ b/airbyte-metrics/reporter/src/main/java/io/airbyte/metrics/reporter/MetricRepository.java
@@ -9,9 +9,9 @@
 import static io.airbyte.db.instance.jobs.jooq.generated.Tables.JOBS;
 import static org.jooq.impl.DSL.asterisk;
 import static org.jooq.impl.DSL.count;
+import static org.jooq.impl.DSL.name;
 import static org.jooq.impl.SQLDataType.VARCHAR;
 
-import io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType;
 import io.airbyte.db.instance.configs.jooq.generated.enums.StatusType;
 import io.airbyte.db.instance.jobs.jooq.generated.enums.AttemptStatus;
 import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus;
@@ -20,28 +20,48 @@
 import java.util.List;
 import java.util.Map;
 import org.jooq.DSLContext;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
 
 @Singleton
 class MetricRepository {
 
   private final DSLContext ctx;
 
+  // We have to report gauge metrics with a value of 0 when they do not show up in the DB,
+  // otherwise Datadog will keep using the previously reported value.
+  // Another option we did not use here is to build this into the SQL query, but that would make
+  // the SQL much less readable without decreasing any complexity.
+ private final static List REGISTERED_ATTEMPT_QUEUE = List.of("SYNC", "AWS_PARIS_SYNC", "null"); + private final static List REGISTERED_GEOGRAPHY = List.of("US", "AUTO", "EU"); + MetricRepository(final DSLContext ctx) { this.ctx = ctx; } Map numberOfPendingJobsByGeography() { - var result = ctx.select(CONNECTION.GEOGRAPHY.cast(String.class), count(asterisk()).as("count")) + String geographyResultAlias = "geography"; + String countResultAlias = "result"; + var result = ctx.select(CONNECTION.GEOGRAPHY.cast(String.class).as(geographyResultAlias), count(asterisk()).as(countResultAlias)) .from(JOBS) .join(CONNECTION) .on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) .where(JOBS.STATUS.eq(JobStatus.pending)) .groupBy(CONNECTION.GEOGRAPHY); - return (Map) result.fetchMap(0, 1); + Field geographyResultField = DSL.field(name(geographyResultAlias), String.class); + Field countResultField = DSL.field(name(countResultAlias), Integer.class); + Map queriedMap = result.fetchMap(geographyResultField, countResultField); + for (final String potentialGeography : REGISTERED_GEOGRAPHY) { + if (!queriedMap.containsKey(potentialGeography)) { + queriedMap.put(potentialGeography, 0); + } + } + return queriedMap; } Map numberOfRunningJobsByTaskQueue() { - var result = ctx.select(ATTEMPTS.PROCESSING_TASK_QUEUE, count(asterisk()).as("count")) + String countFieldName = "count"; + var result = ctx.select(ATTEMPTS.PROCESSING_TASK_QUEUE, count(asterisk()).as(countFieldName)) .from(JOBS) .join(CONNECTION) .on(CONNECTION.ID.cast(VARCHAR(255)).eq(JOBS.SCOPE)) @@ -50,8 +70,15 @@ Map numberOfRunningJobsByTaskQueue() { .where(JOBS.STATUS.eq(JobStatus.running).and(CONNECTION.STATUS.eq(StatusType.active))) .and(ATTEMPTS.STATUS.eq(AttemptStatus.running)) .groupBy(ATTEMPTS.PROCESSING_TASK_QUEUE); - return (Map) result.fetchMap(0, 1); + Field countResultField = DSL.field(name(countFieldName), Integer.class); + Map queriedMap = result.fetchMap(ATTEMPTS.PROCESSING_TASK_QUEUE, countResultField); + for (final String potentialAttemptQueue : REGISTERED_ATTEMPT_QUEUE) { + if (!queriedMap.containsKey(potentialAttemptQueue)) { + queriedMap.put(potentialAttemptQueue, 0); + } + } + return queriedMap; } // This is a rare case and not likely to be related to data planes; So we will monitor them as a @@ -65,7 +92,7 @@ int numberOfOrphanRunningJobs() { .fetchOne(0, int.class); } - Map oldestPendingJobAgeSecsByGeography() { + Map oldestPendingJobAgeSecsByGeography() { final var query = """ SELECT cast(connection.geography as varchar) AS geography, MAX(EXTRACT(EPOCH FROM (current_timestamp - jobs.created_at))) AS run_duration_seconds @@ -76,7 +103,15 @@ SELECT cast(connection.geography as varchar) AS geography, MAX(EXTRACT(EPOCH FRO GROUP BY geography; """; final var result = ctx.fetch(query); - return (Map) result.intoMap(0, 1); + Field geographyResultField = DSL.field(name("geography"), String.class); + Field runDurationSecondsField = DSL.field(name("run_duration_seconds"), Double.class); + Map queriedMap = result.intoMap(geographyResultField, runDurationSecondsField); + for (final String potentialGeography : REGISTERED_GEOGRAPHY) { + if (!queriedMap.containsKey(potentialGeography)) { + queriedMap.put(potentialGeography, 0.0); + } + } + return queriedMap; } Map oldestRunningJobAgeSecsByTaskQueue() { @@ -90,7 +125,15 @@ SELECT attempts.processing_task_queue AS task_queue, MAX(EXTRACT(EPOCH FROM (cur GROUP BY task_queue; """; final var result = ctx.fetch(query); - return (Map) result.intoMap(0, 1); + Field taskQueueResultField = 
DSL.field(name("task_queue"), String.class); + Field runDurationSecondsField = DSL.field(name("run_duration_seconds"), Double.class); + Map queriedMap = result.intoMap(taskQueueResultField, runDurationSecondsField); + for (final String potentialAttemptQueue : REGISTERED_ATTEMPT_QUEUE) { + if (!queriedMap.containsKey(potentialAttemptQueue)) { + queriedMap.put(potentialAttemptQueue, 0.0); + } + } + return queriedMap; } List numberOfActiveConnPerWorkspace() { diff --git a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java b/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java index 4318849237903..600f5e8b7f2c0 100644 --- a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java +++ b/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/EmitterTest.java @@ -9,7 +9,6 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import io.airbyte.db.instance.configs.jooq.generated.enums.GeographyType; import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus; import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClient; @@ -40,7 +39,7 @@ void setUp() { @Test void TestNumPendingJobs() { - final var value = Map.of("AUTO", 101, "EU", 20); + final var value = Map.of(AUTO_REGION, 101, EU_REGION, 20); when(repo.numberOfPendingJobsByGeography()).thenReturn(value); final var emitter = new NumPendingJobs(client, repo); @@ -49,9 +48,9 @@ void TestNumPendingJobs() { assertEquals(Duration.ofSeconds(15), emitter.getDuration()); verify(repo).numberOfPendingJobsByGeography(); verify(client).gauge(OssMetricsRegistry.NUM_PENDING_JOBS, 101, - new MetricAttribute(MetricTags.GEOGRAPHY, "AUTO")); + new MetricAttribute(MetricTags.GEOGRAPHY, AUTO_REGION)); verify(client).gauge(OssMetricsRegistry.NUM_PENDING_JOBS, 20, - new MetricAttribute(MetricTags.GEOGRAPHY, "EU")); + new MetricAttribute(MetricTags.GEOGRAPHY, EU_REGION)); verify(client).count(OssMetricsRegistry.EST_NUM_METRICS_EMITTED_BY_REPORTER, 1); } @@ -105,7 +104,7 @@ void TestOldestRunningJob() { @Test void TestOldestPendingJob() { - final var value = Map.of(GeographyType.AUTO, 101.0, GeographyType.EU, 20.0); + final var value = Map.of(AUTO_REGION, 101.0, EU_REGION, 20.0); when(repo.oldestPendingJobAgeSecsByGeography()).thenReturn(value); final var emitter = new OldestPendingJob(client, repo); diff --git a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java b/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java index 0052dd10a8f5a..c435f0de2dbd4 100644 --- a/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java +++ b/airbyte-metrics/reporter/src/test/java/io/airbyte/metrics/reporter/MetricRepositoryTest.java @@ -18,7 +18,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; -import com.google.common.collect.Iterators; import io.airbyte.db.factory.DSLContextFactory; import io.airbyte.db.init.DatabaseInitializationException; import io.airbyte.db.instance.configs.jooq.generated.enums.ActorType; @@ -54,6 +53,8 @@ class MetricRepositoryTest { private static final String DEST = "dst"; private static final String CONN = "conn"; private static final String SYNC_QUEUE = "SYNC"; + private static final String AWS_SYNC_QUEUE = "AWS_PARIS_SYNC"; + private static final String AUTO_REGION = "AUTO"; private static final 
String EU_REGION = "EU"; private static final UUID SRC_DEF_ID = UUID.randomUUID(); @@ -114,7 +115,7 @@ class NumJobs { void shouldReturnReleaseStages() { ctx.insertInto(ATTEMPTS, ATTEMPTS.ID, ATTEMPTS.JOB_ID, ATTEMPTS.STATUS, ATTEMPTS.PROCESSING_TASK_QUEUE) .values(10L, 1L, AttemptStatus.running, SYNC_QUEUE).values(20L, 2L, AttemptStatus.running, SYNC_QUEUE) - .values(30L, 3L, AttemptStatus.running, SYNC_QUEUE).values(40L, 4L, AttemptStatus.running, SYNC_QUEUE) + .values(30L, 3L, AttemptStatus.running, SYNC_QUEUE).values(40L, 4L, AttemptStatus.running, AWS_SYNC_QUEUE) .values(50L, 5L, AttemptStatus.running, SYNC_QUEUE) .execute(); final var srcId = UUID.randomUUID(); @@ -138,7 +139,10 @@ void shouldReturnReleaseStages() { .values(5L, inactiveConnectionId.toString(), JobStatus.running) .execute(); - assertEquals(2, db.numberOfRunningJobsByTaskQueue().get(SYNC_QUEUE)); + assertEquals(1, db.numberOfRunningJobsByTaskQueue().get(SYNC_QUEUE)); + assertEquals(1, db.numberOfRunningJobsByTaskQueue().get(AWS_SYNC_QUEUE)); + // To test we send 0 for 'null' to overwrite previous bug. + assertEquals(0, db.numberOfRunningJobsByTaskQueue().get("null")); assertEquals(1, db.numberOfOrphanRunningJobs()); } @@ -148,8 +152,9 @@ void runningJobsShouldReturnZero() throws SQLException { ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(1L, "", JobStatus.pending).execute(); ctx.insertInto(JOBS, JOBS.ID, JOBS.SCOPE, JOBS.STATUS).values(2L, "", JobStatus.failed).execute(); - final var res = db.numberOfRunningJobsByTaskQueue(); - assertTrue(res.isEmpty()); + final var result = db.numberOfRunningJobsByTaskQueue(); + assertEquals(result.get(SYNC_QUEUE), 0); + assertEquals(result.get(AWS_SYNC_QUEUE), 0); } @Test @@ -173,6 +178,7 @@ void pendingJobsShouldReturnCorrectCount() throws SQLException { final var res = db.numberOfPendingJobsByGeography(); assertEquals(2, res.get(EU_REGION)); + assertEquals(0, res.get(AUTO_REGION)); } @Test @@ -192,8 +198,9 @@ void pendingJobsShouldReturnZero() throws SQLException { .values(2L, connectionUuid.toString(), JobStatus.failed) .execute(); - final var res = db.numberOfPendingJobsByGeography(); - assertTrue(res.isEmpty()); + final var result = db.numberOfPendingJobsByGeography(); + assertEquals(result.get(AUTO_REGION), 0); + assertEquals(result.get(EU_REGION), 0); } } @@ -248,8 +255,9 @@ void shouldReturnNothingIfNotApplicable() { .values(2L, connectionUuid.toString(), JobStatus.running) .values(3L, connectionUuid.toString(), JobStatus.failed).execute(); - final var res = db.oldestPendingJobAgeSecsByGeography(); - assertTrue(res.isEmpty()); + final var result = db.oldestPendingJobAgeSecsByGeography(); + assertEquals(result.get(EU_REGION), 0.0); + assertEquals(result.get(AUTO_REGION), 0.0); } } @@ -277,10 +285,10 @@ void shouldReturnOnlyRunningSeconds() { .values(4L, "", JobStatus.failed) .execute(); - final var result = Iterators.getOnlyElement(db.oldestRunningJobAgeSecsByTaskQueue().entrySet().iterator()); - assertEquals(SYNC_QUEUE, result.getKey()); + final var result = db.oldestRunningJobAgeSecsByTaskQueue(); // expected age is 1000 seconds, but allow for +/- 1 second to account for timing/rounding errors - assertTrue(9999 < result.getValue() && result.getValue() < 10001L); + assertTrue(9999 < result.get(SYNC_QUEUE) && result.get(SYNC_QUEUE) < 10001L); + assertEquals(result.get(AWS_SYNC_QUEUE), 0.0); } @Test @@ -293,8 +301,9 @@ void shouldReturnNothingIfNotApplicable() { .values(3L, "", JobStatus.failed) .execute(); - final var res = 
db.oldestRunningJobAgeSecsByTaskQueue(); - assertTrue(res.isEmpty()); + final var result = db.oldestRunningJobAgeSecsByTaskQueue(); + assertEquals(result.get(SYNC_QUEUE), 0.0); + assertEquals(result.get(AWS_SYNC_QUEUE), 0.0); } } diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java index 9ddfc93a33264..ecb5c115e3ccb 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerioNotificationClient.java @@ -121,6 +121,11 @@ public boolean notifyFailure(final String message) throws IOException, Interrupt throw new NotImplementedException(); } + @Override + public boolean notifySchemaChange(final UUID connectionId, final boolean isBreaking) { + throw new NotImplementedException(); + } + private boolean notifyByEmail(final String requestBody) throws IOException, InterruptedException { final HttpRequest request = HttpRequest.newBuilder() .POST(HttpRequest.BodyPublishers.ofString(requestBody)) diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java index c4be2734ef211..2895457fa2944 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/NotificationClient.java @@ -55,6 +55,8 @@ public abstract boolean notifyConnectionDisableWarning(String receiverEmail, public abstract boolean notifyFailure(String message) throws IOException, InterruptedException; + public abstract boolean notifySchemaChange(UUID connectionId, boolean isBreaking) throws IOException, InterruptedException; + public static NotificationClient createNotificationClient(final Notification notification) { return switch (notification.getNotificationType()) { case SLACK -> new SlackNotificationClient(notification); diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java index 887580db8aded..ce069efcc9256 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java @@ -4,6 +4,7 @@ package io.airbyte.notification; +import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap.Builder; import io.airbyte.commons.json.Jsons; @@ -33,11 +34,9 @@ public class SlackNotificationClient extends NotificationClient { private static final Logger LOGGER = LoggerFactory.getLogger(SlackNotificationClient.class); - private final HttpClient httpClient = HttpClient.newBuilder() - .version(HttpClient.Version.HTTP_2) - .build(); private final SlackNotificationConfiguration config; + @JsonCreator public SlackNotificationClient(final Notification notification) { super(notification); this.config = notification.getSlackConfiguration(); @@ -121,7 +120,22 @@ public boolean notifyConnectionDisableWarning(final String receiverEmail, return false; } + @Override + public boolean notifySchemaChange(UUID connectionId, boolean isBreaking) throws IOException, InterruptedException { + final String message = renderTemplate( + isBreaking ? 
"slack/breaking_schema_change_notification_template.txt" : "slack/non_breaking_schema_change_notification_template.txt", + connectionId.toString()); + final String webhookUrl = config.getWebhook(); + if (!Strings.isEmpty(webhookUrl)) { + return notify(message); + } + return false; + } + private boolean notify(final String message) throws IOException, InterruptedException { + final HttpClient httpClient = HttpClient.newBuilder() + .version(HttpClient.Version.HTTP_2) + .build(); final ImmutableMap body = new Builder() .put("text", message) .build(); diff --git a/airbyte-notification/src/main/resources/slack/breaking_schema_change_notification_template.txt b/airbyte-notification/src/main/resources/slack/breaking_schema_change_notification_template.txt new file mode 100644 index 0000000000000..925089685ffc3 --- /dev/null +++ b/airbyte-notification/src/main/resources/slack/breaking_schema_change_notification_template.txt @@ -0,0 +1,3 @@ +Your source schema has changed for connection ID: %s + +Airbyte has disabled this connection because this source schema change will cause broken syncs. Visit your connection page, refresh your source schema, and reset your data in order to fix this connection. diff --git a/airbyte-notification/src/main/resources/slack/non_breaking_schema_change_notification_template.txt b/airbyte-notification/src/main/resources/slack/non_breaking_schema_change_notification_template.txt new file mode 100644 index 0000000000000..79c8447abbd30 --- /dev/null +++ b/airbyte-notification/src/main/resources/slack/non_breaking_schema_change_notification_template.txt @@ -0,0 +1,3 @@ +Your source schema has changed for connection ID: %s + +Visit your connection page, refresh your source schema, and reset your data in order to update this connection. 
diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 7d8808ef92cfe..9829dd736d496 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -53,6 +53,9 @@ public OAuthImplementationFactory(final ConfigRepository configRepository, final .put("airbyte/source-surveymonkey", new SurveymonkeyOAuthFlow(configRepository, httpClient)) .put("airbyte/source-trello", new TrelloOAuthFlow(configRepository)) .put("airbyte/source-youtube-analytics", new YouTubeAnalyticsOAuthFlow(configRepository, httpClient)) + // revert me + .put("airbyte/source-youtube-analytics-business", new YouTubeAnalyticsBusinessOAuthFlow(configRepository, httpClient)) + // .put("airbyte/source-drift", new DriftOAuthFlow(configRepository, httpClient)) .put("airbyte/source-zendesk-chat", new ZendeskChatOAuthFlow(configRepository, httpClient)) .put("airbyte/source-zendesk-support", new ZendeskSupportOAuthFlow(configRepository, httpClient)) diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java index 30f7863a755a4..c14e34efe66d1 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/ShopifyOAuthFlow.java @@ -27,7 +27,6 @@ public class ShopifyOAuthFlow extends BaseOAuth2Flow { "read_orders", "read_all_orders", "read_assigned_fulfillment_orders", - "read_checkouts", "read_content", "read_customers", "read_discounts", diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsBusinessOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsBusinessOAuthFlow.java new file mode 100644 index 0000000000000..fac8326ff2046 --- /dev/null +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/google/YouTubeAnalyticsBusinessOAuthFlow.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.oauth.flows.google; + +import com.google.common.annotations.VisibleForTesting; +import io.airbyte.config.persistence.ConfigRepository; +import java.net.http.HttpClient; +import java.util.function.Supplier; + +public class YouTubeAnalyticsBusinessOAuthFlow extends GoogleOAuthFlow { + + private static final String SCOPE_URL = + "https://www.googleapis.com/auth/yt-analytics.readonly https://www.googleapis.com/auth/yt-analytics-monetary.readonly"; + + public YouTubeAnalyticsBusinessOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient) { + super(configRepository, httpClient); + } + + @VisibleForTesting + YouTubeAnalyticsBusinessOAuthFlow(final ConfigRepository configRepository, final HttpClient httpClient, final Supplier stateSupplier) { + super(configRepository, httpClient, stateSupplier); + } + + @Override + protected String getScope() { + return SCOPE_URL; + } + +} diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java index 42d7e14c6a7e7..369e7463cf982 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobPersistence.java @@ -24,6 +24,7 @@ import io.airbyte.commons.text.Names; import io.airbyte.commons.text.Sqls; import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.commons.version.Version; import io.airbyte.config.AttemptFailureSummary; @@ -838,6 +839,27 @@ public void setAirbyteProtocolVersionMin(final Version version) throws IOExcepti setMetadata(AirbyteProtocolVersion.AIRBYTE_PROTOCOL_VERSION_MIN_KEY_NAME, version.serialize()); } + @Override + public Optional getCurrentProtocolVersionRange() throws IOException { + final Optional min = getAirbyteProtocolVersionMin(); + final Optional max = getAirbyteProtocolVersionMax(); + + if (min.isPresent() != max.isPresent()) { + // Flagging this because this would be highly suspicious but not bad enough that we should fail + // hard. + // If the new config is fine, the system should self-heal. + LOGGER.warn("Inconsistent AirbyteProtocolVersion found, only one of min/max was found. 
(min:{}, max:{})", + min.map(Version::serialize).orElse(""), max.map(Version::serialize).orElse("")); + } + + if (min.isEmpty() && max.isEmpty()) { + return Optional.empty(); + } + + return Optional.of(new AirbyteProtocolVersionRange(min.orElse(AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION), + max.orElse(AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION))); + } + private Stream getMetadata(final String keyName) throws IOException { return jobDatabase.query(ctx -> ctx.select() .from(AIRBYTE_METADATA_TABLE) diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java index 003e0387c6ea8..96a526249af6b 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobPersistence.java @@ -5,6 +5,7 @@ package io.airbyte.persistence.job; import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.commons.version.Version; import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.JobConfig; @@ -262,6 +263,11 @@ List listJobStatusAndTimestampWithConnection(UUID con */ void setAirbyteProtocolVersionMin(Version version) throws IOException; + /** + * Get the current Airbyte Protocol Version range if defined + */ + Optional getCurrentProtocolVersionRange() throws IOException; + /** * Returns a deployment UUID. */ diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java index 74da5251d22fa..34adf975fcf3e 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/DefaultJobPersistenceTest.java @@ -24,6 +24,8 @@ import com.google.common.collect.Sets; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.text.Sqls; +import io.airbyte.commons.version.AirbyteProtocolVersion; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.commons.version.Version; import io.airbyte.config.AttemptFailureSummary; import io.airbyte.config.FailureReason; @@ -596,6 +598,22 @@ void testAirbyteProtocolVersionMinMetadata() throws IOException { assertEquals(minVersion2, minVersion2read.orElseThrow()); } + @Test + void testAirbyteProtocolVersionRange() throws IOException { + final Version v1 = new Version("1.5.0"); + final Version v2 = new Version("2.5.0"); + final Optional range = jobPersistence.getCurrentProtocolVersionRange(); + assertEquals(Optional.empty(), range); + + jobPersistence.setAirbyteProtocolVersionMax(v2); + final Optional range2 = jobPersistence.getCurrentProtocolVersionRange(); + assertEquals(Optional.of(new AirbyteProtocolVersionRange(AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION, v2)), range2); + + jobPersistence.setAirbyteProtocolVersionMin(v1); + final Optional range3 = jobPersistence.getCurrentProtocolVersionRange(); + assertEquals(Optional.of(new AirbyteProtocolVersionRange(v1, v2)), range3); + } + private long createJobAt(final Instant created_at) throws IOException { when(timeSupplier.get()).thenReturn(created_at); return jobPersistence.enqueueJob(SCOPE, 
SPEC_JOB_CONFIG).orElseThrow(); diff --git a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteStreamNameNamespacePair.java b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/AirbyteStreamNameNamespacePair.java similarity index 78% rename from airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteStreamNameNamespacePair.java rename to airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/AirbyteStreamNameNamespacePair.java index d733efbed0e37..123397533bc99 100644 --- a/airbyte-integrations/bases/base-java/src/main/java/io/airbyte/integrations/base/AirbyteStreamNameNamespacePair.java +++ b/airbyte-protocol/protocol-models/src/main/java/io/airbyte/protocol/models/AirbyteStreamNameNamespacePair.java @@ -2,12 +2,8 @@ * Copyright (c) 2022 Airbyte, Inc., all rights reserved. */ -package io.airbyte.integrations.base; +package io.airbyte.protocol.models; -import io.airbyte.protocol.models.AirbyteRecordMessage; -import io.airbyte.protocol.models.AirbyteStream; -import io.airbyte.protocol.models.ConfiguredAirbyteCatalog; -import io.airbyte.protocol.models.ConfiguredAirbyteStream; import java.util.HashSet; import java.util.Objects; import java.util.Set; @@ -35,12 +31,12 @@ public String getNamespace() { return namespace; } + /** + * As this is used as a metrics tag, enforce snake case. + */ @Override public String toString() { - return "AirbyteStreamNameNamespacePair{" + - "name='" + name + '\'' + - ", namespace='" + namespace + '\'' + - '}'; + return (namespace != null ? namespace : "") + "_" + name; } @Override @@ -85,27 +81,23 @@ public int compareTo(final AirbyteStreamNameNamespacePair o) { return namespace.compareTo(o.getNamespace()); } - public static void main(final String[] args) { - System.out.println("test".compareTo(null)); - } - public static AirbyteStreamNameNamespacePair fromRecordMessage(final AirbyteRecordMessage msg) { return new AirbyteStreamNameNamespacePair(msg.getStream(), msg.getNamespace()); } - public static AirbyteStreamNameNamespacePair fromAirbyteSteam(final AirbyteStream stream) { + public static AirbyteStreamNameNamespacePair fromAirbyteStream(final AirbyteStream stream) { return new AirbyteStreamNameNamespacePair(stream.getName(), stream.getNamespace()); } public static AirbyteStreamNameNamespacePair fromConfiguredAirbyteSteam(final ConfiguredAirbyteStream stream) { - return fromAirbyteSteam(stream.getStream()); + return fromAirbyteStream(stream.getStream()); } public static Set fromConfiguredCatalog(final ConfiguredAirbyteCatalog catalog) { final var pairs = new HashSet(); for (final ConfiguredAirbyteStream stream : catalog.getStreams()) { - final var pair = fromAirbyteSteam(stream.getStream()); + final var pair = fromAirbyteStream(stream.getStream()); pairs.add(pair); } diff --git a/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml b/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml index 9965bde95825d..5c6baf92524fc 100644 --- a/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml +++ b/airbyte-protocol/protocol-models/src/main/resources/airbyte_protocol/airbyte_protocol.yaml @@ -4,7 +4,7 @@ title: AirbyteProtocol type: object description: AirbyteProtocol structs -version: 0.3.1 +version: 0.3.2 properties: airbyte_message: "$ref": "#/definitions/AirbyteMessage" @@ -174,12 +174,16 @@ definitions: type: string enum: - ERROR + - ESTIMATE 
emitted_at: description: "the time in ms that the message was emitted" type: number error: description: "error trace message: the error object" "$ref": "#/definitions/AirbyteErrorTraceMessage" + estimate: + description: "Estimate trace message: a guess at how much data will be produced in this sync" + "$ref": "#/definitions/AirbyteEstimateTraceMessage" AirbyteErrorTraceMessage: type: object additionalProperties: true @@ -201,6 +205,32 @@ definitions: enum: - system_error - config_error + AirbyteEstimateTraceMessage: + type: object + additionalProperties: true + required: + - name + - type + properties: + name: + description: The name of the stream + type: string + type: + title: "estimate type" # this title is required to avoid python codegen conflicts with the "type" parameter in AirbyteMessage. See https://github.com/airbytehq/airbyte/pull/12581 + description: The type of estimate + type: string + enum: + - STREAM + - SYNC + namespace: + description: The namespace of the stream + type: string + row_estimate: + description: The estimated number of rows to be emitted by this sync for this stream + type: integer + byte_estimate: + description: The estimated number of bytes to be emitted by this sync for this stream + type: integer AirbyteControlMessage: type: object additionalProperties: true diff --git a/airbyte-proxy/Dockerfile b/airbyte-proxy/Dockerfile index aef41fd0dae31..ac453a8cbd509 100644 --- a/airbyte-proxy/Dockerfile +++ b/airbyte-proxy/Dockerfile @@ -2,7 +2,7 @@ FROM nginx:latest -ARG VERSION=0.40.18 +ARG VERSION=0.40.19 ENV APPLICATION airbyte-proxy ENV VERSION ${VERSION} diff --git a/airbyte-server/Dockerfile b/airbyte-server/Dockerfile index 8a1e61b8c3ad3..b04336ff2c58b 100644 --- a/airbyte-server/Dockerfile +++ b/airbyte-server/Dockerfile @@ -3,7 +3,7 @@ FROM ${JDK_IMAGE} AS server EXPOSE 8000 -ARG VERSION=0.40.18 +ARG VERSION=0.40.19 ENV APPLICATION airbyte-server ENV VERSION ${VERSION} diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiBinder.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiBinder.java deleted file mode 100644 index c4ddb4f0149e6..0000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiBinder.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server; - -import io.airbyte.server.apis.ConfigurationApi; -import org.glassfish.hk2.utilities.binding.AbstractBinder; -import org.glassfish.jersey.process.internal.RequestScoped; - -public class ConfigurationApiBinder extends AbstractBinder { - - @Override - protected void configure() { - bindFactory(ConfigurationApiFactory.class) - .to(ConfigurationApi.class) - .in(RequestScoped.class); - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java deleted file mode 100644 index 2421e4061c7a6..0000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/ConfigurationApiFactory.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server; - -import io.airbyte.analytics.TrackingClient; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.StatePersistence; -import io.airbyte.db.Database; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.server.apis.ConfigurationApi; -import io.airbyte.server.scheduler.EventRunner; -import io.airbyte.server.scheduler.SynchronousSchedulerClient; -import java.net.http.HttpClient; -import java.nio.file.Path; -import java.util.Map; -import org.flywaydb.core.Flyway; -import org.glassfish.hk2.api.Factory; -import org.slf4j.MDC; - -public class ConfigurationApiFactory implements Factory { - - private static ConfigRepository configRepository; - private static JobPersistence jobPersistence; - private static SecretsRepositoryReader secretsRepositoryReader; - private static SecretsRepositoryWriter secretsRepositoryWriter; - private static SynchronousSchedulerClient synchronousSchedulerClient; - private static StatePersistence statePersistence; - private static Map mdc; - private static TrackingClient trackingClient; - private static WorkerEnvironment workerEnvironment; - private static LogConfigs logConfigs; - private static AirbyteVersion airbyteVersion; - private static EventRunner eventRunner; - - public static void setValues( - final ConfigRepository configRepository, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final JobPersistence jobPersistence, - final SynchronousSchedulerClient synchronousSchedulerClient, - final StatePersistence statePersistence, - final Map mdc, - final Database configsDatabase, - final Database jobsDatabase, - final TrackingClient trackingClient, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final AirbyteVersion airbyteVersion, - final Path workspaceRoot, - final HttpClient httpClient, - final EventRunner eventRunner, - final Flyway configsFlyway, - final Flyway jobsFlyway) { - ConfigurationApiFactory.configRepository = configRepository; - ConfigurationApiFactory.jobPersistence = jobPersistence; - ConfigurationApiFactory.secretsRepositoryReader = secretsRepositoryReader; - ConfigurationApiFactory.secretsRepositoryWriter = secretsRepositoryWriter; - ConfigurationApiFactory.synchronousSchedulerClient = synchronousSchedulerClient; - ConfigurationApiFactory.mdc = mdc; - ConfigurationApiFactory.trackingClient = trackingClient; - ConfigurationApiFactory.workerEnvironment = workerEnvironment; - ConfigurationApiFactory.logConfigs = logConfigs; - ConfigurationApiFactory.airbyteVersion = airbyteVersion; - ConfigurationApiFactory.eventRunner = eventRunner; - ConfigurationApiFactory.statePersistence = statePersistence; - } - - @Override - public ConfigurationApi provide() { - MDC.setContextMap(ConfigurationApiFactory.mdc); - - return new ConfigurationApi( - ConfigurationApiFactory.configRepository, - ConfigurationApiFactory.jobPersistence, - ConfigurationApiFactory.secretsRepositoryReader, - ConfigurationApiFactory.secretsRepositoryWriter, - ConfigurationApiFactory.synchronousSchedulerClient, - ConfigurationApiFactory.statePersistence, - ConfigurationApiFactory.trackingClient, - ConfigurationApiFactory.workerEnvironment, 
- ConfigurationApiFactory.logConfigs, - ConfigurationApiFactory.airbyteVersion, - ConfigurationApiFactory.eventRunner); - } - - @Override - public void dispose(final ConfigurationApi service) { - /* noop */ - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java index 98332508930a6..e74c910944183 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerApp.java @@ -21,6 +21,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.SecretsRepositoryWriter; +import io.airbyte.config.persistence.StatePersistence; import io.airbyte.config.persistence.StreamResetPersistence; import io.airbyte.config.persistence.split_secrets.SecretPersistence; import io.airbyte.config.persistence.split_secrets.SecretsHydrator; @@ -61,11 +62,15 @@ import io.airbyte.server.handlers.SchedulerHandler; import io.airbyte.server.handlers.SourceDefinitionsHandler; import io.airbyte.server.handlers.SourceHandler; +import io.airbyte.server.handlers.StateHandler; +import io.airbyte.server.handlers.WebBackendConnectionsHandler; +import io.airbyte.server.handlers.WebBackendGeographiesHandler; import io.airbyte.server.handlers.WorkspacesHandler; import io.airbyte.server.scheduler.DefaultSynchronousSchedulerClient; import io.airbyte.server.scheduler.EventRunner; import io.airbyte.server.scheduler.TemporalEventRunner; import io.airbyte.validation.json.JsonSchemaValidator; +import io.airbyte.workers.helper.ConnectionHelper; import io.airbyte.workers.normalization.NormalizationRunnerFactory; import io.temporal.serviceclient.WorkflowServiceStubs; import java.net.http.HttpClient; @@ -254,11 +259,14 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final AttemptHandler attemptHandler = new AttemptHandler(jobPersistence); + final ConnectionHelper connectionHelper = new ConnectionHelper(configRepository, workspaceHelper); + final ConnectionsHandler connectionsHandler = new ConnectionsHandler( configRepository, workspaceHelper, trackingClient, - eventRunner); + eventRunner, + connectionHelper); final DestinationHandler destinationHandler = new DestinationHandler( configRepository, @@ -287,7 +295,7 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final HealthCheckHandler healthCheckHandler = new HealthCheckHandler(configRepository); - final OAuthHandler oAuthHandler = new OAuthHandler(configRepository, httpClient, trackingClient); + final OAuthHandler oAuthHandler = new OAuthHandler(configRepository, httpClient, trackingClient, secretsRepositoryReader); final SourceHandler sourceHandler = new SourceHandler( configRepository, @@ -307,7 +315,8 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, sourceDefinitionsHandler, destinationHandler, destinationDefinitionsHandler, - configs.getAirbyteVersion()); + configs.getAirbyteVersion(), + temporalClient); final LogsHandler logsHandler = new LogsHandler(configs); @@ -320,6 +329,23 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, final OpenApiConfigHandler openApiConfigHandler = new OpenApiConfigHandler(); + final StatePersistence statePersistence = new StatePersistence(configsDatabase); + + final StateHandler stateHandler = new StateHandler(statePersistence); + + final WebBackendConnectionsHandler webBackendConnectionsHandler = new 
WebBackendConnectionsHandler( + connectionsHandler, + stateHandler, + sourceHandler, + destinationHandler, + jobHistoryHandler, + schedulerHandler, + operationsHandler, + eventRunner, + configRepository); + + final WebBackendGeographiesHandler webBackendGeographiesHandler = new WebBackendGeographiesHandler(); + LOGGER.info("Starting server..."); return apiFactory.create( @@ -352,7 +378,11 @@ public static ServerRunnable getServer(final ServerFactory apiFactory, operationsHandler, schedulerHandler, sourceHandler, - workspacesHandler); + sourceDefinitionsHandler, + stateHandler, + workspacesHandler, + webBackendConnectionsHandler, + webBackendGeographiesHandler); } public static void main(final String[] args) { diff --git a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java index bc5048a098aa7..cd861f698dc67 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/ServerFactory.java @@ -11,11 +11,9 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.StatePersistence; import io.airbyte.db.Database; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.server.apis.AttemptApiController; -import io.airbyte.server.apis.ConfigurationApi; import io.airbyte.server.apis.ConnectionApiController; import io.airbyte.server.apis.DbMigrationApiController; import io.airbyte.server.apis.DestinationApiController; @@ -30,6 +28,12 @@ import io.airbyte.server.apis.OperationApiController; import io.airbyte.server.apis.SchedulerApiController; import io.airbyte.server.apis.SourceApiController; +import io.airbyte.server.apis.SourceDefinitionApiController; +import io.airbyte.server.apis.SourceDefinitionSpecificationApiController; +import io.airbyte.server.apis.SourceOauthApiController; +import io.airbyte.server.apis.StateApiController; +import io.airbyte.server.apis.WebBackendApiController; +import io.airbyte.server.apis.WorkspaceApiController; import io.airbyte.server.apis.binders.AttemptApiBinder; import io.airbyte.server.apis.binders.ConnectionApiBinder; import io.airbyte.server.apis.binders.DbMigrationBinder; @@ -45,7 +49,12 @@ import io.airbyte.server.apis.binders.OperationApiBinder; import io.airbyte.server.apis.binders.SchedulerApiBinder; import io.airbyte.server.apis.binders.SourceApiBinder; +import io.airbyte.server.apis.binders.SourceDefinitionApiBinder; +import io.airbyte.server.apis.binders.SourceDefinitionSpecificationApiBinder; import io.airbyte.server.apis.binders.SourceOauthApiBinder; +import io.airbyte.server.apis.binders.StateApiBinder; +import io.airbyte.server.apis.binders.WebBackendApiBinder; +import io.airbyte.server.apis.binders.WorkspaceApiBinder; import io.airbyte.server.apis.factories.AttemptApiFactory; import io.airbyte.server.apis.factories.ConnectionApiFactory; import io.airbyte.server.apis.factories.DbMigrationApiFactory; @@ -61,7 +70,12 @@ import io.airbyte.server.apis.factories.OperationApiFactory; import io.airbyte.server.apis.factories.SchedulerApiFactory; import io.airbyte.server.apis.factories.SourceApiFactory; +import io.airbyte.server.apis.factories.SourceDefinitionApiFactory; +import io.airbyte.server.apis.factories.SourceDefinitionSpecificationApiFactory; import io.airbyte.server.apis.factories.SourceOauthApiFactory; +import 
io.airbyte.server.apis.factories.StateApiFactory; +import io.airbyte.server.apis.factories.WebBackendApiFactory; +import io.airbyte.server.apis.factories.WorkspaceApiFactory; import io.airbyte.server.handlers.AttemptHandler; import io.airbyte.server.handlers.ConnectionsHandler; import io.airbyte.server.handlers.DbMigrationHandler; @@ -74,7 +88,11 @@ import io.airbyte.server.handlers.OpenApiConfigHandler; import io.airbyte.server.handlers.OperationsHandler; import io.airbyte.server.handlers.SchedulerHandler; +import io.airbyte.server.handlers.SourceDefinitionsHandler; import io.airbyte.server.handlers.SourceHandler; +import io.airbyte.server.handlers.StateHandler; +import io.airbyte.server.handlers.WebBackendConnectionsHandler; +import io.airbyte.server.handlers.WebBackendGeographiesHandler; import io.airbyte.server.handlers.WorkspacesHandler; import io.airbyte.server.scheduler.EventRunner; import io.airbyte.server.scheduler.SynchronousSchedulerClient; @@ -116,7 +134,11 @@ ServerRunnable create(final SynchronousSchedulerClient synchronousSchedulerClien final OperationsHandler operationsHandler, final SchedulerHandler schedulerHandler, final SourceHandler sourceHandler, - final WorkspacesHandler workspacesHandler); + final SourceDefinitionsHandler sourceDefinitionsHandler, + final StateHandler stateHandler, + final WorkspacesHandler workspacesHandler, + final WebBackendConnectionsHandler webBackendConnectionsHandler, + final WebBackendGeographiesHandler webBackendGeographiesHandler); class Api implements ServerFactory { @@ -150,30 +172,13 @@ public ServerRunnable create(final SynchronousSchedulerClient synchronousSchedul final OperationsHandler operationsHandler, final SchedulerHandler schedulerHandler, final SourceHandler sourceHandler, - final WorkspacesHandler workspacesHandler) { + final SourceDefinitionsHandler sourceDefinitionsHandler, + final StateHandler stateHandler, + final WorkspacesHandler workspacesHandler, + final WebBackendConnectionsHandler webBackendConnectionsHandler, + final WebBackendGeographiesHandler webBackendGeographiesHandler) { final Map mdc = MDC.getCopyOfContextMap(); - // set static values for factory - ConfigurationApiFactory.setValues( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - jobPersistence, - synchronousSchedulerClient, - new StatePersistence(configsDatabase), - mdc, - configsDatabase, - jobsDatabase, - trackingClient, - workerEnvironment, - logConfigs, - airbyteVersion, - workspaceRoot, - httpClient, - eventRunner, - configsFlyway, - jobsFlyway); - AttemptApiFactory.setValues(attemptHandler, mdc); ConnectionApiFactory.setValues( @@ -210,9 +215,18 @@ public ServerRunnable create(final SynchronousSchedulerClient synchronousSchedul SourceApiFactory.setValues(schedulerHandler, sourceHandler); + SourceDefinitionApiFactory.setValues(sourceDefinitionsHandler); + + SourceDefinitionSpecificationApiFactory.setValues(schedulerHandler); + + StateApiFactory.setValues(stateHandler); + + WebBackendApiFactory.setValues(webBackendConnectionsHandler, webBackendGeographiesHandler); + + WorkspaceApiFactory.setValues(workspacesHandler); + // server configurations final Set> componentClasses = Set.of( - ConfigurationApi.class, AttemptApiController.class, ConnectionApiController.class, DbMigrationApiController.class, @@ -228,11 +242,15 @@ public ServerRunnable create(final SynchronousSchedulerClient synchronousSchedul OperationApiController.class, SchedulerApiController.class, SourceApiController.class, - SourceOauthApiFactory.class); + 
SourceDefinitionApiController.class, + SourceDefinitionSpecificationApiController.class, + SourceOauthApiController.class, + StateApiController.class, + WebBackendApiController.class, + WorkspaceApiController.class); final Set components = Set.of( new CorsFilter(), - new ConfigurationApiBinder(), new AttemptApiBinder(), new ConnectionApiBinder(), new DbMigrationBinder(), @@ -248,7 +266,12 @@ public ServerRunnable create(final SynchronousSchedulerClient synchronousSchedul new OperationApiBinder(), new SchedulerApiBinder(), new SourceApiBinder(), - new SourceOauthApiBinder()); + new SourceDefinitionApiBinder(), + new SourceDefinitionSpecificationApiBinder(), + new SourceOauthApiBinder(), + new StateApiBinder(), + new WebBackendApiBinder(), + new WorkspaceApiBinder()); // construct server return new ServerApp(airbyteVersion, componentClasses, components); diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ApiHelper.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ApiHelper.java new file mode 100644 index 0000000000000..5164ea9135f72 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ApiHelper.java @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis; + +import io.airbyte.config.persistence.ConfigNotFoundException; +import io.airbyte.server.errors.BadObjectSchemaKnownException; +import io.airbyte.server.errors.IdNotFoundKnownException; +import io.airbyte.validation.json.JsonValidationException; +import java.io.IOException; + +public class ApiHelper { + + static T execute(final HandlerCall call) { + try { + return call.call(); + } catch (final ConfigNotFoundException e) { + throw new IdNotFoundKnownException(String.format("Could not find configuration for %s: %s.", e.getType(), e.getConfigId()), + e.getConfigId(), e); + } catch (final JsonValidationException e) { + throw new BadObjectSchemaKnownException( + String.format("The provided configuration does not fulfill the specification. Errors: %s", e.getMessage()), e); + } catch (final IOException e) { + throw new RuntimeException(e); + } + } + + interface HandlerCall { + + T call() throws ConfigNotFoundException, IOException, JsonValidationException; + + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java index 4f83d98358e35..e5856dc8848d2 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/AttemptApiController.java @@ -27,7 +27,7 @@ public InternalOperationResult saveStats(final SaveStatsRequestBody saveStatsReq @Override public InternalOperationResult setWorkflowInAttempt(final SetWorkflowInAttemptRequestBody requestBody) { - return ConfigurationApi.execute(() -> attemptHandler.setWorkflowInAttempt(requestBody)); + return ApiHelper.execute(() -> attemptHandler.setWorkflowInAttempt(requestBody)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java deleted file mode 100644 index e58f68c7d3a7e..0000000000000 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConfigurationApi.java +++ /dev/null @@ -1,1120 +0,0 @@ -/* - * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.analytics.TrackingClient; -import io.airbyte.api.model.generated.AttemptNormalizationStatusReadList; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.CheckOperationRead; -import io.airbyte.api.model.generated.CompleteDestinationOAuthRequest; -import io.airbyte.api.model.generated.CompleteSourceOauthRequest; -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionSearch; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateCreateOrUpdate; -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.model.generated.CustomDestinationDefinitionUpdate; -import io.airbyte.api.model.generated.CustomSourceDefinitionCreate; -import io.airbyte.api.model.generated.CustomSourceDefinitionUpdate; -import io.airbyte.api.model.generated.DbMigrationExecutionRead; -import io.airbyte.api.model.generated.DbMigrationReadList; -import io.airbyte.api.model.generated.DbMigrationRequestBody; -import io.airbyte.api.model.generated.DestinationCloneRequestBody; -import io.airbyte.api.model.generated.DestinationCoreConfig; -import io.airbyte.api.model.generated.DestinationCreate; -import io.airbyte.api.model.generated.DestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.DestinationDefinitionReadList; -import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.api.model.generated.DestinationDefinitionUpdate; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationOauthConsentRequest; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.HealthCheckRead; -import io.airbyte.api.model.generated.InternalOperationResult; -import io.airbyte.api.model.generated.JobDebugInfoRead; -import io.airbyte.api.model.generated.JobIdRequestBody; -import io.airbyte.api.model.generated.JobInfoLightRead; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.JobListRequestBody; -import io.airbyte.api.model.generated.JobReadList; -import io.airbyte.api.model.generated.LogsRequestBody; -import io.airbyte.api.model.generated.Notification; -import io.airbyte.api.model.generated.NotificationRead; -import io.airbyte.api.model.generated.OAuthConsentRead; -import io.airbyte.api.model.generated.OperationCreate; -import io.airbyte.api.model.generated.OperationIdRequestBody; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.OperatorConfiguration; -import 
io.airbyte.api.model.generated.PrivateDestinationDefinitionRead; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionReadList; -import io.airbyte.api.model.generated.PrivateSourceDefinitionRead; -import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList; -import io.airbyte.api.model.generated.SaveStatsRequestBody; -import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; -import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; -import io.airbyte.api.model.generated.SetWorkflowInAttemptRequestBody; -import io.airbyte.api.model.generated.SlugRequestBody; -import io.airbyte.api.model.generated.SourceCloneRequestBody; -import io.airbyte.api.model.generated.SourceCoreConfig; -import io.airbyte.api.model.generated.SourceCreate; -import io.airbyte.api.model.generated.SourceDefinitionCreate; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead; -import io.airbyte.api.model.generated.SourceDefinitionUpdate; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceOauthConsentRequest; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceReadList; -import io.airbyte.api.model.generated.SourceSearch; -import io.airbyte.api.model.generated.SourceUpdate; -import io.airbyte.api.model.generated.WebBackendConnectionCreate; -import io.airbyte.api.model.generated.WebBackendConnectionRead; -import io.airbyte.api.model.generated.WebBackendConnectionReadList; -import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; -import io.airbyte.api.model.generated.WebBackendConnectionUpdate; -import io.airbyte.api.model.generated.WebBackendGeographiesListResult; -import io.airbyte.api.model.generated.WebBackendWorkspaceState; -import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; -import io.airbyte.api.model.generated.WorkspaceCreate; -import io.airbyte.api.model.generated.WorkspaceGiveFeedback; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceRead; -import io.airbyte.api.model.generated.WorkspaceReadList; -import io.airbyte.api.model.generated.WorkspaceUpdate; -import io.airbyte.api.model.generated.WorkspaceUpdateName; -import io.airbyte.commons.version.AirbyteVersion; -import io.airbyte.config.Configs.WorkerEnvironment; -import io.airbyte.config.helpers.LogConfigs; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; -import io.airbyte.config.persistence.SecretsRepositoryReader; -import io.airbyte.config.persistence.SecretsRepositoryWriter; -import io.airbyte.config.persistence.StatePersistence; -import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.server.errors.BadObjectSchemaKnownException; -import io.airbyte.server.errors.IdNotFoundKnownException; -import io.airbyte.server.handlers.ConnectionsHandler; -import io.airbyte.server.handlers.DestinationDefinitionsHandler; -import 
io.airbyte.server.handlers.DestinationHandler; -import io.airbyte.server.handlers.JobHistoryHandler; -import io.airbyte.server.handlers.OperationsHandler; -import io.airbyte.server.handlers.SchedulerHandler; -import io.airbyte.server.handlers.SourceDefinitionsHandler; -import io.airbyte.server.handlers.SourceHandler; -import io.airbyte.server.handlers.StateHandler; -import io.airbyte.server.handlers.WebBackendConnectionsHandler; -import io.airbyte.server.handlers.WebBackendGeographiesHandler; -import io.airbyte.server.handlers.WorkspacesHandler; -import io.airbyte.server.scheduler.EventRunner; -import io.airbyte.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.validation.json.JsonSchemaValidator; -import io.airbyte.validation.json.JsonValidationException; -import java.io.File; -import java.io.IOException; -import java.util.Map; -import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.NotImplementedException; - -@javax.ws.rs.Path("/v1") -@Slf4j -public class ConfigurationApi implements io.airbyte.api.generated.V1Api { - - private final WorkspacesHandler workspacesHandler; - private final SourceDefinitionsHandler sourceDefinitionsHandler; - private final SourceHandler sourceHandler; - private final DestinationDefinitionsHandler destinationDefinitionsHandler; - private final DestinationHandler destinationHandler; - private final ConnectionsHandler connectionsHandler; - private final OperationsHandler operationsHandler; - private final SchedulerHandler schedulerHandler; - private final StateHandler stateHandler; - private final JobHistoryHandler jobHistoryHandler; - private final WebBackendConnectionsHandler webBackendConnectionsHandler; - private final WebBackendGeographiesHandler webBackendGeographiesHandler; - - public ConfigurationApi(final ConfigRepository configRepository, - final JobPersistence jobPersistence, - final SecretsRepositoryReader secretsRepositoryReader, - final SecretsRepositoryWriter secretsRepositoryWriter, - final SynchronousSchedulerClient synchronousSchedulerClient, - final StatePersistence statePersistence, - final TrackingClient trackingClient, - final WorkerEnvironment workerEnvironment, - final LogConfigs logConfigs, - final AirbyteVersion airbyteVersion, - final EventRunner eventRunner) { - - final JsonSchemaValidator schemaValidator = new JsonSchemaValidator(); - - final WorkspaceHelper workspaceHelper = new WorkspaceHelper(configRepository, jobPersistence); - - connectionsHandler = new ConnectionsHandler( - configRepository, - workspaceHelper, - trackingClient, - eventRunner); - - schedulerHandler = new SchedulerHandler( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - synchronousSchedulerClient, - jobPersistence, - workerEnvironment, - logConfigs, - eventRunner, - connectionsHandler); - - stateHandler = new StateHandler(statePersistence); - sourceHandler = new SourceHandler( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - schemaValidator, - connectionsHandler); - sourceDefinitionsHandler = new SourceDefinitionsHandler(configRepository, synchronousSchedulerClient, sourceHandler); - operationsHandler = new OperationsHandler(configRepository); - destinationHandler = new DestinationHandler( - configRepository, - secretsRepositoryReader, - secretsRepositoryWriter, - schemaValidator, - connectionsHandler); - destinationDefinitionsHandler = new DestinationDefinitionsHandler(configRepository, synchronousSchedulerClient, destinationHandler); - workspacesHandler = new WorkspacesHandler( - 
configRepository, - secretsRepositoryWriter, - connectionsHandler, - destinationHandler, - sourceHandler); - jobHistoryHandler = new JobHistoryHandler(jobPersistence, workerEnvironment, logConfigs, connectionsHandler, sourceHandler, - sourceDefinitionsHandler, destinationHandler, destinationDefinitionsHandler, airbyteVersion); - webBackendConnectionsHandler = new WebBackendConnectionsHandler( - connectionsHandler, - stateHandler, - sourceHandler, - destinationHandler, - jobHistoryHandler, - schedulerHandler, - operationsHandler, - eventRunner, - configRepository); - webBackendGeographiesHandler = new WebBackendGeographiesHandler(); - } - - // WORKSPACE - - @Override - public WorkspaceReadList listWorkspaces() { - return execute(workspacesHandler::listWorkspaces); - } - - @Override - public WorkspaceRead createWorkspace(final WorkspaceCreate workspaceCreate) { - return execute(() -> workspacesHandler.createWorkspace(workspaceCreate)); - } - - @Override - public void deleteWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - execute(() -> { - workspacesHandler.deleteWorkspace(workspaceIdRequestBody); - return null; - }); - } - - @Override - public WorkspaceRead getWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return execute(() -> workspacesHandler.getWorkspace(workspaceIdRequestBody)); - } - - @Override - public WorkspaceRead getWorkspaceBySlug(final SlugRequestBody slugRequestBody) { - return execute(() -> workspacesHandler.getWorkspaceBySlug(slugRequestBody)); - } - - @Override - public WorkspaceRead getWorkspaceByConnectionId(final ConnectionIdRequestBody connectionIdRequestBody) { - return execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); - } - - @Override - public WorkspaceRead updateWorkspace(final WorkspaceUpdate workspaceUpdate) { - return execute(() -> workspacesHandler.updateWorkspace(workspaceUpdate)); - } - - @Override - public WorkspaceRead updateWorkspaceName(final WorkspaceUpdateName workspaceUpdateName) { - return execute(() -> workspacesHandler.updateWorkspaceName(workspaceUpdateName)); - } - - @Override - public void updateWorkspaceFeedback(final WorkspaceGiveFeedback workspaceGiveFeedback) { - execute(() -> { - workspacesHandler.setFeedbackDone(workspaceGiveFeedback); - return null; - }); - } - - /** - * This implementation has been moved to {@link AttemptApiController}. 
Since the path of - * {@link AttemptApiController} is more granular, it will override this implementation - */ - @Override - public NotificationRead tryNotificationConfig(final Notification notification) { - throw new NotImplementedException(); - } - - // SOURCE - - @Override - public SourceDefinitionReadList listSourceDefinitions() { - return execute(sourceDefinitionsHandler::listSourceDefinitions); - } - - @Override - public SourceDefinitionReadList listSourceDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return execute(() -> sourceDefinitionsHandler.listSourceDefinitionsForWorkspace(workspaceIdRequestBody)); - } - - @Override - public SourceDefinitionReadList listLatestSourceDefinitions() { - return execute(sourceDefinitionsHandler::listLatestSourceDefinitions); - } - - @Override - public PrivateSourceDefinitionReadList listPrivateSourceDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return execute(() -> sourceDefinitionsHandler.listPrivateSourceDefinitions(workspaceIdRequestBody)); - } - - @Override - public SourceDefinitionRead getSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) { - return execute(() -> sourceDefinitionsHandler.getSourceDefinition(sourceDefinitionIdRequestBody)); - } - - @Override - public SourceDefinitionRead getSourceDefinitionForWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - return execute(() -> sourceDefinitionsHandler.getSourceDefinitionForWorkspace(sourceDefinitionIdWithWorkspaceId)); - } - - // TODO: Deprecate this route in favor of createCustomSourceDefinition - // since all connector definitions created through the API are custom - @Override - public SourceDefinitionRead createSourceDefinition(final SourceDefinitionCreate sourceDefinitionCreate) { - return execute(() -> sourceDefinitionsHandler.createPrivateSourceDefinition(sourceDefinitionCreate)); - } - - @Override - public SourceDefinitionRead createCustomSourceDefinition(final CustomSourceDefinitionCreate customSourceDefinitionCreate) { - return execute(() -> sourceDefinitionsHandler.createCustomSourceDefinition(customSourceDefinitionCreate)); - } - - @Override - public SourceDefinitionRead updateSourceDefinition(final SourceDefinitionUpdate sourceDefinitionUpdate) { - return execute(() -> sourceDefinitionsHandler.updateSourceDefinition(sourceDefinitionUpdate)); - } - - @Override - public SourceDefinitionRead updateCustomSourceDefinition(final CustomSourceDefinitionUpdate customSourceDefinitionUpdate) { - return execute(() -> sourceDefinitionsHandler.updateCustomSourceDefinition(customSourceDefinitionUpdate)); - } - - @Override - public void deleteSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) { - execute(() -> { - sourceDefinitionsHandler.deleteSourceDefinition(sourceDefinitionIdRequestBody); - return null; - }); - } - - @Override - public void deleteCustomSourceDefinition(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - execute(() -> { - sourceDefinitionsHandler.deleteCustomSourceDefinition(sourceDefinitionIdWithWorkspaceId); - return null; - }); - } - - @Override - public PrivateSourceDefinitionRead grantSourceDefinitionToWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - return execute(() -> sourceDefinitionsHandler.grantSourceDefinitionToWorkspace(sourceDefinitionIdWithWorkspaceId)); - } - - @Override - public void revokeSourceDefinitionFromWorkspace(final 
SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - execute(() -> { - sourceDefinitionsHandler.revokeSourceDefinitionFromWorkspace(sourceDefinitionIdWithWorkspaceId); - return null; - }); - } - - @Override - public InternalOperationResult saveStats(SaveStatsRequestBody saveStatsRequestBody) { - throw new UnsupportedOperationException(); - } - - // SOURCE SPECIFICATION - - @Override - public SourceDefinitionSpecificationRead getSourceDefinitionSpecification(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { - return execute(() -> schedulerHandler.getSourceDefinitionSpecification(sourceDefinitionIdWithWorkspaceId)); - } - - // OAUTH - - /** - * This implementation has been moved to {@link SourceOauthApiController}. Since the path of - * {@link SourceOauthApiController} is more granular, it will override this implementation - */ - @Override - public OAuthConsentRead getSourceOAuthConsent(final SourceOauthConsentRequest sourceOauthConsentRequest) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceOauthApiController}. Since the path of - * {@link SourceOauthApiController} is more granular, it will override this implementation - */ - @Override - public Map completeSourceOAuth(final CompleteSourceOauthRequest completeSourceOauthRequest) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationOauthApiController}. Since the path of - * {@link DestinationOauthApiController} is more granular, it will override this implementation - */ - @Override - public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsentRequest destinationOauthConsentRequest) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationOauthApiController}. Since the path of - * {@link DestinationOauthApiController} is more granular, it will override this implementation - */ - @Override - public Map completeDestinationOAuth(final CompleteDestinationOAuthRequest requestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationOauthApiController}. Since the path of - * {@link DestinationOauthApiController} is more granular, it will override this implementation - */ - @Override - public void setInstancewideDestinationOauthParams(final SetInstancewideDestinationOauthParamsRequestBody requestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceOauthApiController}. Since the path of - * {@link SourceOauthApiController} is more granular, it will override this implementation - */ - @Override - public void setInstancewideSourceOauthParams(final SetInstancewideSourceOauthParamsRequestBody requestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link AttemptApiController}. Since the path of - * {@link AttemptApiController} is more granular, it will override this implementation - */ - @Override - public InternalOperationResult setWorkflowInAttempt(final SetWorkflowInAttemptRequestBody setWorkflowInAttemptRequestBody) { - throw new NotImplementedException(); - } - - // SOURCE IMPLEMENTATION - - /** - * This implementation has been moved to {@link SourceApiController}. 
Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceRead createSource(final SourceCreate sourceCreate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceRead updateSource(final SourceUpdate sourceUpdate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceReadList listSourcesForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceReadList searchSources(final SourceSearch sourceSearch) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceRead getSource(final SourceIdRequestBody sourceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public void deleteSource(final SourceIdRequestBody sourceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceRead cloneSource(final SourceCloneRequestBody sourceCloneRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public CheckConnectionRead checkConnectionToSource(final SourceIdRequestBody sourceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public CheckConnectionRead checkConnectionToSourceForUpdate(final SourceUpdate sourceUpdate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SourceApiController}. Since the path of - * {@link SourceApiController} is more granular, it will override this implementation - */ - @Override - public SourceDiscoverSchemaRead discoverSchemaForSource(final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody) { - throw new NotImplementedException(); - } - - // DB MIGRATION - - /** - * This implementation has been moved to {@link DbMigrationApiController}. 
Since the path of - * {@link DbMigrationApiController} is more granular, it will override this implementation - */ - @Override - public DbMigrationReadList listMigrations(final DbMigrationRequestBody request) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DbMigrationApiController}. Since the path of - * {@link DbMigrationApiController} is more granular, it will override this implementation - */ - @Override - public DbMigrationExecutionRead executeMigrations(final DbMigrationRequestBody request) { - throw new NotImplementedException(); - } - - // DESTINATION - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionReadList listDestinationDefinitions() { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionReadList listDestinationDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionReadList listLatestDestinationDefinitions() { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public PrivateDestinationDefinitionReadList listPrivateDestinationDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionRead getDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionRead getDestinationDefinitionForWorkspace( - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. 
Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - // TODO: Deprecate this route in favor of createCustomDestinationDefinition - // since all connector definitions created through the API are custom - @Override - public DestinationDefinitionRead createDestinationDefinition(final DestinationDefinitionCreate destinationDefinitionCreate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionRead createCustomDestinationDefinition(final CustomDestinationDefinitionCreate customDestinationDefinitionCreate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionRead updateDestinationDefinition(final DestinationDefinitionUpdate destinationDefinitionUpdate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public DestinationDefinitionRead updateCustomDestinationDefinition(final CustomDestinationDefinitionUpdate customDestinationDefinitionUpdate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public void deleteDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public void deleteCustomDestinationDefinition(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public PrivateDestinationDefinitionRead grantDestinationDefinitionToWorkspace( - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationDefinitionApiController}. Since the path - * of {@link DestinationDefinitionApiController} is more granular, it will override this - * implementation - */ - @Override - public void revokeDestinationDefinitionFromWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - throw new NotImplementedException(); - } - - // DESTINATION SPECIFICATION - /** - * This implementation has been moved to {@link DestinationDefinitionSpecificationApiController}. 
- * Since the path of {@link DestinationDefinitionSpecificationApiController} is more granular, it - * will override this implementation - */ - @Override - public DestinationDefinitionSpecificationRead getDestinationDefinitionSpecification( - final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - throw new NotImplementedException(); - } - - // DESTINATION IMPLEMENTATION - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public DestinationRead createDestination(final DestinationCreate destinationCreate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public void deleteDestination(final DestinationIdRequestBody destinationIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public DestinationRead updateDestination(final DestinationUpdate destinationUpdate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public DestinationReadList listDestinationsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public DestinationReadList searchDestinations(final DestinationSearch destinationSearch) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public DestinationRead getDestination(final DestinationIdRequestBody destinationIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public DestinationRead cloneDestination(final DestinationCloneRequestBody destinationCloneRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public CheckConnectionRead checkConnectionToDestination(final DestinationIdRequestBody destinationIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link DestinationApiController}. 
Since the path of - * {@link DestinationApiController} is more granular, it will override this implementation - */ - @Override - public CheckConnectionRead checkConnectionToDestinationForUpdate(final DestinationUpdate destinationUpdate) { - throw new NotImplementedException(); - } - - // CONNECTION - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public ConnectionRead createConnection(final ConnectionCreate connectionCreate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public ConnectionRead updateConnection(final ConnectionUpdate connectionUpdate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public ConnectionReadList listConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public ConnectionReadList listAllConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public ConnectionReadList searchConnections(final ConnectionSearch connectionSearch) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public ConnectionRead getConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public void deleteConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public JobInfoRead syncConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link ConnectionApiController}. Since the path of - * {@link ConnectionApiController} is more granular, it will override this implementation - */ - @Override - public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - throw new NotImplementedException(); - } - - // Operations - - /** - * This implementation has been moved to {@link OperationApiController}. 
Since the path of - * {@link OperationApiController} is more granular, it will override this implementation - */ - @Override - public CheckOperationRead checkOperation(final OperatorConfiguration operatorConfiguration) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link OperationApiController}. Since the path of - * {@link OperationApiController} is more granular, it will override this implementation - */ - @Override - public OperationRead createOperation(final OperationCreate operationCreate) { - throw new NotImplementedException(); - } - - @Override - public ConnectionState createOrUpdateState(final ConnectionStateCreateOrUpdate connectionStateCreateOrUpdate) { - return ConfigurationApi.execute(() -> stateHandler.createOrUpdateState(connectionStateCreateOrUpdate)); - } - - /** - * This implementation has been moved to {@link OperationApiController}. Since the path of - * {@link OperationApiController} is more granular, it will override this implementation - */ - @Override - public void deleteOperation(final OperationIdRequestBody operationIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link OperationApiController}. Since the path of - * {@link OperationApiController} is more granular, it will override this implementation - */ - @Override - public OperationReadList listOperationsForConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link OperationApiController}. Since the path of - * {@link OperationApiController} is more granular, it will override this implementation - */ - @Override - public OperationRead getOperation(final OperationIdRequestBody operationIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link OperationApiController}. Since the path of - * {@link OperationApiController} is more granular, it will override this implementation - */ - @Override - public OperationRead updateOperation(final OperationUpdate operationUpdate) { - throw new NotImplementedException(); - } - - @Override - public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) { - return ConfigurationApi.execute(() -> stateHandler.getState(connectionIdRequestBody)); - } - - // SCHEDULER - /** - * This implementation has been moved to {@link SchedulerApiController}. Since the path of - * {@link SchedulerApiController} is more granular, it will override this implementation - */ - @Override - public CheckConnectionRead executeSourceCheckConnection(final SourceCoreConfig sourceConfig) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SchedulerApiController}. Since the path of - * {@link SchedulerApiController} is more granular, it will override this implementation - */ - @Override - public CheckConnectionRead executeDestinationCheckConnection(final DestinationCoreConfig destinationConfig) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link SchedulerApiController}. Since the path of - * {@link SchedulerApiController} is more granular, it will override this implementation - */ - @Override - public SourceDiscoverSchemaRead executeSourceDiscoverSchema(final SourceCoreConfig sourceCreate) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link JobsApiController}. 
Since the path of - * {@link JobsApiController} is more granular, it will override this implementation - */ - @Override - public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) { - throw new NotImplementedException(); - } - - // JOB HISTORY - - /** - * This implementation has been moved to {@link JobsApiController}. Since the path of - * {@link JobsApiController} is more granular, it will override this implementation - */ - @Override - public JobReadList listJobsFor(final JobListRequestBody jobListRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link JobsApiController}. Since the path of - * {@link JobsApiController} is more granular, it will override this implementation - */ - @Override - public JobInfoRead getJobInfo(final JobIdRequestBody jobIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link JobsApiController}. Since the path of - * {@link JobsApiController} is more granular, it will override this implementation - */ - @Override - public JobInfoLightRead getJobInfoLight(final JobIdRequestBody jobIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link JobsApiController}. Since the path of - * {@link JobsApiController} is more granular, it will override this implementation - */ - @Override - public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link JobsApiController}. Since the path of - * {@link JobsApiController} is more granular, it will override this implementation - */ - @Override - public AttemptNormalizationStatusReadList getAttemptNormalizationStatusesForJob(final JobIdRequestBody jobIdRequestBody) { - return execute(() -> jobHistoryHandler.getAttemptNormalizationStatuses(jobIdRequestBody)); - } - - /** - * This implementation has been moved to {@link LogsApiController}. Since the path of - * {@link LogsApiController} is more granular, it will override this implementation - */ - @Override - public File getLogs(final LogsRequestBody logsRequestBody) { - throw new NotImplementedException(); - } - - /** - * This implementation has been moved to {@link HealthApiController}. Since the path of - * {@link HealthApiController} is more granular, it will override this implementation - */ - @Override - public File getOpenApiSpec() { - throw new NotImplementedException(); - } - - // HEALTH - /** - * This implementation has been moved to {@link HealthApiController}. 
Since the path of - * {@link HealthApiController} is more granular, it will override this implementation - */ - @Override - public HealthCheckRead getHealthCheck() { - throw new NotImplementedException(); - } - - // WEB BACKEND - - @Override - public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return execute(() -> webBackendConnectionsHandler.webBackendListConnectionsForWorkspace(workspaceIdRequestBody)); - } - - @Override - public WebBackendGeographiesListResult webBackendListGeographies() { - return execute(webBackendGeographiesHandler::listGeographiesOSS); - } - - @Override - public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnectionRequestBody webBackendConnectionRequestBody) { - return execute(() -> webBackendConnectionsHandler.webBackendGetConnection(webBackendConnectionRequestBody)); - } - - @Override - public WebBackendConnectionRead webBackendCreateConnection(final WebBackendConnectionCreate webBackendConnectionCreate) { - return execute(() -> webBackendConnectionsHandler.webBackendCreateConnection(webBackendConnectionCreate)); - } - - @Override - public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConnectionUpdate webBackendConnectionUpdate) { - return execute(() -> webBackendConnectionsHandler.webBackendUpdateConnection(webBackendConnectionUpdate)); - } - - @Override - public ConnectionStateType getStateType(final ConnectionIdRequestBody connectionIdRequestBody) { - return ConfigurationApi.execute(() -> webBackendConnectionsHandler.getStateType(connectionIdRequestBody)); - } - - @Override - public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) { - return execute(() -> webBackendConnectionsHandler.getWorkspaceState(webBackendWorkspaceState)); - } - - // TODO: Move to common when all the api are moved - static T execute(final HandlerCall call) { - try { - return call.call(); - } catch (final ConfigNotFoundException e) { - throw new IdNotFoundKnownException(String.format("Could not find configuration for %s: %s.", e.getType(), e.getConfigId()), - e.getConfigId(), e); - } catch (final JsonValidationException e) { - throw new BadObjectSchemaKnownException( - String.format("The provided configuration does not fulfill the specification. 
Errors: %s", e.getMessage()), e); - } catch (final IOException e) { - throw new RuntimeException(e); - } - } - - interface HandlerCall { - - T call() throws ConfigNotFoundException, IOException, JsonValidationException; - - } - -} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java index 21d80c04e84cb..fe29b42e1f333 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java @@ -35,37 +35,37 @@ public ConnectionApiController(final ConnectionsHandler connectionsHandler, @Override public ConnectionRead createConnection(final ConnectionCreate connectionCreate) { - return ConfigurationApi.execute(() -> connectionsHandler.createConnection(connectionCreate)); + return ApiHelper.execute(() -> connectionsHandler.createConnection(connectionCreate)); } @Override public ConnectionRead updateConnection(final ConnectionUpdate connectionUpdate) { - return ConfigurationApi.execute(() -> connectionsHandler.updateConnection(connectionUpdate)); + return ApiHelper.execute(() -> connectionsHandler.updateConnection(connectionUpdate)); } @Override public ConnectionReadList listConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ConfigurationApi.execute(() -> connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)); + return ApiHelper.execute(() -> connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody)); } @Override public ConnectionReadList listAllConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ConfigurationApi.execute(() -> connectionsHandler.listAllConnectionsForWorkspace(workspaceIdRequestBody)); + return ApiHelper.execute(() -> connectionsHandler.listAllConnectionsForWorkspace(workspaceIdRequestBody)); } @Override public ConnectionReadList searchConnections(final ConnectionSearch connectionSearch) { - return ConfigurationApi.execute(() -> connectionsHandler.searchConnections(connectionSearch)); + return ApiHelper.execute(() -> connectionsHandler.searchConnections(connectionSearch)); } @Override public ConnectionRead getConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - return ConfigurationApi.execute(() -> connectionsHandler.getConnection(connectionIdRequestBody.getConnectionId())); + return ApiHelper.execute(() -> connectionsHandler.getConnection(connectionIdRequestBody.getConnectionId())); } @Override public void deleteConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { operationsHandler.deleteOperationsForConnection(connectionIdRequestBody); connectionsHandler.deleteConnection(connectionIdRequestBody.getConnectionId()); return null; @@ -74,12 +74,12 @@ public void deleteConnection(final ConnectionIdRequestBody connectionIdRequestBo @Override public JobInfoRead syncConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - return ConfigurationApi.execute(() -> schedulerHandler.syncConnection(connectionIdRequestBody)); + return ApiHelper.execute(() -> schedulerHandler.syncConnection(connectionIdRequestBody)); } @Override public JobInfoRead resetConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - return ConfigurationApi.execute(() -> schedulerHandler.resetConnection(connectionIdRequestBody)); + return ApiHelper.execute(() -> 
schedulerHandler.resetConnection(connectionIdRequestBody)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DbMigrationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DbMigrationApiController.java index 93cb85dd33a7a..202b1c4da554f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DbMigrationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/DbMigrationApiController.java @@ -22,12 +22,12 @@ public DbMigrationApiController(final DbMigrationHandler dbMigrationHandler) { @Override public DbMigrationExecutionRead executeMigrations(final DbMigrationRequestBody dbMigrationRequestBody) { - return ConfigurationApi.execute(() -> dbMigrationHandler.migrate(dbMigrationRequestBody)); + return ApiHelper.execute(() -> dbMigrationHandler.migrate(dbMigrationRequestBody)); } @Override public DbMigrationReadList listMigrations(final DbMigrationRequestBody dbMigrationRequestBody) { - return ConfigurationApi.execute(() -> dbMigrationHandler.list(dbMigrationRequestBody)); + return ApiHelper.execute(() -> dbMigrationHandler.list(dbMigrationRequestBody)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java index 99148027a59e0..8cbdbb5cd6d0c 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationApiController.java @@ -28,27 +28,27 @@ public class DestinationApiController implements DestinationApi { @Override public CheckConnectionRead checkConnectionToDestination(final DestinationIdRequestBody destinationIdRequestBody) { - return ConfigurationApi.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationId(destinationIdRequestBody)); + return ApiHelper.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationId(destinationIdRequestBody)); } @Override public CheckConnectionRead checkConnectionToDestinationForUpdate(final DestinationUpdate destinationUpdate) { - return ConfigurationApi.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(destinationUpdate)); + return ApiHelper.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(destinationUpdate)); } @Override public DestinationRead cloneDestination(final DestinationCloneRequestBody destinationCloneRequestBody) { - return ConfigurationApi.execute(() -> destinationHandler.cloneDestination(destinationCloneRequestBody)); + return ApiHelper.execute(() -> destinationHandler.cloneDestination(destinationCloneRequestBody)); } @Override public DestinationRead createDestination(final DestinationCreate destinationCreate) { - return ConfigurationApi.execute(() -> destinationHandler.createDestination(destinationCreate)); + return ApiHelper.execute(() -> destinationHandler.createDestination(destinationCreate)); } @Override public void deleteDestination(final DestinationIdRequestBody destinationIdRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { destinationHandler.deleteDestination(destinationIdRequestBody); return null; }); @@ -56,22 +56,22 @@ public void deleteDestination(final DestinationIdRequestBody destinationIdReques @Override public DestinationRead getDestination(final DestinationIdRequestBody destinationIdRequestBody) { - return ConfigurationApi.execute(() -> destinationHandler.getDestination(destinationIdRequestBody)); + return 
ApiHelper.execute(() -> destinationHandler.getDestination(destinationIdRequestBody)); } @Override public DestinationReadList listDestinationsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ConfigurationApi.execute(() -> destinationHandler.listDestinationsForWorkspace(workspaceIdRequestBody)); + return ApiHelper.execute(() -> destinationHandler.listDestinationsForWorkspace(workspaceIdRequestBody)); } @Override public DestinationReadList searchDestinations(final DestinationSearch destinationSearch) { - return ConfigurationApi.execute(() -> destinationHandler.searchDestinations(destinationSearch)); + return ApiHelper.execute(() -> destinationHandler.searchDestinations(destinationSearch)); } @Override public DestinationRead updateDestination(final DestinationUpdate destinationUpdate) { - return ConfigurationApi.execute(() -> destinationHandler.updateDestination(destinationUpdate)); + return ApiHelper.execute(() -> destinationHandler.updateDestination(destinationUpdate)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java index 209a1f1c3eb00..f5916639534d5 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionApiController.java @@ -28,19 +28,19 @@ public class DestinationDefinitionApiController implements DestinationDefinition @Override public DestinationDefinitionRead createCustomDestinationDefinition(final CustomDestinationDefinitionCreate customDestinationDefinitionCreate) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.createCustomDestinationDefinition(customDestinationDefinitionCreate)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.createCustomDestinationDefinition(customDestinationDefinitionCreate)); } // TODO: Deprecate this route in favor of createCustomDestinationDefinition // since all connector definitions created through the API are custom @Override public DestinationDefinitionRead createDestinationDefinition(final DestinationDefinitionCreate destinationDefinitionCreate) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.createPrivateDestinationDefinition(destinationDefinitionCreate)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.createPrivateDestinationDefinition(destinationDefinitionCreate)); } @Override public void deleteCustomDestinationDefinition(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { destinationDefinitionsHandler.deleteCustomDestinationDefinition(destinationDefinitionIdWithWorkspaceId); return null; }); @@ -48,7 +48,7 @@ public void deleteCustomDestinationDefinition(final DestinationDefinitionIdWithW @Override public void deleteDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { destinationDefinitionsHandler.deleteDestinationDefinition(destinationDefinitionIdRequestBody); return null; }); @@ -56,43 +56,43 @@ public void deleteDestinationDefinition(final DestinationDefinitionIdRequestBody @Override public DestinationDefinitionRead getDestinationDefinition(final DestinationDefinitionIdRequestBody destinationDefinitionIdRequestBody) { - return ConfigurationApi.execute(() -> 
destinationDefinitionsHandler.getDestinationDefinition(destinationDefinitionIdRequestBody)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.getDestinationDefinition(destinationDefinitionIdRequestBody)); } @Override public DestinationDefinitionRead getDestinationDefinitionForWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.getDestinationDefinitionForWorkspace(destinationDefinitionIdWithWorkspaceId)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.getDestinationDefinitionForWorkspace(destinationDefinitionIdWithWorkspaceId)); } @Override public PrivateDestinationDefinitionRead grantDestinationDefinitionToWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - return ConfigurationApi + return ApiHelper .execute(() -> destinationDefinitionsHandler.grantDestinationDefinitionToWorkspace(destinationDefinitionIdWithWorkspaceId)); } @Override public DestinationDefinitionReadList listDestinationDefinitions() { - return ConfigurationApi.execute(destinationDefinitionsHandler::listDestinationDefinitions); + return ApiHelper.execute(destinationDefinitionsHandler::listDestinationDefinitions); } @Override public DestinationDefinitionReadList listDestinationDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.listDestinationDefinitionsForWorkspace(workspaceIdRequestBody)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.listDestinationDefinitionsForWorkspace(workspaceIdRequestBody)); } @Override public DestinationDefinitionReadList listLatestDestinationDefinitions() { - return ConfigurationApi.execute(destinationDefinitionsHandler::listLatestDestinationDefinitions); + return ApiHelper.execute(destinationDefinitionsHandler::listLatestDestinationDefinitions); } @Override public PrivateDestinationDefinitionReadList listPrivateDestinationDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.listPrivateDestinationDefinitions(workspaceIdRequestBody)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.listPrivateDestinationDefinitions(workspaceIdRequestBody)); } @Override public void revokeDestinationDefinitionFromWorkspace(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { destinationDefinitionsHandler.revokeDestinationDefinitionFromWorkspace(destinationDefinitionIdWithWorkspaceId); return null; }); @@ -100,12 +100,12 @@ public void revokeDestinationDefinitionFromWorkspace(final DestinationDefinition @Override public DestinationDefinitionRead updateCustomDestinationDefinition(final CustomDestinationDefinitionUpdate customDestinationDefinitionUpdate) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.updateCustomDestinationDefinition(customDestinationDefinitionUpdate)); + return ApiHelper.execute(() -> destinationDefinitionsHandler.updateCustomDestinationDefinition(customDestinationDefinitionUpdate)); } @Override public DestinationDefinitionRead updateDestinationDefinition(final DestinationDefinitionUpdate destinationDefinitionUpdate) { - return ConfigurationApi.execute(() -> destinationDefinitionsHandler.updateDestinationDefinition(destinationDefinitionUpdate)); + return ApiHelper.execute(() -> 
destinationDefinitionsHandler.updateDestinationDefinition(destinationDefinitionUpdate)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java index 1840593567464..74853019c8dc1 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiController.java @@ -19,7 +19,7 @@ public class DestinationDefinitionSpecificationApiController implements Destinat @Override public DestinationDefinitionSpecificationRead getDestinationDefinitionSpecification(final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId) { - return ConfigurationApi.execute(() -> schedulerHandler.getDestinationSpecification(destinationDefinitionIdWithWorkspaceId)); + return ApiHelper.execute(() -> schedulerHandler.getDestinationSpecification(destinationDefinitionIdWithWorkspaceId)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java index 0a58c40a74137..fcbaa9a2675fd 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/DestinationOauthApiController.java @@ -22,17 +22,17 @@ public class DestinationOauthApiController implements DestinationOauthApi { @Override public Map completeDestinationOAuth(final CompleteDestinationOAuthRequest completeDestinationOAuthRequest) { - return ConfigurationApi.execute(() -> oAuthHandler.completeDestinationOAuth(completeDestinationOAuthRequest)); + return ApiHelper.execute(() -> oAuthHandler.completeDestinationOAuth(completeDestinationOAuthRequest)); } @Override public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsentRequest destinationOauthConsentRequest) { - return ConfigurationApi.execute(() -> oAuthHandler.getDestinationOAuthConsent(destinationOauthConsentRequest)); + return ApiHelper.execute(() -> oAuthHandler.getDestinationOAuthConsent(destinationOauthConsentRequest)); } @Override public void setInstancewideDestinationOauthParams(final SetInstancewideDestinationOauthParamsRequestBody setInstancewideDestinationOauthParamsRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { oAuthHandler.setDestinationInstancewideOauthParams(setInstancewideDestinationOauthParamsRequestBody); return null; }); diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java index 0c1eb3a276a1e..197a861a63e58 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/JobsApiController.java @@ -26,32 +26,32 @@ public class JobsApiController implements JobsApi { @Override public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) { - return ConfigurationApi.execute(() -> schedulerHandler.cancelJob(jobIdRequestBody)); + return ApiHelper.execute(() -> schedulerHandler.cancelJob(jobIdRequestBody)); } @Override public AttemptNormalizationStatusReadList getAttemptNormalizationStatusesForJob(final JobIdRequestBody jobIdRequestBody) { - return ConfigurationApi.execute(() -> 
jobHistoryHandler.getAttemptNormalizationStatuses(jobIdRequestBody)); + return ApiHelper.execute(() -> jobHistoryHandler.getAttemptNormalizationStatuses(jobIdRequestBody)); } @Override public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) { - return ConfigurationApi.execute(() -> jobHistoryHandler.getJobDebugInfo(jobIdRequestBody)); + return ApiHelper.execute(() -> jobHistoryHandler.getJobDebugInfo(jobIdRequestBody)); } @Override public JobInfoRead getJobInfo(final JobIdRequestBody jobIdRequestBody) { - return ConfigurationApi.execute(() -> jobHistoryHandler.getJobInfo(jobIdRequestBody)); + return ApiHelper.execute(() -> jobHistoryHandler.getJobInfo(jobIdRequestBody)); } @Override public JobInfoLightRead getJobInfoLight(final JobIdRequestBody jobIdRequestBody) { - return ConfigurationApi.execute(() -> jobHistoryHandler.getJobInfoLight(jobIdRequestBody)); + return ApiHelper.execute(() -> jobHistoryHandler.getJobInfoLight(jobIdRequestBody)); } @Override public JobReadList listJobsFor(final JobListRequestBody jobListRequestBody) { - return ConfigurationApi.execute(() -> jobHistoryHandler.listJobsFor(jobListRequestBody)); + return ApiHelper.execute(() -> jobHistoryHandler.listJobsFor(jobListRequestBody)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java index b3c1e5f586190..95e7758f4f689 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/LogsApiController.java @@ -19,7 +19,7 @@ public class LogsApiController implements LogsApi { @Override public File getLogs(final LogsRequestBody logsRequestBody) { - return ConfigurationApi.execute(() -> logsHandler.getLogs(logsRequestBody)); + return ApiHelper.execute(() -> logsHandler.getLogs(logsRequestBody)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java index e6f6df3b6370f..981ad80f7e8d4 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/NotificationsApiController.java @@ -19,7 +19,7 @@ public class NotificationsApiController implements NotificationsApi { @Override public NotificationRead tryNotificationConfig(final Notification notification) { - return ConfigurationApi.execute(() -> workspacesHandler.tryNotification(notification)); + return ApiHelper.execute(() -> workspacesHandler.tryNotification(notification)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java index c93ebd290fdda..ae749403b4201 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/OpenapiApiController.java @@ -18,7 +18,7 @@ public class OpenapiApiController implements OpenapiApi { @Override public File getOpenApiSpec() { - return ConfigurationApi.execute(openApiConfigHandler::getFile); + return ApiHelper.execute(openApiConfigHandler::getFile); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java index fb6f9b04b0f27..3b4eeb9ee431b 100644 --- 
a/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/OperationApiController.java @@ -25,17 +25,17 @@ public class OperationApiController implements OperationApi { @Override public CheckOperationRead checkOperation(final OperatorConfiguration operatorConfiguration) { - return ConfigurationApi.execute(() -> operationsHandler.checkOperation(operatorConfiguration)); + return ApiHelper.execute(() -> operationsHandler.checkOperation(operatorConfiguration)); } @Override public OperationRead createOperation(final OperationCreate operationCreate) { - return ConfigurationApi.execute(() -> operationsHandler.createOperation(operationCreate)); + return ApiHelper.execute(() -> operationsHandler.createOperation(operationCreate)); } @Override public void deleteOperation(final OperationIdRequestBody operationIdRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { operationsHandler.deleteOperation(operationIdRequestBody); return null; }); @@ -43,17 +43,17 @@ public void deleteOperation(final OperationIdRequestBody operationIdRequestBody) @Override public OperationRead getOperation(final OperationIdRequestBody operationIdRequestBody) { - return ConfigurationApi.execute(() -> operationsHandler.getOperation(operationIdRequestBody)); + return ApiHelper.execute(() -> operationsHandler.getOperation(operationIdRequestBody)); } @Override public OperationReadList listOperationsForConnection(final ConnectionIdRequestBody connectionIdRequestBody) { - return ConfigurationApi.execute(() -> operationsHandler.listOperationsForConnection(connectionIdRequestBody)); + return ApiHelper.execute(() -> operationsHandler.listOperationsForConnection(connectionIdRequestBody)); } @Override public OperationRead updateOperation(final OperationUpdate operationUpdate) { - return ConfigurationApi.execute(() -> operationsHandler.updateOperation(operationUpdate)); + return ApiHelper.execute(() -> operationsHandler.updateOperation(operationUpdate)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java index a3cd0baecbe1c..addc445d3c0fa 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SchedulerApiController.java @@ -19,17 +19,17 @@ public class SchedulerApiController implements SchedulerApi { @Override public CheckConnectionRead executeDestinationCheckConnection(final DestinationCoreConfig destinationCoreConfig) { - return ConfigurationApi.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationCreate(destinationCoreConfig)); + return ApiHelper.execute(() -> schedulerHandler.checkDestinationConnectionFromDestinationCreate(destinationCoreConfig)); } @Override public CheckConnectionRead executeSourceCheckConnection(final SourceCoreConfig sourceCoreConfig) { - return ConfigurationApi.execute(() -> schedulerHandler.checkSourceConnectionFromSourceCreate(sourceCoreConfig)); + return ApiHelper.execute(() -> schedulerHandler.checkSourceConnectionFromSourceCreate(sourceCoreConfig)); } @Override public SourceDiscoverSchemaRead executeSourceDiscoverSchema(final SourceCoreConfig sourceCoreConfig) { - return ConfigurationApi.execute(() -> schedulerHandler.discoverSchemaForSourceFromSourceCreate(sourceCoreConfig)); + return ApiHelper.execute(() -> 
schedulerHandler.discoverSchemaForSourceFromSourceCreate(sourceCoreConfig)); } } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java index 4086da7f60f2a..595087e0d198f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceApiController.java @@ -30,27 +30,27 @@ public class SourceApiController implements SourceApi { @Override public CheckConnectionRead checkConnectionToSource(final SourceIdRequestBody sourceIdRequestBody) { - return ConfigurationApi.execute(() -> schedulerHandler.checkSourceConnectionFromSourceId(sourceIdRequestBody)); + return ApiHelper.execute(() -> schedulerHandler.checkSourceConnectionFromSourceId(sourceIdRequestBody)); } @Override public CheckConnectionRead checkConnectionToSourceForUpdate(final SourceUpdate sourceUpdate) { - return ConfigurationApi.execute(() -> schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(sourceUpdate)); + return ApiHelper.execute(() -> schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(sourceUpdate)); } @Override public SourceRead cloneSource(final SourceCloneRequestBody sourceCloneRequestBody) { - return ConfigurationApi.execute(() -> sourceHandler.cloneSource(sourceCloneRequestBody)); + return ApiHelper.execute(() -> sourceHandler.cloneSource(sourceCloneRequestBody)); } @Override public SourceRead createSource(final SourceCreate sourceCreate) { - return ConfigurationApi.execute(() -> sourceHandler.createSource(sourceCreate)); + return ApiHelper.execute(() -> sourceHandler.createSource(sourceCreate)); } @Override public void deleteSource(final SourceIdRequestBody sourceIdRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { sourceHandler.deleteSource(sourceIdRequestBody); return null; }); @@ -58,27 +58,27 @@ public void deleteSource(final SourceIdRequestBody sourceIdRequestBody) { @Override public SourceDiscoverSchemaRead discoverSchemaForSource(final SourceDiscoverSchemaRequestBody sourceDiscoverSchemaRequestBody) { - return ConfigurationApi.execute(() -> schedulerHandler.discoverSchemaForSourceFromSourceId(sourceDiscoverSchemaRequestBody)); + return ApiHelper.execute(() -> schedulerHandler.discoverSchemaForSourceFromSourceId(sourceDiscoverSchemaRequestBody)); } @Override public SourceRead getSource(final SourceIdRequestBody sourceIdRequestBody) { - return ConfigurationApi.execute(() -> sourceHandler.getSource(sourceIdRequestBody)); + return ApiHelper.execute(() -> sourceHandler.getSource(sourceIdRequestBody)); } @Override public SourceReadList listSourcesForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { - return ConfigurationApi.execute(() -> sourceHandler.listSourcesForWorkspace(workspaceIdRequestBody)); + return ApiHelper.execute(() -> sourceHandler.listSourcesForWorkspace(workspaceIdRequestBody)); } @Override public SourceReadList searchSources(final SourceSearch sourceSearch) { - return ConfigurationApi.execute(() -> sourceHandler.searchSources(sourceSearch)); + return ApiHelper.execute(() -> sourceHandler.searchSources(sourceSearch)); } @Override public SourceRead updateSource(final SourceUpdate sourceUpdate) { - return ConfigurationApi.execute(() -> sourceHandler.updateSource(sourceUpdate)); + return ApiHelper.execute(() -> sourceHandler.updateSource(sourceUpdate)); } } diff --git 
a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java new file mode 100644 index 0000000000000..c47ab1fac43e6 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis; + +import io.airbyte.api.generated.SourceDefinitionApi; +import io.airbyte.api.model.generated.CustomSourceDefinitionCreate; +import io.airbyte.api.model.generated.CustomSourceDefinitionUpdate; +import io.airbyte.api.model.generated.PrivateSourceDefinitionRead; +import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList; +import io.airbyte.api.model.generated.SourceDefinitionCreate; +import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; +import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; +import io.airbyte.api.model.generated.SourceDefinitionRead; +import io.airbyte.api.model.generated.SourceDefinitionReadList; +import io.airbyte.api.model.generated.SourceDefinitionUpdate; +import io.airbyte.api.model.generated.WorkspaceIdRequestBody; +import io.airbyte.server.handlers.SourceDefinitionsHandler; +import javax.ws.rs.Path; +import lombok.AllArgsConstructor; + +@Path("/v1/source_definitions") +@AllArgsConstructor +public class SourceDefinitionApiController implements SourceDefinitionApi { + + private final SourceDefinitionsHandler sourceDefinitionsHandler; + + @Override + public SourceDefinitionRead createCustomSourceDefinition(final CustomSourceDefinitionCreate customSourceDefinitionCreate) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.createCustomSourceDefinition(customSourceDefinitionCreate)); + } + + // TODO: Deprecate this route in favor of createCustomSourceDefinition + // since all connector definitions created through the API are custom + @Override + public SourceDefinitionRead createSourceDefinition(final SourceDefinitionCreate sourceDefinitionCreate) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.createPrivateSourceDefinition(sourceDefinitionCreate)); + } + + @Override + public void deleteCustomSourceDefinition(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { + ApiHelper.execute(() -> { + sourceDefinitionsHandler.deleteCustomSourceDefinition(sourceDefinitionIdWithWorkspaceId); + return null; + }); + } + + @Override + public void deleteSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) { + ApiHelper.execute(() -> { + sourceDefinitionsHandler.deleteSourceDefinition(sourceDefinitionIdRequestBody); + return null; + }); + } + + @Override + public SourceDefinitionRead getSourceDefinition(final SourceDefinitionIdRequestBody sourceDefinitionIdRequestBody) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.getSourceDefinition(sourceDefinitionIdRequestBody)); + } + + @Override + public SourceDefinitionRead getSourceDefinitionForWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.getSourceDefinitionForWorkspace(sourceDefinitionIdWithWorkspaceId)); + } + + @Override + public PrivateSourceDefinitionRead grantSourceDefinitionToWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { + return ApiHelper.execute(() -> 
sourceDefinitionsHandler.grantSourceDefinitionToWorkspace(sourceDefinitionIdWithWorkspaceId)); + } + + @Override + public SourceDefinitionReadList listLatestSourceDefinitions() { + return ApiHelper.execute(sourceDefinitionsHandler::listLatestSourceDefinitions); + } + + @Override + public PrivateSourceDefinitionReadList listPrivateSourceDefinitions(final WorkspaceIdRequestBody workspaceIdRequestBody) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.listPrivateSourceDefinitions(workspaceIdRequestBody)); + } + + @Override + public SourceDefinitionReadList listSourceDefinitions() { + return ApiHelper.execute(sourceDefinitionsHandler::listSourceDefinitions); + } + + @Override + public SourceDefinitionReadList listSourceDefinitionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.listSourceDefinitionsForWorkspace(workspaceIdRequestBody)); + } + + @Override + public void revokeSourceDefinitionFromWorkspace(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { + ApiHelper.execute(() -> { + sourceDefinitionsHandler.revokeSourceDefinitionFromWorkspace(sourceDefinitionIdWithWorkspaceId); + return null; + }); + } + + @Override + public SourceDefinitionRead updateCustomSourceDefinition(final CustomSourceDefinitionUpdate customSourceDefinitionUpdate) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.updateCustomSourceDefinition(customSourceDefinitionUpdate)); + } + + @Override + public SourceDefinitionRead updateSourceDefinition(final SourceDefinitionUpdate sourceDefinitionUpdate) { + return ApiHelper.execute(() -> sourceDefinitionsHandler.updateSourceDefinition(sourceDefinitionUpdate)); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiController.java new file mode 100644 index 0000000000000..4068f11740489 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiController.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis; + +import io.airbyte.api.generated.SourceDefinitionSpecificationApi; +import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; +import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead; +import io.airbyte.server.handlers.SchedulerHandler; +import javax.ws.rs.Path; +import lombok.AllArgsConstructor; + +@Path("/v1/source_definition_specifications/get") +@AllArgsConstructor +public class SourceDefinitionSpecificationApiController implements SourceDefinitionSpecificationApi { + + private final SchedulerHandler schedulerHandler; + + @Override + public SourceDefinitionSpecificationRead getSourceDefinitionSpecification(final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId) { + return ApiHelper.execute(() -> schedulerHandler.getSourceDefinitionSpecification(sourceDefinitionIdWithWorkspaceId)); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java index fc454fa10cc9b..84e0782a30200 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceOauthApiController.java @@ -22,17 +22,17 @@ public class SourceOauthApiController implements SourceOauthApi { @Override public Map completeSourceOAuth(final CompleteSourceOauthRequest completeSourceOauthRequest) { - return ConfigurationApi.execute(() -> oAuthHandler.completeSourceOAuth(completeSourceOauthRequest)); + return ApiHelper.execute(() -> oAuthHandler.completeSourceOAuth(completeSourceOauthRequest)); } @Override public OAuthConsentRead getSourceOAuthConsent(final SourceOauthConsentRequest sourceOauthConsentRequest) { - return ConfigurationApi.execute(() -> oAuthHandler.getSourceOAuthConsent(sourceOauthConsentRequest)); + return ApiHelper.execute(() -> oAuthHandler.getSourceOAuthConsent(sourceOauthConsentRequest)); } @Override public void setInstancewideSourceOauthParams(final SetInstancewideSourceOauthParamsRequestBody setInstancewideSourceOauthParamsRequestBody) { - ConfigurationApi.execute(() -> { + ApiHelper.execute(() -> { oAuthHandler.setSourceInstancewideOauthParams(setInstancewideSourceOauthParamsRequestBody); return null; }); diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/StateApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/StateApiController.java new file mode 100644 index 0000000000000..e148c1010fe3a --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/StateApiController.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis; + +import io.airbyte.api.generated.StateApi; +import io.airbyte.api.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.model.generated.ConnectionState; +import io.airbyte.api.model.generated.ConnectionStateCreateOrUpdate; +import io.airbyte.server.handlers.StateHandler; +import javax.ws.rs.Path; +import lombok.AllArgsConstructor; + +@Path("/v1/state") +@AllArgsConstructor +public class StateApiController implements StateApi { + + private final StateHandler stateHandler; + + @Override + public ConnectionState createOrUpdateState(final ConnectionStateCreateOrUpdate connectionStateCreateOrUpdate) { + return ApiHelper.execute(() -> stateHandler.createOrUpdateState(connectionStateCreateOrUpdate)); + } + + @Override + public ConnectionState getState(final ConnectionIdRequestBody connectionIdRequestBody) { + return ApiHelper.execute(() -> stateHandler.getState(connectionIdRequestBody)); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java new file mode 100644 index 0000000000000..e08cef94ee0b6 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis; + +import io.airbyte.api.generated.WebBackendApi; +import io.airbyte.api.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.model.generated.ConnectionStateType; +import io.airbyte.api.model.generated.WebBackendConnectionCreate; +import io.airbyte.api.model.generated.WebBackendConnectionRead; +import io.airbyte.api.model.generated.WebBackendConnectionReadList; +import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; +import io.airbyte.api.model.generated.WebBackendConnectionUpdate; +import io.airbyte.api.model.generated.WebBackendGeographiesListResult; +import io.airbyte.api.model.generated.WebBackendWorkspaceState; +import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; +import io.airbyte.api.model.generated.WorkspaceIdRequestBody; +import io.airbyte.server.handlers.WebBackendConnectionsHandler; +import io.airbyte.server.handlers.WebBackendGeographiesHandler; +import javax.ws.rs.Path; +import lombok.AllArgsConstructor; + +@Path("/v1/web_backend") +@AllArgsConstructor +public class WebBackendApiController implements WebBackendApi { + + private final WebBackendConnectionsHandler webBackendConnectionsHandler; + private final WebBackendGeographiesHandler webBackendGeographiesHandler; + + @Override + public ConnectionStateType getStateType(final ConnectionIdRequestBody connectionIdRequestBody) { + return ApiHelper.execute(() -> webBackendConnectionsHandler.getStateType(connectionIdRequestBody)); + } + + @Override + public WebBackendConnectionRead webBackendCreateConnection(final WebBackendConnectionCreate webBackendConnectionCreate) { + return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendCreateConnection(webBackendConnectionCreate)); + } + + @Override + public WebBackendConnectionRead webBackendGetConnection(final WebBackendConnectionRequestBody webBackendConnectionRequestBody) { + return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendGetConnection(webBackendConnectionRequestBody)); + } + + @Override + public WebBackendWorkspaceStateResult webBackendGetWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) { + return 
ApiHelper.execute(() -> webBackendConnectionsHandler.getWorkspaceState(webBackendWorkspaceState)); + } + + @Override + public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { + return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendListConnectionsForWorkspace(workspaceIdRequestBody)); + } + + @Override + public WebBackendGeographiesListResult webBackendListGeographies() { + return ApiHelper.execute(webBackendGeographiesHandler::listGeographiesOSS); + } + + @Override + public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConnectionUpdate webBackendConnectionUpdate) { + return ApiHelper.execute(() -> webBackendConnectionsHandler.webBackendUpdateConnection(webBackendConnectionUpdate)); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java new file mode 100644 index 0000000000000..bfba9954cfaf3 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/WorkspaceApiController.java @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis; + +import io.airbyte.api.generated.WorkspaceApi; +import io.airbyte.api.model.generated.ConnectionIdRequestBody; +import io.airbyte.api.model.generated.SlugRequestBody; +import io.airbyte.api.model.generated.WorkspaceCreate; +import io.airbyte.api.model.generated.WorkspaceGiveFeedback; +import io.airbyte.api.model.generated.WorkspaceIdRequestBody; +import io.airbyte.api.model.generated.WorkspaceRead; +import io.airbyte.api.model.generated.WorkspaceReadList; +import io.airbyte.api.model.generated.WorkspaceUpdate; +import io.airbyte.api.model.generated.WorkspaceUpdateName; +import io.airbyte.server.handlers.WorkspacesHandler; +import javax.ws.rs.Path; +import lombok.AllArgsConstructor; + +@Path("/v1/workspaces") +@AllArgsConstructor +public class WorkspaceApiController implements WorkspaceApi { + + private final WorkspacesHandler workspacesHandler; + + @Override + public WorkspaceRead createWorkspace(final WorkspaceCreate workspaceCreate) { + return ApiHelper.execute(() -> workspacesHandler.createWorkspace(workspaceCreate)); + } + + @Override + public void deleteWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { + ApiHelper.execute(() -> { + workspacesHandler.deleteWorkspace(workspaceIdRequestBody); + return null; + }); + } + + @Override + public WorkspaceRead getWorkspace(final WorkspaceIdRequestBody workspaceIdRequestBody) { + return ApiHelper.execute(() -> workspacesHandler.getWorkspace(workspaceIdRequestBody)); + } + + @Override + public WorkspaceRead getWorkspaceBySlug(final SlugRequestBody slugRequestBody) { + return ApiHelper.execute(() -> workspacesHandler.getWorkspaceBySlug(slugRequestBody)); + } + + @Override + public WorkspaceReadList listWorkspaces() { + return ApiHelper.execute(workspacesHandler::listWorkspaces); + } + + @Override + public WorkspaceRead updateWorkspace(final WorkspaceUpdate workspaceUpdate) { + return ApiHelper.execute(() -> workspacesHandler.updateWorkspace(workspaceUpdate)); + } + + @Override + public void updateWorkspaceFeedback(final WorkspaceGiveFeedback workspaceGiveFeedback) { + ApiHelper.execute(() -> { + workspacesHandler.setFeedbackDone(workspaceGiveFeedback); + return null; + }); + } + + @Override + public WorkspaceRead updateWorkspaceName(final WorkspaceUpdateName workspaceUpdateName) { + return 
ApiHelper.execute(() -> workspacesHandler.updateWorkspaceName(workspaceUpdateName)); + } + + @Override + public WorkspaceRead getWorkspaceByConnectionId(final ConnectionIdRequestBody connectionIdRequestBody) { + return ApiHelper.execute(() -> workspacesHandler.getWorkspaceByConnectionId(connectionIdRequestBody)); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/binders/SourceDefinitionApiBinder.java b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/SourceDefinitionApiBinder.java new file mode 100644 index 0000000000000..dfc976191c141 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/SourceDefinitionApiBinder.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.binders; + +import io.airbyte.server.apis.SourceDefinitionApiController; +import io.airbyte.server.apis.factories.SourceDefinitionApiFactory; +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import org.glassfish.jersey.process.internal.RequestScoped; + +public class SourceDefinitionApiBinder extends AbstractBinder { + + @Override + protected void configure() { + bindFactory(SourceDefinitionApiFactory.class) + .to(SourceDefinitionApiController.class) + .in(RequestScoped.class); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/binders/SourceDefinitionSpecificationApiBinder.java b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/SourceDefinitionSpecificationApiBinder.java new file mode 100644 index 0000000000000..5a5ae758f0199 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/SourceDefinitionSpecificationApiBinder.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.binders; + +import io.airbyte.server.apis.SourceDefinitionSpecificationApiController; +import io.airbyte.server.apis.factories.SourceDefinitionSpecificationApiFactory; +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import org.glassfish.jersey.process.internal.RequestScoped; + +public class SourceDefinitionSpecificationApiBinder extends AbstractBinder { + + @Override + protected void configure() { + bindFactory(SourceDefinitionSpecificationApiFactory.class) + .to(SourceDefinitionSpecificationApiController.class) + .in(RequestScoped.class); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/binders/StateApiBinder.java b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/StateApiBinder.java new file mode 100644 index 0000000000000..65ab669528c0f --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/StateApiBinder.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis.binders; + +import io.airbyte.server.apis.StateApiController; +import io.airbyte.server.apis.factories.StateApiFactory; +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import org.glassfish.jersey.process.internal.RequestScoped; + +public class StateApiBinder extends AbstractBinder { + + @Override + protected void configure() { + bindFactory(StateApiFactory.class) + .to(StateApiController.class) + .in(RequestScoped.class); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/binders/WebBackendApiBinder.java b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/WebBackendApiBinder.java new file mode 100644 index 0000000000000..8ad66204be335 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/WebBackendApiBinder.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.binders; + +import io.airbyte.server.apis.WebBackendApiController; +import io.airbyte.server.apis.factories.WebBackendApiFactory; +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import org.glassfish.jersey.process.internal.RequestScoped; + +public class WebBackendApiBinder extends AbstractBinder { + + @Override + protected void configure() { + bindFactory(WebBackendApiFactory.class) + .to(WebBackendApiController.class) + .in(RequestScoped.class); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/binders/WorkspaceApiBinder.java b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/WorkspaceApiBinder.java new file mode 100644 index 0000000000000..f114235bac50b --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/binders/WorkspaceApiBinder.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.binders; + +import io.airbyte.server.apis.WorkspaceApiController; +import io.airbyte.server.apis.factories.WorkspaceApiFactory; +import org.glassfish.hk2.utilities.binding.AbstractBinder; +import org.glassfish.jersey.process.internal.RequestScoped; + +public class WorkspaceApiBinder extends AbstractBinder { + + @Override + protected void configure() { + bindFactory(WorkspaceApiFactory.class) + .to(WorkspaceApiController.class) + .in(RequestScoped.class); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/factories/SourceDefinitionApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/SourceDefinitionApiFactory.java new file mode 100644 index 0000000000000..15a08c6c9e850 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/SourceDefinitionApiFactory.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis.factories; + +import io.airbyte.server.apis.SourceDefinitionApiController; +import io.airbyte.server.handlers.SourceDefinitionsHandler; +import org.glassfish.hk2.api.Factory; + +public class SourceDefinitionApiFactory implements Factory { + + private static SourceDefinitionsHandler sourceDefinitionsHandler; + + public static void setValues(final SourceDefinitionsHandler sourceDefinitionsHandler) { + SourceDefinitionApiFactory.sourceDefinitionsHandler = sourceDefinitionsHandler; + } + + @Override + public SourceDefinitionApiController provide() { + return new SourceDefinitionApiController(SourceDefinitionApiFactory.sourceDefinitionsHandler); + } + + @Override + public void dispose(final SourceDefinitionApiController instance) { + /* no op */ + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/factories/SourceDefinitionSpecificationApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/SourceDefinitionSpecificationApiFactory.java new file mode 100644 index 0000000000000..8ea1f7d456ecd --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/SourceDefinitionSpecificationApiFactory.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.factories; + +import io.airbyte.server.apis.SourceDefinitionSpecificationApiController; +import io.airbyte.server.handlers.SchedulerHandler; +import org.glassfish.hk2.api.Factory; + +public class SourceDefinitionSpecificationApiFactory implements Factory { + + private static SchedulerHandler schedulerHandler; + + public static void setValues(final SchedulerHandler schedulerHandler) { + SourceDefinitionSpecificationApiFactory.schedulerHandler = schedulerHandler; + } + + @Override + public SourceDefinitionSpecificationApiController provide() { + return new SourceDefinitionSpecificationApiController(SourceDefinitionSpecificationApiFactory.schedulerHandler); + } + + @Override + public void dispose(final SourceDefinitionSpecificationApiController instance) { + /* no op */ + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/factories/StateApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/StateApiFactory.java new file mode 100644 index 0000000000000..0498681d76292 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/StateApiFactory.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis.factories; + +import io.airbyte.server.apis.StateApiController; +import io.airbyte.server.handlers.StateHandler; +import org.glassfish.hk2.api.Factory; + +public class StateApiFactory implements Factory { + + private static StateHandler stateHandler; + + public static void setValues(final StateHandler stateHandler) { + StateApiFactory.stateHandler = stateHandler; + } + + @Override + public StateApiController provide() { + return new StateApiController(StateApiFactory.stateHandler); + } + + @Override + public void dispose(final StateApiController instance) { + /* no op */ + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/factories/WebBackendApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/WebBackendApiFactory.java new file mode 100644 index 0000000000000..936e2d9705df8 --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/WebBackendApiFactory.java @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.apis.factories; + +import io.airbyte.server.apis.WebBackendApiController; +import io.airbyte.server.handlers.WebBackendConnectionsHandler; +import io.airbyte.server.handlers.WebBackendGeographiesHandler; +import org.glassfish.hk2.api.Factory; + +public class WebBackendApiFactory implements Factory { + + private static WebBackendConnectionsHandler webBackendConnectionsHandler; + private static WebBackendGeographiesHandler webBackendGeographiesHandler; + + public static void setValues(final WebBackendConnectionsHandler webBackendConnectionsHandler, + final WebBackendGeographiesHandler webBackendGeographiesHandler) { + WebBackendApiFactory.webBackendConnectionsHandler = webBackendConnectionsHandler; + WebBackendApiFactory.webBackendGeographiesHandler = webBackendGeographiesHandler; + } + + @Override + public WebBackendApiController provide() { + return new WebBackendApiController(WebBackendApiFactory.webBackendConnectionsHandler, WebBackendApiFactory.webBackendGeographiesHandler); + } + + @Override + public void dispose(final WebBackendApiController instance) { + /* no op */ + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/factories/WorkspaceApiFactory.java b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/WorkspaceApiFactory.java new file mode 100644 index 0000000000000..644c7fdadc9aa --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/factories/WorkspaceApiFactory.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.apis.factories; + +import io.airbyte.server.apis.WorkspaceApiController; +import io.airbyte.server.handlers.WorkspacesHandler; +import org.glassfish.hk2.api.Factory; + +public class WorkspaceApiFactory implements Factory { + + private static WorkspacesHandler workspacesHandler; + + public static void setValues(final WorkspacesHandler workspacesHandler) { + WorkspaceApiFactory.workspacesHandler = workspacesHandler; + } + + @Override + public WorkspaceApiController provide() { + return new WorkspaceApiController(WorkspaceApiFactory.workspacesHandler); + } + + @Override + public void dispose(final WorkspaceApiController instance) { + /* no op */ + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/OperationsConverter.java b/airbyte-server/src/main/java/io/airbyte/server/converters/OperationsConverter.java index c5cae02499473..9862ba52b6111 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/converters/OperationsConverter.java +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/OperationsConverter.java @@ -4,10 +4,13 @@ package io.airbyte.server.converters; +import static io.airbyte.api.model.generated.OperatorWebhook.WebhookTypeEnum.DBTCLOUD; + import com.google.common.base.Preconditions; import io.airbyte.api.model.generated.OperationRead; import io.airbyte.api.model.generated.OperatorConfiguration; import io.airbyte.api.model.generated.OperatorNormalization.OptionEnum; +import io.airbyte.api.model.generated.OperatorWebhookDbtCloud; import io.airbyte.commons.enums.Enums; import io.airbyte.config.OperatorDbt; import io.airbyte.config.OperatorNormalization; @@ -15,6 +18,8 @@ import io.airbyte.config.OperatorWebhook; import io.airbyte.config.StandardSyncOperation; import io.airbyte.config.StandardSyncOperation.OperatorType; +import java.util.regex.Matcher; +import java.util.regex.Pattern; public class OperationsConverter { @@ -44,10 +49,7 @@ public static void populateOperatorConfigFromApi(final OperatorConfiguration ope case WEBHOOK -> { Preconditions.checkArgument(operatorConfig.getWebhook() != null); // TODO(mfsiega-airbyte): check that the webhook config id references a real webhook config. - standardSyncOperation.withOperatorWebhook(new OperatorWebhook() - .withExecutionUrl(operatorConfig.getWebhook().getExecutionUrl()) - .withExecutionBody(operatorConfig.getWebhook().getExecutionBody()) - .withWebhookConfigId(operatorConfig.getWebhook().getWebhookConfigId())); + standardSyncOperation.withOperatorWebhook(webhookOperatorFromConfig(operatorConfig.getWebhook())); // Null out the other configs, since it's mutually exclusive. We need to do this if it's an update. 
standardSyncOperation.withOperatorNormalization(null); standardSyncOperation.withOperatorDbt(null); @@ -82,10 +84,7 @@ public static OperationRead operationReadFromPersistedOperation(final StandardSy } case WEBHOOK -> { Preconditions.checkArgument(standardSyncOperation.getOperatorWebhook() != null); - operatorConfiguration.webhook(new io.airbyte.api.model.generated.OperatorWebhook() - .webhookConfigId(standardSyncOperation.getOperatorWebhook().getWebhookConfigId()) - .executionUrl(standardSyncOperation.getOperatorWebhook().getExecutionUrl()) - .executionBody(standardSyncOperation.getOperatorWebhook().getExecutionBody())); + operatorConfiguration.webhook(webhookOperatorFromPersistence(standardSyncOperation.getOperatorWebhook())); } } return new OperationRead() @@ -95,4 +94,68 @@ public static OperationRead operationReadFromPersistedOperation(final StandardSy .operatorConfiguration(operatorConfiguration); } + private static OperatorWebhook webhookOperatorFromConfig(io.airbyte.api.model.generated.OperatorWebhook webhookConfig) { + final var operatorWebhook = new OperatorWebhook().withWebhookConfigId(webhookConfig.getWebhookConfigId()); + // TODO(mfsiega-airbyte): remove this once the frontend is sending the new format. + if (webhookConfig.getWebhookType() == null) { + return operatorWebhook + .withExecutionUrl(webhookConfig.getExecutionUrl()) + .withExecutionBody(webhookConfig.getExecutionBody()); + } + switch (webhookConfig.getWebhookType()) { + case DBTCLOUD -> { + return operatorWebhook + .withExecutionUrl(DbtCloudOperationConverter.getExecutionUrlFrom(webhookConfig.getDbtCloud())) + .withExecutionBody(DbtCloudOperationConverter.getDbtCloudExecutionBody()); + } + // Future webhook operator types added here. + } + throw new IllegalArgumentException("Unsupported webhook operation type"); + } + + private static io.airbyte.api.model.generated.OperatorWebhook webhookOperatorFromPersistence(final OperatorWebhook persistedWebhook) { + final io.airbyte.api.model.generated.OperatorWebhook webhookOperator = new io.airbyte.api.model.generated.OperatorWebhook() + .webhookConfigId(persistedWebhook.getWebhookConfigId()); + OperatorWebhookDbtCloud dbtCloudOperator = DbtCloudOperationConverter.parseFrom(persistedWebhook); + if (dbtCloudOperator != null) { + webhookOperator.webhookType(DBTCLOUD).dbtCloud(dbtCloudOperator); + // TODO(mfsiega-airbyte): remove once frontend switches to new format. + // Dual-write deprecated webhook format. + webhookOperator.executionUrl(DbtCloudOperationConverter.getExecutionUrlFrom(dbtCloudOperator)); + webhookOperator.executionBody(DbtCloudOperationConverter.getDbtCloudExecutionBody()); + } else { + throw new IllegalArgumentException("Unexpected webhook operator config"); + } + return webhookOperator; + } + + private static class DbtCloudOperationConverter { + + // See https://docs.getdbt.com/dbt-cloud/api-v2 for documentation on dbt Cloud API endpoints. 
+ final static Pattern dbtUrlPattern = Pattern.compile("^https://cloud\\.getdbt\\.com/api/v2/accounts/(\\d+)/jobs/(\\d+)/run/$"); + private static final int ACCOUNT_REGEX_GROUP = 1; + private static final int JOB_REGEX_GROUP = 2; + + private static OperatorWebhookDbtCloud parseFrom(OperatorWebhook persistedWebhook) { + Matcher dbtCloudUrlMatcher = dbtUrlPattern.matcher(persistedWebhook.getExecutionUrl()); + final var dbtCloudConfig = new OperatorWebhookDbtCloud(); + if (dbtCloudUrlMatcher.matches()) { + dbtCloudConfig.setAccountId(Integer.valueOf(dbtCloudUrlMatcher.group(ACCOUNT_REGEX_GROUP))); + dbtCloudConfig.setJobId(Integer.valueOf(dbtCloudUrlMatcher.group(JOB_REGEX_GROUP))); + return dbtCloudConfig; + } + return null; + } + + private static String getExecutionUrlFrom(final OperatorWebhookDbtCloud dbtCloudConfig) { + return String.format("https://cloud.getdbt.com/api/v2/accounts/%d/jobs/%d/run/", dbtCloudConfig.getAccountId(), + dbtCloudConfig.getJobId()); + } + + private static String getDbtCloudExecutionBody() { + return "{\"cause\": \"airbyte\"}"; + } + + } + } diff --git a/airbyte-server/src/main/java/io/airbyte/server/converters/WorkflowStateConverter.java b/airbyte-server/src/main/java/io/airbyte/server/converters/WorkflowStateConverter.java new file mode 100644 index 0000000000000..ff68bb6b1dd2d --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/converters/WorkflowStateConverter.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.server.converters; + +import io.airbyte.api.model.generated.WorkflowStateRead; +import io.airbyte.commons.temporal.scheduling.state.WorkflowState; + +public class WorkflowStateConverter { + + public WorkflowStateRead getWorkflowStateRead(final WorkflowState workflowState) { + return new WorkflowStateRead().running(workflowState.isRunning()); + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java index fe1df0a0dfd71..41c013e2a43b3 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/ConnectionsHandler.java @@ -78,29 +78,34 @@ public class ConnectionsHandler { private final WorkspaceHelper workspaceHelper; private final TrackingClient trackingClient; private final EventRunner eventRunner; + private final ConnectionHelper connectionHelper; @VisibleForTesting ConnectionsHandler(final ConfigRepository configRepository, final Supplier uuidGenerator, final WorkspaceHelper workspaceHelper, final TrackingClient trackingClient, - final EventRunner eventRunner) { + final EventRunner eventRunner, + final ConnectionHelper connectionHelper) { this.configRepository = configRepository; this.uuidGenerator = uuidGenerator; this.workspaceHelper = workspaceHelper; this.trackingClient = trackingClient; this.eventRunner = eventRunner; + this.connectionHelper = connectionHelper; } public ConnectionsHandler(final ConfigRepository configRepository, final WorkspaceHelper workspaceHelper, final TrackingClient trackingClient, - final EventRunner eventRunner) { + final EventRunner eventRunner, + final ConnectionHelper connectionHelper) { this(configRepository, UUID::randomUUID, workspaceHelper, trackingClient, - eventRunner); + eventRunner, + connectionHelper); } @@ -545,8 +550,9 @@ public boolean matchSearch(final DestinationSearch destinationSearch, final Dest return 
(destinationReadFromSearch == null || destinationReadFromSearch.equals(destinationRead)); } - public void deleteConnection(final UUID connectionId) { - eventRunner.deleteConnection(connectionId); + public void deleteConnection(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { + connectionHelper.deleteConnection(connectionId); + eventRunner.forceDeleteConnection(connectionId); } private ConnectionRead buildConnectionRead(final UUID connectionId) diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationDefinitionsHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationDefinitionsHandler.java index 156b478f0663f..6eda6d4e1da50 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationDefinitionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/DestinationDefinitionsHandler.java @@ -27,6 +27,7 @@ import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.commons.version.Version; import io.airbyte.config.ActorDefinitionResourceRequirements; +import io.airbyte.config.ActorType; import io.airbyte.config.Configs; import io.airbyte.config.EnvConfigs; import io.airbyte.config.StandardDestinationDefinition; @@ -273,6 +274,7 @@ public DestinationDefinitionRead updateDestinationDefinition(final DestinationDe .withResourceRequirements(updatedResourceReqs); configRepository.writeStandardDestinationDefinition(newDestination); + configRepository.clearUnsupportedProtocolVersionFlag(newDestination.getDestinationDefinitionId(), ActorType.DESTINATION, protocolVersionRange); return buildDestinationDefinitionRead(newDestination); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java index e25bee37f04be..f61476a186009 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/JobHistoryHandler.java @@ -25,6 +25,7 @@ import io.airbyte.api.model.generated.SourceIdRequestBody; import io.airbyte.api.model.generated.SourceRead; import io.airbyte.commons.enums.Enums; +import io.airbyte.commons.temporal.TemporalClient; import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.config.Configs.WorkerEnvironment; import io.airbyte.config.JobConfig; @@ -35,6 +36,7 @@ import io.airbyte.persistence.job.models.Job; import io.airbyte.persistence.job.models.JobStatus; import io.airbyte.server.converters.JobConverter; +import io.airbyte.server.converters.WorkflowStateConverter; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.util.Collections; @@ -54,7 +56,9 @@ public class JobHistoryHandler { public static final int DEFAULT_PAGE_SIZE = 200; private final JobPersistence jobPersistence; private final JobConverter jobConverter; + private final WorkflowStateConverter workflowStateConverter; private final AirbyteVersion airbyteVersion; + private final TemporalClient temporalClient; public JobHistoryHandler(final JobPersistence jobPersistence, final WorkerEnvironment workerEnvironment, @@ -64,8 +68,10 @@ public JobHistoryHandler(final JobPersistence jobPersistence, final SourceDefinitionsHandler sourceDefinitionsHandler, final DestinationHandler destinationHandler, final DestinationDefinitionsHandler destinationDefinitionsHandler, - final AirbyteVersion airbyteVersion) { + final AirbyteVersion 
airbyteVersion, + final TemporalClient temporalClient) { jobConverter = new JobConverter(workerEnvironment, logConfigs); + workflowStateConverter = new WorkflowStateConverter(); this.jobPersistence = jobPersistence; this.connectionsHandler = connectionsHandler; this.sourceHandler = sourceHandler; @@ -73,6 +79,21 @@ public JobHistoryHandler(final JobPersistence jobPersistence, this.destinationHandler = destinationHandler; this.destinationDefinitionsHandler = destinationDefinitionsHandler; this.airbyteVersion = airbyteVersion; + this.temporalClient = temporalClient; + } + + @Deprecated(forRemoval = true) + public JobHistoryHandler(final JobPersistence jobPersistence, + final WorkerEnvironment workerEnvironment, + final LogConfigs logConfigs, + final ConnectionsHandler connectionsHandler, + final SourceHandler sourceHandler, + final SourceDefinitionsHandler sourceDefinitionsHandler, + final DestinationHandler destinationHandler, + final DestinationDefinitionsHandler destinationDefinitionsHandler, + final AirbyteVersion airbyteVersion) { + this(jobPersistence, workerEnvironment, logConfigs, connectionsHandler, sourceHandler, sourceDefinitionsHandler, destinationHandler, + destinationDefinitionsHandler, airbyteVersion, null); } @SuppressWarnings("UnstableApiUsage") @@ -122,7 +143,15 @@ public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) final Job job = jobPersistence.getJob(jobIdRequestBody.getId()); final JobInfoRead jobinfoRead = jobConverter.getJobInfoRead(job); - return buildJobDebugInfoRead(jobinfoRead); + final JobDebugInfoRead jobDebugInfoRead = buildJobDebugInfoRead(jobinfoRead); + if (temporalClient != null) { + final UUID connectionId = UUID.fromString(job.getScope()); + temporalClient.getWorkflowState(connectionId) + .map(workflowStateConverter::getWorkflowStateRead) + .ifPresent(jobDebugInfoRead::setWorkflowState); + } + + return jobDebugInfoRead; } public Optional getLatestRunningSyncJob(final UUID connectionId) throws IOException { diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/OAuthHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/OAuthHandler.java index 92241095c4a68..63667a86df5ad 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/OAuthHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/OAuthHandler.java @@ -4,6 +4,8 @@ package io.airbyte.server.handlers; +import com.fasterxml.jackson.databind.JsonNode; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.analytics.TrackingClient; import io.airbyte.api.model.generated.CompleteDestinationOAuthRequest; import io.airbyte.api.model.generated.CompleteSourceOauthRequest; @@ -12,23 +14,32 @@ import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; import io.airbyte.api.model.generated.SourceOauthConsentRequest; +import io.airbyte.commons.constants.AirbyteSecretConstants; +import io.airbyte.commons.json.JsonPaths; import io.airbyte.commons.json.Jsons; +import io.airbyte.config.DestinationConnection; import io.airbyte.config.DestinationOAuthParameter; +import io.airbyte.config.SourceConnection; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import 
io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.oauth.OAuthImplementationFactory; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; import io.airbyte.persistence.job.tracker.TrackingMetadata; import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.server.handlers.helpers.OAuthPathExtractor; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.net.http.HttpClient; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.stream.Collectors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,131 +51,188 @@ public class OAuthHandler { private final ConfigRepository configRepository; private final OAuthImplementationFactory oAuthImplementationFactory; private final TrackingClient trackingClient; + private final SecretsRepositoryReader secretsRepositoryReader; public OAuthHandler(final ConfigRepository configRepository, final HttpClient httpClient, - final TrackingClient trackingClient) { + final TrackingClient trackingClient, + final SecretsRepositoryReader secretsRepositoryReader) { this.configRepository = configRepository; this.oAuthImplementationFactory = new OAuthImplementationFactory(configRepository, httpClient); this.trackingClient = trackingClient; + this.secretsRepositoryReader = secretsRepositoryReader; } - public OAuthConsentRead getSourceOAuthConsent(final SourceOauthConsentRequest sourceDefinitionIdRequestBody) + public OAuthConsentRead getSourceOAuthConsent(final SourceOauthConsentRequest sourceOauthConsentRequest) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardSourceDefinition sourceDefinition = - configRepository.getStandardSourceDefinition(sourceDefinitionIdRequestBody.getSourceDefinitionId()); + configRepository.getStandardSourceDefinition(sourceOauthConsentRequest.getSourceDefinitionId()); final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(sourceDefinition); final ConnectorSpecification spec = sourceDefinition.getSpec(); - final Map metadata = generateSourceMetadata(sourceDefinitionIdRequestBody.getSourceDefinitionId()); + final Map metadata = generateSourceMetadata(sourceOauthConsentRequest.getSourceDefinitionId()); final OAuthConsentRead result; if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { + final JsonNode oAuthInputConfigurationForConsent; + + if (sourceOauthConsentRequest.getSourceId() == null) { + oAuthInputConfigurationForConsent = sourceOauthConsentRequest.getoAuthInputConfiguration(); + } else { + final SourceConnection hydratedSourceConnection = + secretsRepositoryReader.getSourceConnectionWithSecrets(sourceOauthConsentRequest.getSourceId()); + + oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, + hydratedSourceConnection.getConfiguration(), + sourceOauthConsentRequest.getoAuthInputConfiguration()); + } + result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getSourceConsentUrl( - sourceDefinitionIdRequestBody.getWorkspaceId(), - sourceDefinitionIdRequestBody.getSourceDefinitionId(), - sourceDefinitionIdRequestBody.getRedirectUrl(), - sourceDefinitionIdRequestBody.getoAuthInputConfiguration(), + sourceOauthConsentRequest.getWorkspaceId(), + sourceOauthConsentRequest.getSourceDefinitionId(), + sourceOauthConsentRequest.getRedirectUrl(), + oAuthInputConfigurationForConsent, 
spec.getAdvancedAuth().getOauthConfigSpecification())); } else { result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getSourceConsentUrl( - sourceDefinitionIdRequestBody.getWorkspaceId(), - sourceDefinitionIdRequestBody.getSourceDefinitionId(), - sourceDefinitionIdRequestBody.getRedirectUrl(), Jsons.emptyObject(), null)); + sourceOauthConsentRequest.getWorkspaceId(), + sourceOauthConsentRequest.getSourceDefinitionId(), + sourceOauthConsentRequest.getRedirectUrl(), Jsons.emptyObject(), null)); } try { - trackingClient.track(sourceDefinitionIdRequestBody.getWorkspaceId(), "Get Oauth Consent URL - Backend", metadata); + trackingClient.track(sourceOauthConsentRequest.getWorkspaceId(), "Get Oauth Consent URL - Backend", metadata); } catch (final Exception e) { LOGGER.error(ERROR_MESSAGE, e); } return result; } - public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsentRequest destinationDefinitionIdRequestBody) + public OAuthConsentRead getDestinationOAuthConsent(final DestinationOauthConsentRequest destinationOauthConsentRequest) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardDestinationDefinition destinationDefinition = - configRepository.getStandardDestinationDefinition(destinationDefinitionIdRequestBody.getDestinationDefinitionId()); + configRepository.getStandardDestinationDefinition(destinationOauthConsentRequest.getDestinationDefinitionId()); final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(destinationDefinition); final ConnectorSpecification spec = destinationDefinition.getSpec(); - final Map metadata = generateDestinationMetadata(destinationDefinitionIdRequestBody.getDestinationDefinitionId()); + final Map metadata = generateDestinationMetadata(destinationOauthConsentRequest.getDestinationDefinitionId()); final OAuthConsentRead result; if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { + final JsonNode oAuthInputConfigurationForConsent; + + if (destinationOauthConsentRequest.getDestinationId() == null) { + oAuthInputConfigurationForConsent = destinationOauthConsentRequest.getoAuthInputConfiguration(); + } else { + final DestinationConnection hydratedSourceConnection = + secretsRepositoryReader.getDestinationConnectionWithSecrets(destinationOauthConsentRequest.getDestinationId()); + + oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, + hydratedSourceConnection.getConfiguration(), + destinationOauthConsentRequest.getoAuthInputConfiguration()); + + } + result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getDestinationConsentUrl( - destinationDefinitionIdRequestBody.getWorkspaceId(), - destinationDefinitionIdRequestBody.getDestinationDefinitionId(), - destinationDefinitionIdRequestBody.getRedirectUrl(), - destinationDefinitionIdRequestBody.getoAuthInputConfiguration(), + destinationOauthConsentRequest.getWorkspaceId(), + destinationOauthConsentRequest.getDestinationDefinitionId(), + destinationOauthConsentRequest.getRedirectUrl(), + oAuthInputConfigurationForConsent, spec.getAdvancedAuth().getOauthConfigSpecification())); } else { result = new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getDestinationConsentUrl( - destinationDefinitionIdRequestBody.getWorkspaceId(), - destinationDefinitionIdRequestBody.getDestinationDefinitionId(), - destinationDefinitionIdRequestBody.getRedirectUrl(), Jsons.emptyObject(), null)); + destinationOauthConsentRequest.getWorkspaceId(), + 
destinationOauthConsentRequest.getDestinationDefinitionId(), + destinationOauthConsentRequest.getRedirectUrl(), Jsons.emptyObject(), null)); } try { - trackingClient.track(destinationDefinitionIdRequestBody.getWorkspaceId(), "Get Oauth Consent URL - Backend", metadata); + trackingClient.track(destinationOauthConsentRequest.getWorkspaceId(), "Get Oauth Consent URL - Backend", metadata); } catch (final Exception e) { LOGGER.error(ERROR_MESSAGE, e); } return result; } - public Map completeSourceOAuth(final CompleteSourceOauthRequest oauthSourceRequestBody) + public Map completeSourceOAuth(final CompleteSourceOauthRequest completeSourceOauthRequest) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardSourceDefinition sourceDefinition = - configRepository.getStandardSourceDefinition(oauthSourceRequestBody.getSourceDefinitionId()); + configRepository.getStandardSourceDefinition(completeSourceOauthRequest.getSourceDefinitionId()); final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(sourceDefinition); final ConnectorSpecification spec = sourceDefinition.getSpec(); - final Map metadata = generateSourceMetadata(oauthSourceRequestBody.getSourceDefinitionId()); + final Map metadata = generateSourceMetadata(completeSourceOauthRequest.getSourceDefinitionId()); final Map result; if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { + final JsonNode oAuthInputConfigurationForConsent; + + if (completeSourceOauthRequest.getSourceId() == null) { + oAuthInputConfigurationForConsent = completeSourceOauthRequest.getoAuthInputConfiguration(); + } else { + final SourceConnection hydratedSourceConnection = + secretsRepositoryReader.getSourceConnectionWithSecrets(completeSourceOauthRequest.getSourceId()); + + oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, + hydratedSourceConnection.getConfiguration(), + completeSourceOauthRequest.getoAuthInputConfiguration()); + } + result = oAuthFlowImplementation.completeSourceOAuth( - oauthSourceRequestBody.getWorkspaceId(), - oauthSourceRequestBody.getSourceDefinitionId(), - oauthSourceRequestBody.getQueryParams(), - oauthSourceRequestBody.getRedirectUrl(), - oauthSourceRequestBody.getoAuthInputConfiguration(), + completeSourceOauthRequest.getWorkspaceId(), + completeSourceOauthRequest.getSourceDefinitionId(), + completeSourceOauthRequest.getQueryParams(), + completeSourceOauthRequest.getRedirectUrl(), + oAuthInputConfigurationForConsent, spec.getAdvancedAuth().getOauthConfigSpecification()); } else { // deprecated but this path is kept for connectors that don't define OAuth Spec yet result = oAuthFlowImplementation.completeSourceOAuth( - oauthSourceRequestBody.getWorkspaceId(), - oauthSourceRequestBody.getSourceDefinitionId(), - oauthSourceRequestBody.getQueryParams(), - oauthSourceRequestBody.getRedirectUrl()); + completeSourceOauthRequest.getWorkspaceId(), + completeSourceOauthRequest.getSourceDefinitionId(), + completeSourceOauthRequest.getQueryParams(), + completeSourceOauthRequest.getRedirectUrl()); } try { - trackingClient.track(oauthSourceRequestBody.getWorkspaceId(), "Complete OAuth Flow - Backend", metadata); + trackingClient.track(completeSourceOauthRequest.getWorkspaceId(), "Complete OAuth Flow - Backend", metadata); } catch (final Exception e) { LOGGER.error(ERROR_MESSAGE, e); } return result; } - public Map completeDestinationOAuth(final CompleteDestinationOAuthRequest oauthDestinationRequestBody) + public Map completeDestinationOAuth(final 
CompleteDestinationOAuthRequest completeDestinationOAuthRequest) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardDestinationDefinition destinationDefinition = - configRepository.getStandardDestinationDefinition(oauthDestinationRequestBody.getDestinationDefinitionId()); + configRepository.getStandardDestinationDefinition(completeDestinationOAuthRequest.getDestinationDefinitionId()); final OAuthFlowImplementation oAuthFlowImplementation = oAuthImplementationFactory.create(destinationDefinition); final ConnectorSpecification spec = destinationDefinition.getSpec(); - final Map metadata = generateDestinationMetadata(oauthDestinationRequestBody.getDestinationDefinitionId()); + final Map metadata = generateDestinationMetadata(completeDestinationOAuthRequest.getDestinationDefinitionId()); final Map result; if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { + final JsonNode oAuthInputConfigurationForConsent; + + if (completeDestinationOAuthRequest.getDestinationId() == null) { + oAuthInputConfigurationForConsent = completeDestinationOAuthRequest.getoAuthInputConfiguration(); + } else { + final DestinationConnection hydratedSourceConnection = + secretsRepositoryReader.getDestinationConnectionWithSecrets(completeDestinationOAuthRequest.getDestinationId()); + + oAuthInputConfigurationForConsent = getOAuthInputConfigurationForConsent(spec, + hydratedSourceConnection.getConfiguration(), + completeDestinationOAuthRequest.getoAuthInputConfiguration()); + + } + result = oAuthFlowImplementation.completeDestinationOAuth( - oauthDestinationRequestBody.getWorkspaceId(), - oauthDestinationRequestBody.getDestinationDefinitionId(), - oauthDestinationRequestBody.getQueryParams(), - oauthDestinationRequestBody.getRedirectUrl(), - oauthDestinationRequestBody.getoAuthInputConfiguration(), + completeDestinationOAuthRequest.getWorkspaceId(), + completeDestinationOAuthRequest.getDestinationDefinitionId(), + completeDestinationOAuthRequest.getQueryParams(), + completeDestinationOAuthRequest.getRedirectUrl(), + oAuthInputConfigurationForConsent, spec.getAdvancedAuth().getOauthConfigSpecification()); } else { // deprecated but this path is kept for connectors that don't define OAuth Spec yet result = oAuthFlowImplementation.completeDestinationOAuth( - oauthDestinationRequestBody.getWorkspaceId(), - oauthDestinationRequestBody.getDestinationDefinitionId(), - oauthDestinationRequestBody.getQueryParams(), - oauthDestinationRequestBody.getRedirectUrl()); + completeDestinationOAuthRequest.getWorkspaceId(), + completeDestinationOAuthRequest.getDestinationDefinitionId(), + completeDestinationOAuthRequest.getQueryParams(), + completeDestinationOAuthRequest.getRedirectUrl()); } try { - trackingClient.track(oauthDestinationRequestBody.getWorkspaceId(), "Complete OAuth Flow - Backend", metadata); + trackingClient.track(completeDestinationOAuthRequest.getWorkspaceId(), "Complete OAuth Flow - Backend", metadata); } catch (final Exception e) { LOGGER.error(ERROR_MESSAGE, e); } @@ -195,6 +263,19 @@ public void setDestinationInstancewideOauthParams(final SetInstancewideDestinati configRepository.writeDestinationOAuthParam(param); } + private JsonNode getOAuthInputConfigurationForConsent(final ConnectorSpecification spec, + final JsonNode hydratedSourceConnectionConfiguration, + final JsonNode oAuthInputConfiguration) { + final Map fieldsToGet = + buildJsonPathFromOAuthFlowInitParameters(OAuthPathExtractor.extractOauthConfigurationPaths( + 
spec.getAdvancedAuth().getOauthConfigSpecification().getOauthUserInputFromConnectorConfigSpecification())); + + final JsonNode oAuthInputConfigurationFromDB = getOAuthInputConfiguration(hydratedSourceConnectionConfiguration, fieldsToGet); + + return getOauthFromDBIfNeeded(oAuthInputConfigurationFromDB, + oAuthInputConfiguration); + } + private Map generateSourceMetadata(final UUID sourceDefinitionId) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); @@ -207,4 +288,40 @@ private Map generateDestinationMetadata(final UUID destinationDe return TrackingMetadata.generateDestinationDefinitionMetadata(destinationDefinition); } + @VisibleForTesting + Map buildJsonPathFromOAuthFlowInitParameters(final Map> oAuthFlowInitParameters) { + return oAuthFlowInitParameters.entrySet().stream() + .map(entry -> Map.entry(entry.getKey(), "$." + String.join(".", entry.getValue()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @VisibleForTesting + JsonNode getOauthFromDBIfNeeded(final JsonNode oAuthInputConfigurationFromDB, final JsonNode oAuthInputConfigurationFromInput) { + final Map result = new HashMap<>(); + + Jsons.deserializeToStringMap(oAuthInputConfigurationFromInput) + .forEach((k, v) -> { + if (AirbyteSecretConstants.SECRETS_MASK.equals(v)) { + if (oAuthInputConfigurationFromDB.has(k)) { + result.put(k, oAuthInputConfigurationFromDB.get(k).textValue()); + } else { + LOGGER.warn("Missing the key {} in the config store in DB", k); + } + + } else { + result.put(k, v); + } + }); + + return Jsons.jsonNode(result); + } + + @VisibleForTesting + JsonNode getOAuthInputConfiguration(final JsonNode hydratedSourceConnectionConfiguration, final Map pathsToGet) { + return Jsons.jsonNode(pathsToGet.entrySet().stream() + .collect(Collectors.toMap( + Map.Entry::getKey, + entry -> JsonPaths.getSingleValue(hydratedSourceConnectionConfiguration, entry.getValue()).get()))); + } + } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceDefinitionsHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceDefinitionsHandler.java index b508be1ed6412..7d07cb59ec1aa 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceDefinitionsHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceDefinitionsHandler.java @@ -28,6 +28,7 @@ import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.commons.version.Version; import io.airbyte.config.ActorDefinitionResourceRequirements; +import io.airbyte.config.ActorType; import io.airbyte.config.Configs; import io.airbyte.config.EnvConfigs; import io.airbyte.config.StandardSourceDefinition; @@ -274,6 +275,8 @@ public SourceDefinitionRead updateSourceDefinition(final SourceDefinitionUpdate .withResourceRequirements(updatedResourceReqs); configRepository.writeStandardSourceDefinition(newSource); + configRepository.clearUnsupportedProtocolVersionFlag(newSource.getSourceDefinitionId(), ActorType.SOURCE, protocolVersionRange); + return buildSourceDefinitionRead(newSource); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java index 5751d271900cb..ab1e1092d2359 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java +++ 
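Note on the OAuthHandler changes above: the consent and complete-OAuth paths now fall back to the hydrated configuration from the database only where the caller passed the masked placeholder value. A minimal standalone sketch of that merge, using plain Jackson instead of Airbyte's Jsons and AirbyteSecretConstants helpers; the class name, mask literal, and field names below are illustrative assumptions, not part of this change.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.Iterator;
import java.util.Map;

public class OauthMaskMergeSketch {

  // Illustrative stand-in for the secrets mask constant used by the handler.
  private static final String SECRETS_MASK = "**********";

  // Keep caller-provided values, but swap any masked value for the one stored in the DB.
  static JsonNode mergeWithStoredConfig(final JsonNode fromInput, final JsonNode fromDb) {
    final ObjectNode result = fromInput.deepCopy();
    final Iterator<Map.Entry<String, JsonNode>> fields = fromInput.fields();
    while (fields.hasNext()) {
      final Map.Entry<String, JsonNode> field = fields.next();
      if (SECRETS_MASK.equals(field.getValue().textValue()) && fromDb.has(field.getKey())) {
        result.set(field.getKey(), fromDb.get(field.getKey()));
      }
    }
    return result;
  }

  public static void main(final String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final JsonNode input = mapper.readTree("{\"client_secret\": \"**********\", \"client_id\": \"my-id\"}");
    final JsonNode stored = mapper.readTree("{\"client_secret\": \"real-secret\", \"client_id\": \"old-id\"}");
    // Prints {"client_secret":"real-secret","client_id":"my-id"}
    System.out.println(mergeWithStoredConfig(input, stored));
  }
}

The handler itself goes a bit further: it works on a flattened string map and logs a warning when a masked key has no stored counterpart, as the getOauthFromDBIfNeeded implementation above shows.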
b/airbyte-server/src/main/java/io/airbyte/server/handlers/SourceHandler.java @@ -213,11 +213,15 @@ public void deleteSource(final SourceRead source) final var workspaceIdRequestBody = new WorkspaceIdRequestBody() .workspaceId(source.getWorkspaceId()); - connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody) + final List uuidsToDelete = connectionsHandler.listConnectionsForWorkspace(workspaceIdRequestBody) .getConnections().stream() .filter(con -> con.getSourceId().equals(source.getSourceId())) .map(ConnectionRead::getConnectionId) - .forEach(connectionsHandler::deleteConnection); + .toList(); + + for (final UUID uuidToDelete : uuidsToDelete) { + connectionsHandler.deleteConnection(uuidToDelete); + } final var spec = getSpecFromSourceId(source.getSourceId()); final var fullConfig = secretsRepositoryReader.getSourceConnectionWithSecrets(source.getSourceId()).getConfiguration(); diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/WorkspacesHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/WorkspacesHandler.java index 05b37a8341e8c..63e88b7bbfefa 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/WorkspacesHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/WorkspacesHandler.java @@ -186,7 +186,14 @@ public WorkspaceRead updateWorkspace(final WorkspaceUpdate workspacePatch) throw LOGGER.debug("Patched Workspace before persisting: {}", workspace); - persistStandardWorkspace(workspace); + if (workspacePatch.getWebhookConfigs() == null) { + // We aren't persisting any secrets. It's safe (and necessary) to use the NoSecrets variant because + // we never hydrated them in the first place. + configRepository.writeStandardWorkspaceNoSecrets(workspace); + } else { + // We're saving new webhook configs, so we need to persist the secrets. + persistStandardWorkspace(workspace); + } // after updating email or tracking info, we need to re-identify the instance. TrackingClientSingleton.get().identify(workspaceId); @@ -204,7 +211,9 @@ public WorkspaceRead updateWorkspaceName(final WorkspaceUpdateName workspaceUpda .withName(workspaceUpdateName.getName()) .withSlug(generateUniqueSlug(workspaceUpdateName.getName())); - persistStandardWorkspace(persistedWorkspace); + // NOTE: it's safe (and necessary) to use the NoSecrets variant because we never hydrated them in + // the first place. + configRepository.writeStandardWorkspaceNoSecrets(persistedWorkspace); return buildWorkspaceReadFromId(workspaceId); } diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/OAuthPathExtractor.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/OAuthPathExtractor.java new file mode 100644 index 0000000000000..ddf74bc4767eb --- /dev/null +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/helpers/OAuthPathExtractor.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.handlers.helpers; + +import com.fasterxml.jackson.databind.JsonNode; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class OAuthPathExtractor { + + private static final String PROPERTIES = "properties"; + private static final String PATH_IN_CONNECTOR_CONFIG = "path_in_connector_config"; + + public static Map<String, List<String>> extractOauthConfigurationPaths(final JsonNode configuration) { + + if (configuration.has(PROPERTIES) && configuration.get(PROPERTIES).isObject()) { + final Map<String, List<String>> result = new HashMap<>(); + + configuration.get(PROPERTIES).fields().forEachRemaining(entry -> { + final JsonNode value = entry.getValue(); + if (value.isObject() && value.has(PATH_IN_CONNECTOR_CONFIG) && value.get(PATH_IN_CONNECTOR_CONFIG).isArray()) { + final List<String> path = new ArrayList<>(); + for (final JsonNode pathPart : value.get(PATH_IN_CONNECTOR_CONFIG)) { + path.add(pathPart.textValue()); + } + result.put(entry.getKey(), path); + } + }); + + return result; + } else { + return new HashMap<>(); + } + } + +} diff --git a/airbyte-server/src/main/java/io/airbyte/server/scheduler/EventRunner.java b/airbyte-server/src/main/java/io/airbyte/server/scheduler/EventRunner.java index ca2b47bb6cc32..bc4a83a07042a 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/scheduler/EventRunner.java +++ b/airbyte-server/src/main/java/io/airbyte/server/scheduler/EventRunner.java @@ -20,8 +20,10 @@ public interface EventRunner { ManualOperationResult resetConnection(final UUID connectionId, final List<StreamDescriptor> streamsToReset, final boolean runSyncImmediately); - void deleteConnection(final UUID connectionId); + void forceDeleteConnection(final UUID connectionId); + // TODO: Delete + @Deprecated(forRemoval = true) void migrateSyncIfNeeded(final Set<UUID> connectionIds); void update(final UUID connectionId); diff --git a/airbyte-server/src/main/java/io/airbyte/server/scheduler/TemporalEventRunner.java b/airbyte-server/src/main/java/io/airbyte/server/scheduler/TemporalEventRunner.java index 79b54ce9a0580..5aa469a1a9d5a 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/scheduler/TemporalEventRunner.java +++ b/airbyte-server/src/main/java/io/airbyte/server/scheduler/TemporalEventRunner.java @@ -40,8 +40,8 @@ public ManualOperationResult resetConnection(final UUID connectionId, } @Override - public void deleteConnection(final UUID connectionId) { - temporalClient.deleteConnection(connectionId); + public void forceDeleteConnection(final UUID connectionId) { + temporalClient.forceDeleteWorkflow(connectionId); } @Override diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java index 08086c7e3bae6..9e48179153ac1 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/ConnectionsHandlerTest.java @@ -66,6 +66,7 @@ import io.airbyte.server.helpers.ConnectionHelpers; import io.airbyte.server.scheduler.EventRunner; import io.airbyte.validation.json.JsonValidationException; +import io.airbyte.workers.helper.ConnectionHelper; import java.io.IOException; import java.util.Collections; import java.util.List; @@ -98,6 +99,7 @@ class ConnectionsHandlerTest { private WorkspaceHelper workspaceHelper; private TrackingClient trackingClient; private EventRunner eventRunner; + private ConnectionHelper connectionHelper; private static final String
PRESTO_TO_HUDI = "presto to hudi"; private static final String PRESTO_TO_HUDI_PREFIX = "presto_to_hudi"; @@ -173,7 +175,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio workspaceHelper = mock(WorkspaceHelper.class); trackingClient = mock(TrackingClient.class); eventRunner = mock(EventRunner.class); - + connectionHelper = mock(ConnectionHelper.class); when(workspaceHelper.getWorkspaceForSourceIdIgnoreExceptions(sourceId)).thenReturn(workspaceId); when(workspaceHelper.getWorkspaceForDestinationIdIgnoreExceptions(destinationId)).thenReturn(workspaceId); when(workspaceHelper.getWorkspaceForOperationIdIgnoreExceptions(operationId)).thenReturn(workspaceId); @@ -190,7 +192,8 @@ void setUp() throws JsonValidationException, ConfigNotFoundException, IOExceptio uuidGenerator, workspaceHelper, trackingClient, - eventRunner); + eventRunner, + connectionHelper); when(uuidGenerator.get()).thenReturn(standardSync.getConnectionId()); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() @@ -831,10 +834,10 @@ void testSearchConnections() throws JsonValidationException, ConfigNotFoundExcep } @Test - void testDeleteConnection() { + void testDeleteConnection() throws JsonValidationException, ConfigNotFoundException, IOException { connectionsHandler.deleteConnection(connectionId); - verify(eventRunner).deleteConnection(connectionId); + verify(connectionHelper).deleteConnection(connectionId); } @Test @@ -904,7 +907,8 @@ void setUp() { uuidGenerator, workspaceHelper, trackingClient, - eventRunner); + eventRunner, + connectionHelper); } @Test diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationDefinitionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationDefinitionsHandlerTest.java index afbb40d1f8c5d..daad65516f511 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationDefinitionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/DestinationDefinitionsHandlerTest.java @@ -31,7 +31,11 @@ import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.docker.DockerUtils; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.config.ActorDefinitionResourceRequirements; +import io.airbyte.config.ActorType; +import io.airbyte.config.Configs; +import io.airbyte.config.EnvConfigs; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.ResourceRequirements; import io.airbyte.config.StandardDestinationDefinition; @@ -535,6 +539,12 @@ void testUpdateDestination() throws ConfigNotFoundException, IOException, JsonVa assertEquals(newDockerImageTag, destinationRead.getDockerImageTag()); verify(schedulerSynchronousClient).createGetSpecJob(newImageName); verify(configRepository).writeStandardDestinationDefinition(updatedDestination); + + final Configs configs = new EnvConfigs(); + final AirbyteProtocolVersionRange protocolVersionRange = + new AirbyteProtocolVersionRange(configs.getAirbyteProtocolVersionMin(), configs.getAirbyteProtocolVersionMax()); + verify(configRepository).clearUnsupportedProtocolVersionFlag(updatedDestination.getDestinationDefinitionId(), ActorType.DESTINATION, + protocolVersionRange); } @Test diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/OAuthHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/OAuthHandlerTest.java index 0ea12a1e0594e..4b6ad812cac08 100644 --- 
a/airbyte-server/src/test/java/io/airbyte/server/handlers/OAuthHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/OAuthHandlerTest.java @@ -8,6 +8,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.fasterxml.jackson.databind.JsonNode; import io.airbyte.analytics.TrackingClient; import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; @@ -15,6 +16,7 @@ import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.config.persistence.SecretsRepositoryReader; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.net.http.HttpClient; @@ -34,6 +36,7 @@ class OAuthHandlerTest { private OAuthHandler handler; private TrackingClient trackingClient; private HttpClient httpClient; + private SecretsRepositoryReader secretsRepositoryReader; private static final String CLIENT_ID = "123"; private static final String CLIENT_ID_KEY = "client_id"; private static final String CLIENT_SECRET_KEY = "client_secret"; @@ -44,7 +47,8 @@ public void init() { configRepository = Mockito.mock(ConfigRepository.class); trackingClient = mock(TrackingClient.class); httpClient = Mockito.mock(HttpClient.class); - handler = new OAuthHandler(configRepository, httpClient, trackingClient); + secretsRepositoryReader = mock(SecretsRepositoryReader.class); + handler = new OAuthHandler(configRepository, httpClient, trackingClient, secretsRepositoryReader); } @Test @@ -151,4 +155,77 @@ void resetDestinationInstancewideOauthParams() throws JsonValidationException, I assertEquals(oauthParameterId, capturedValues.get(1).getOauthParameterId()); } + @Test + void testBuildJsonPathFromOAuthFlowInitParameters() { + final Map> input = Map.ofEntries( + Map.entry("field1", List.of("1")), + Map.entry("field2", List.of("2", "3"))); + + final Map expected = Map.ofEntries( + Map.entry("field1", "$.1"), + Map.entry("field2", "$.2.3")); + + assertEquals(expected, handler.buildJsonPathFromOAuthFlowInitParameters(input)); + } + + @Test + void testGetOAuthInputConfiguration() { + final JsonNode hydratedConfig = Jsons.deserialize( + """ + { + "field1": "1", + "field2": "2", + "field3": { + "field3_1": "3_1", + "field3_2": "3_2" + } + } + """); + + final Map pathsToGet = Map.ofEntries( + Map.entry("field1", "$.field1"), + Map.entry("field3_1", "$.field3.field3_1"), + Map.entry("field3_2", "$.field3.field3_2")); + + final JsonNode expected = Jsons.deserialize( + """ + { + "field1": "1", + "field3_1": "3_1", + "field3_2": "3_2" + } + """); + + assertEquals(expected, handler.getOAuthInputConfiguration(hydratedConfig, pathsToGet)); + } + + @Test + void testGetOauthFromDBIfNeeded() { + final JsonNode fromInput = Jsons.deserialize( + """ + { + "testMask": "**********", + "testNotMask": "this" + } + """); + + final JsonNode fromDb = Jsons.deserialize( + """ + { + "testMask": "mask", + "testNotMask": "notThis" + } + """); + + final JsonNode expected = Jsons.deserialize( + """ + { + "testMask": "mask", + "testNotMask": "this" + } + """); + + assertEquals(expected, handler.getOauthFromDBIfNeeded(fromDb, fromInput)); + } + } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/OperationsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/OperationsHandlerTest.java index 
a9784d06530bb..e3e2d49542f19 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/OperationsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/OperationsHandlerTest.java @@ -5,6 +5,7 @@ package io.airbyte.server.handlers; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -24,6 +25,8 @@ import io.airbyte.api.model.generated.OperatorNormalization.OptionEnum; import io.airbyte.api.model.generated.OperatorType; import io.airbyte.api.model.generated.OperatorWebhook; +import io.airbyte.api.model.generated.OperatorWebhook.WebhookTypeEnum; +import io.airbyte.api.model.generated.OperatorWebhookDbtCloud; import io.airbyte.commons.enums.Enums; import io.airbyte.config.OperatorNormalization.Option; import io.airbyte.config.StandardSync; @@ -45,10 +48,12 @@ class OperationsHandlerTest { private static final String WEBHOOK_OPERATION_NAME = "fake-operation-name"; private static final UUID WEBHOOK_CONFIG_ID = UUID.randomUUID(); - private static final String WEBHOOK_EXECUTION_URL = "fake-execution-url"; - private static final String WEBHOOK_EXECUTION_BODY = "fake-execution-body"; private static final UUID WEBHOOK_OPERATION_ID = UUID.randomUUID(); - public static final String NEW_EXECUTION_URL = "new-execution-url"; + private static final Integer DBT_CLOUD_WEBHOOK_ACCOUNT_ID = 123; + private static final Integer DBT_CLOUD_WEBHOOK_JOB_ID = 456; + private static final Integer NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID = 789; + public static final String EXECUTION_BODY = "{\"cause\": \"airbyte\"}"; + public static final String EXECUTION_URL_TEMPLATE = "https://cloud.getdbt.com/api/v2/accounts/%d/jobs/%d/run/"; private ConfigRepository configRepository; private Supplier uuidGenerator; private OperationsHandler operationsHandler; @@ -104,8 +109,10 @@ void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFound when(uuidGenerator.get()).thenReturn(WEBHOOK_OPERATION_ID); final OperatorWebhook webhookConfig = new OperatorWebhook() .webhookConfigId(WEBHOOK_CONFIG_ID) - .executionUrl(WEBHOOK_EXECUTION_URL) - .executionBody(WEBHOOK_EXECUTION_BODY); + .webhookType(WebhookTypeEnum.DBTCLOUD) + .dbtCloud(new OperatorWebhookDbtCloud() + .accountId(DBT_CLOUD_WEBHOOK_ACCOUNT_ID) + .jobId(DBT_CLOUD_WEBHOOK_JOB_ID)); final OperationCreate operationCreate = new OperationCreate() .workspaceId(standardSyncOperation.getWorkspaceId()) .name(WEBHOOK_OPERATION_NAME) @@ -119,8 +126,9 @@ void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFound .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) .withOperatorWebhook(new io.airbyte.config.OperatorWebhook() .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(WEBHOOK_EXECUTION_URL) - .withExecutionBody(WEBHOOK_EXECUTION_BODY)) + .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID)) + .withExecutionBody(EXECUTION_BODY)) .withTombstone(false); when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(expectedPersistedOperation); @@ -131,7 +139,12 @@ void testCreateWebhookOperation() throws JsonValidationException, ConfigNotFound assertEquals(WEBHOOK_OPERATION_ID, actualOperationRead.getOperationId()); assertEquals(WEBHOOK_OPERATION_NAME, actualOperationRead.getName()); 
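The webhook operation tests rely on the dbt Cloud account and job ids surviving a round trip through the persisted execution URL (EXECUTION_URL_TEMPLATE) and the handler's dbtUrlPattern regex. A standalone sketch of that round trip, assuming nothing beyond java.util.regex; the class and record names are illustrative, not part of the PR.

import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DbtCloudUrlRoundTrip {

  // Same URL shape as the handler's pattern: anchored, with numeric account and job ids.
  private static final Pattern DBT_URL_PATTERN =
      Pattern.compile("^https://cloud\\.getdbt\\.com/api/v2/accounts/(\\d+)/jobs/(\\d+)/run/$");

  record DbtCloudJob(int accountId, int jobId) {}

  // Build the execution URL that gets persisted on the sync operation.
  static String toExecutionUrl(final DbtCloudJob job) {
    return String.format("https://cloud.getdbt.com/api/v2/accounts/%d/jobs/%d/run/", job.accountId(), job.jobId());
  }

  // Recover the ids from a persisted URL; empty if the URL does not match the expected shape.
  static Optional<DbtCloudJob> fromExecutionUrl(final String executionUrl) {
    final Matcher matcher = DBT_URL_PATTERN.matcher(executionUrl);
    if (!matcher.matches()) {
      return Optional.empty();
    }
    return Optional.of(new DbtCloudJob(Integer.parseInt(matcher.group(1)), Integer.parseInt(matcher.group(2))));
  }

  public static void main(final String[] args) {
    final String url = toExecutionUrl(new DbtCloudJob(123, 456));
    // Prints the URL followed by Optional[DbtCloudJob[accountId=123, jobId=456]]
    System.out.println(url + " -> " + fromExecutionUrl(url));
    // Non-numeric ids (or extra prefix/suffix text) do not match and yield Optional.empty
    System.out.println(fromExecutionUrl("https://cloud.getdbt.com/api/v2/accounts/abc/jobs/456/run/"));
  }
}

Anchoring the pattern with ^ and $ and using digit-only capture groups is what makes the non-URL, prefixed, suffixed, and non-numeric cases in the testDbtCloudRegex test below fail to parse.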
assertEquals(OperatorType.WEBHOOK, actualOperationRead.getOperatorConfiguration().getOperatorType()); - assertEquals(webhookConfig, actualOperationRead.getOperatorConfiguration().getWebhook()); + + // NOTE: we expect the server to dual-write on read until the frontend moves to the new format. + final OperatorWebhook expectedWebhookConfigRead = + webhookConfig.executionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID)).executionBody(EXECUTION_BODY); + assertEquals(expectedWebhookConfigRead, actualOperationRead.getOperatorConfiguration().getWebhook()); verify(configRepository).writeStandardSyncOperation(eq(expectedPersistedOperation)); } @@ -189,36 +202,58 @@ void testUpdateWebhookOperation() throws JsonValidationException, ConfigNotFound when(uuidGenerator.get()).thenReturn(WEBHOOK_OPERATION_ID); final OperatorWebhook webhookConfig = new OperatorWebhook() .webhookConfigId(WEBHOOK_CONFIG_ID) - .executionUrl(NEW_EXECUTION_URL) - .executionBody(WEBHOOK_EXECUTION_BODY); + .webhookType(WebhookTypeEnum.DBTCLOUD) + .dbtCloud(new OperatorWebhookDbtCloud() + .accountId(NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID) + .jobId(DBT_CLOUD_WEBHOOK_JOB_ID)); final OperationUpdate operationUpdate = new OperationUpdate() .name(WEBHOOK_OPERATION_NAME) .operationId(WEBHOOK_OPERATION_ID) .operatorConfiguration(new OperatorConfiguration() .operatorType(OperatorType.WEBHOOK).webhook(webhookConfig)); + final var persistedWebhook = new io.airbyte.config.OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID)) + .withExecutionBody(EXECUTION_BODY); + + final var updatedWebhook = new io.airbyte.config.OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionUrl(String.format(EXECUTION_URL_TEMPLATE, NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID)) + .withExecutionBody(EXECUTION_BODY); + final StandardSyncOperation persistedOperation = new StandardSyncOperation() .withWorkspaceId(standardSyncOperation.getWorkspaceId()) .withOperationId(WEBHOOK_OPERATION_ID) .withName(WEBHOOK_OPERATION_NAME) .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) - .withOperatorWebhook(new io.airbyte.config.OperatorWebhook() - .withWebhookConfigId(WEBHOOK_CONFIG_ID) - .withExecutionUrl(WEBHOOK_EXECUTION_URL) - .withExecutionBody(WEBHOOK_EXECUTION_BODY)); + .withOperatorWebhook(persistedWebhook); - when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(persistedOperation); + final StandardSyncOperation updatedOperation = new StandardSyncOperation() + .withWorkspaceId(standardSyncOperation.getWorkspaceId()) + .withOperationId(WEBHOOK_OPERATION_ID) + .withName(WEBHOOK_OPERATION_NAME) + .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) + .withOperatorWebhook(updatedWebhook); + + when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(persistedOperation).thenReturn(updatedOperation); final OperationRead actualOperationRead = operationsHandler.updateOperation(operationUpdate); assertEquals(WEBHOOK_OPERATION_ID, actualOperationRead.getOperationId()); assertEquals(WEBHOOK_OPERATION_NAME, actualOperationRead.getName()); assertEquals(OperatorType.WEBHOOK, actualOperationRead.getOperatorConfiguration().getOperatorType()); - assertEquals(webhookConfig, actualOperationRead.getOperatorConfiguration().getWebhook()); + final OperatorWebhook expectedWebhookConfigRead = + 
webhookConfig.executionUrl(String.format(EXECUTION_URL_TEMPLATE, NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID)).executionBody(EXECUTION_BODY); + assertEquals(expectedWebhookConfigRead, actualOperationRead.getOperatorConfiguration().getWebhook()); verify(configRepository) .writeStandardSyncOperation(persistedOperation.withOperatorWebhook(persistedOperation.getOperatorWebhook().withExecutionUrl( - NEW_EXECUTION_URL))); + String.format(EXECUTION_URL_TEMPLATE, NEW_DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID)))); } @Test @@ -313,4 +348,39 @@ void testEnumConversion() { io.airbyte.config.OperatorNormalization.Option.class)); } + @Test + void testDbtCloudRegex() { + // Validate that a non-url is rejected. + assertThrows(IllegalArgumentException.class, () -> checkDbtCloudUrl("not-a-url")); + // Validate that the URL is anchored to the beginning. + assertThrows(IllegalArgumentException.class, + () -> checkDbtCloudUrl("some-nonsense-" + String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID))); + // Validate that the URL is anchored to the end. + assertThrows(IllegalArgumentException.class, + () -> checkDbtCloudUrl(String.format(EXECUTION_URL_TEMPLATE, DBT_CLOUD_WEBHOOK_ACCOUNT_ID, + DBT_CLOUD_WEBHOOK_JOB_ID) + "-some-nonsense")); + // Validate that the account id must be an integer. + assertThrows(IllegalArgumentException.class, () -> checkDbtCloudUrl("https://cloud.getdbt.com/api/v2/accounts/abc/jobs/123/run/")); + // Validate that the job id must be an integer. + assertThrows(IllegalArgumentException.class, () -> checkDbtCloudUrl("https://cloud.getdbt.com/api/v2/accounts/123/jobs/abc/run/")); + } + + private void checkDbtCloudUrl(final String urlToCheck) throws JsonValidationException, ConfigNotFoundException, IOException { + final StandardSyncOperation persistedOperation = new StandardSyncOperation() + .withWorkspaceId(standardSyncOperation.getWorkspaceId()) + .withOperationId(WEBHOOK_OPERATION_ID) + .withName(WEBHOOK_OPERATION_NAME) + .withOperatorType(StandardSyncOperation.OperatorType.WEBHOOK) + .withOperatorWebhook(new io.airbyte.config.OperatorWebhook() + .withWebhookConfigId(WEBHOOK_CONFIG_ID) + .withExecutionUrl(urlToCheck) + .withExecutionBody(EXECUTION_BODY)) + .withTombstone(false); + when(configRepository.getStandardSyncOperation(WEBHOOK_OPERATION_ID)).thenReturn(persistedOperation); + + final OperationIdRequestBody operationIdRequestBody = new OperationIdRequestBody().operationId(WEBHOOK_OPERATION_ID); + operationsHandler.getOperation(operationIdRequestBody); + } + } diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceDefinitionsHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceDefinitionsHandlerTest.java index 07acc870461a7..733c50995ca75 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceDefinitionsHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/SourceDefinitionsHandlerTest.java @@ -32,7 +32,11 @@ import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.docker.DockerUtils; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.version.AirbyteProtocolVersionRange; import io.airbyte.config.ActorDefinitionResourceRequirements; +import io.airbyte.config.ActorType; +import io.airbyte.config.Configs; +import io.airbyte.config.EnvConfigs; import io.airbyte.config.JobConfig.ConfigType; import io.airbyte.config.ResourceRequirements; import 
io.airbyte.config.StandardSourceDefinition; @@ -524,6 +528,11 @@ void testUpdateSourceDefinition() throws ConfigNotFoundException, IOException, J assertEquals(newDockerImageTag, sourceDefinitionRead.getDockerImageTag()); verify(schedulerSynchronousClient).createGetSpecJob(newImageName); verify(configRepository).writeStandardSourceDefinition(updatedSource); + + final Configs configs = new EnvConfigs(); + final AirbyteProtocolVersionRange protocolVersionRange = + new AirbyteProtocolVersionRange(configs.getAirbyteProtocolVersionMin(), configs.getAirbyteProtocolVersionMax()); + verify(configRepository).clearUnsupportedProtocolVersionFlag(updatedSource.getSourceDefinitionId(), ActorType.SOURCE, protocolVersionRange); } @Test diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/WorkspacesHandlerTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/WorkspacesHandlerTest.java index 466671991f584..4e604e12855da 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/handlers/WorkspacesHandlerTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/WorkspacesHandlerTest.java @@ -58,13 +58,16 @@ class WorkspacesHandlerTest { - public static final String FAILURE_NOTIFICATION_WEBHOOK = "http://airbyte.notifications/failure"; - public static final String NEW_WORKSPACE = "new workspace"; - public static final String TEST_NAME = "test-name"; + private static final String FAILURE_NOTIFICATION_WEBHOOK = "http://airbyte.notifications/failure"; + private static final String NEW_WORKSPACE = "new workspace"; + private static final String TEST_NAME = "test-name"; + + private static final String TEST_AUTH_TOKEN = "test-auth-token"; private static final UUID WEBHOOK_CONFIG_ID = UUID.randomUUID(); private static final JsonNode PERSISTED_WEBHOOK_CONFIGS = Jsons.deserialize( String.format("{\"webhookConfigs\": [{\"id\": \"%s\", \"name\": \"%s\", \"authToken\": {\"_secret\": \"a-secret_v1\"}}]}", WEBHOOK_CONFIG_ID, TEST_NAME)); + public static final String UPDATED = "updated"; private ConfigRepository configRepository; private SecretsRepositoryWriter secretsRepositoryWriter; private ConnectionsHandler connectionsHandler; @@ -149,7 +152,7 @@ void testCreateWorkspace() throws JsonValidationException, IOException, ConfigNo .securityUpdates(false) .notifications(List.of(generateApiNotification())) .defaultGeography(GEOGRAPHY_US) - .webhookConfigs(List.of(new WebhookConfigWrite().name(TEST_NAME).authToken("test-auth-token"))); + .webhookConfigs(List.of(new WebhookConfigWrite().name(TEST_NAME).authToken(TEST_AUTH_TOKEN))); final WorkspaceRead actualRead = workspacesHandler.createWorkspace(workspaceCreate); final WorkspaceRead expectedRead = new WorkspaceRead() @@ -359,7 +362,7 @@ void testGetWorkspaceByConnectionId() { @Test void testUpdateWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { final io.airbyte.api.model.generated.Notification apiNotification = generateApiNotification(); - apiNotification.getSlackConfiguration().webhook("updated"); + apiNotification.getSlackConfiguration().webhook(UPDATED); final WorkspaceUpdate workspaceUpdate = new WorkspaceUpdate() .workspaceId(workspace.getWorkspaceId()) .anonymousDataCollection(true) @@ -372,7 +375,7 @@ void testUpdateWorkspace() throws JsonValidationException, ConfigNotFoundExcepti .webhookConfigs(List.of(new WebhookConfigWrite().name(TEST_NAME).authToken("test-auth-token"))); final Notification expectedNotification = generateNotification(); - 
expectedNotification.getSlackConfiguration().withWebhook("updated"); + expectedNotification.getSlackConfiguration().withWebhook(UPDATED); final StandardWorkspace expectedWorkspace = new StandardWorkspace() .withWorkspaceId(workspace.getWorkspaceId()) .withCustomerId(workspace.getCustomerId()) @@ -398,7 +401,7 @@ void testUpdateWorkspace() throws JsonValidationException, ConfigNotFoundExcepti final WorkspaceRead actualWorkspaceRead = workspacesHandler.updateWorkspace(workspaceUpdate); final io.airbyte.api.model.generated.Notification expectedNotificationRead = generateApiNotification(); - expectedNotificationRead.getSlackConfiguration().webhook("updated"); + expectedNotificationRead.getSlackConfiguration().webhook(UPDATED); final WorkspaceRead expectedWorkspaceRead = new WorkspaceRead() .workspaceId(workspace.getWorkspaceId()) .customerId(workspace.getCustomerId()) @@ -419,6 +422,43 @@ void testUpdateWorkspace() throws JsonValidationException, ConfigNotFoundExcepti assertEquals(expectedWorkspaceRead, actualWorkspaceRead); } + @Test + void testUpdateWorkspaceWithoutWebhookConfigs() throws JsonValidationException, ConfigNotFoundException, IOException { + final io.airbyte.api.model.generated.Notification apiNotification = generateApiNotification(); + apiNotification.getSlackConfiguration().webhook(UPDATED); + final WorkspaceUpdate workspaceUpdate = new WorkspaceUpdate() + .workspaceId(workspace.getWorkspaceId()) + .anonymousDataCollection(false); + + final Notification expectedNotification = generateNotification(); + expectedNotification.getSlackConfiguration().withWebhook(UPDATED); + final StandardWorkspace expectedWorkspace = new StandardWorkspace() + .withWorkspaceId(workspace.getWorkspaceId()) + .withCustomerId(workspace.getCustomerId()) + .withEmail(TEST_EMAIL) + .withName(TEST_WORKSPACE_NAME) + .withSlug(TEST_WORKSPACE_SLUG) + .withAnonymousDataCollection(true) + .withSecurityUpdates(false) + .withNews(false) + .withInitialSetupComplete(true) + .withDisplaySetupWizard(false) + .withTombstone(false) + .withNotifications(List.of(expectedNotification)) + .withDefaultGeography(Geography.US) + .withWebhookOperationConfigs(PERSISTED_WEBHOOK_CONFIGS); + + when(uuidSupplier.get()).thenReturn(WEBHOOK_CONFIG_ID); + + when(configRepository.getStandardWorkspaceNoSecrets(workspace.getWorkspaceId(), false)) + .thenReturn(expectedWorkspace) + .thenReturn(expectedWorkspace.withAnonymousDataCollection(false)); + + workspacesHandler.updateWorkspace(workspaceUpdate); + + verify(configRepository).writeStandardWorkspaceNoSecrets(expectedWorkspace); + } + @Test @DisplayName("Updating workspace name should update name and slug") void testUpdateWorkspaceNoNameUpdate() throws JsonValidationException, ConfigNotFoundException, IOException { diff --git a/airbyte-server/src/test/java/io/airbyte/server/handlers/helper/OAuthPathExtractorTest.java b/airbyte-server/src/test/java/io/airbyte/server/handlers/helper/OAuthPathExtractorTest.java new file mode 100644 index 0000000000000..e7c7d95f83080 --- /dev/null +++ b/airbyte-server/src/test/java/io/airbyte/server/handlers/helper/OAuthPathExtractorTest.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2022 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.server.handlers.helper; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.server.handlers.helpers.OAuthPathExtractor; +import java.util.List; +import java.util.Map; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; + +class OAuthPathExtractorTest { + + @Test + void testExtract() { + final JsonNode input = Jsons.deserialize(""" + { + "type": "object", + "additionalProperties": false, + "properties": { + "tenant_id": { + "type": "string", + "path_in_connector_config": ["tenant_id"] + }, + "another_property": { + "type": "string", + "path_in_connector_config": ["another", "property"] + } + } + } + """); + + final Map> expected = Map.ofEntries( + Map.entry("tenant_id", List.of("tenant_id")), + Map.entry("another_property", List.of("another", "property"))); + + Assertions.assertThat(OAuthPathExtractor.extractOauthConfigurationPaths(input)) + .containsExactlyInAnyOrderEntriesOf(expected); + } + +} diff --git a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java index 433f7e82ab4e0..a85f6498975ab 100644 --- a/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java +++ b/airbyte-test-utils/src/main/java/io/airbyte/test/utils/AirbyteAcceptanceTestHarness.java @@ -33,6 +33,7 @@ import io.airbyte.api.client.model.generated.DestinationRead; import io.airbyte.api.client.model.generated.DestinationSyncMode; import io.airbyte.api.client.model.generated.JobConfigType; +import io.airbyte.api.client.model.generated.JobDebugInfoRead; import io.airbyte.api.client.model.generated.JobIdRequestBody; import io.airbyte.api.client.model.generated.JobListRequestBody; import io.airbyte.api.client.model.generated.JobRead; @@ -794,6 +795,23 @@ public static JobRead waitWhileJobHasStatus(final JobsApi jobsApi, return job; } + @SuppressWarnings("BusyWait") + public static void waitWhileJobIsRunning(final JobsApi jobsApi, final JobRead job, final Duration maxWaitTime) + throws ApiException, InterruptedException { + final Instant waitStart = Instant.now(); + JobDebugInfoRead jobDebugInfoRead = jobsApi.getJobDebugInfo(new JobIdRequestBody().id(job.getId())); + LOGGER.info("workflow state: {}", jobDebugInfoRead.getWorkflowState()); + while (jobDebugInfoRead.getWorkflowState() != null && jobDebugInfoRead.getWorkflowState().getRunning()) { + if (Duration.between(waitStart, Instant.now()).compareTo(maxWaitTime) > 0) { + LOGGER.info("Max wait time of {} has been reached. 
Stopping wait.", maxWaitTime); + break; + } + LOGGER.info("waiting: job id: {}, workflowState.isRunning is still true", job.getId()); + sleep(1000); + jobDebugInfoRead = jobsApi.getJobDebugInfo(new JobIdRequestBody().id(job.getId())); + } + } + @SuppressWarnings("BusyWait") public static ConnectionState waitForConnectionState(final AirbyteApiClient apiClient, final UUID connectionId) throws ApiException, InterruptedException { diff --git a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java index af778f649cbe4..757f2a90018fd 100644 --- a/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java +++ b/airbyte-tests/src/acceptanceTests/java/io/airbyte/test/acceptance/BasicAcceptanceTests.java @@ -13,6 +13,7 @@ import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.STREAM_NAME; import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitForSuccessfulJob; import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobHasStatus; +import static io.airbyte.test.utils.AirbyteAcceptanceTestHarness.waitWhileJobIsRunning; import static java.lang.Thread.sleep; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -65,6 +66,8 @@ import io.airbyte.api.client.model.generated.OperatorConfiguration; import io.airbyte.api.client.model.generated.OperatorType; import io.airbyte.api.client.model.generated.OperatorWebhook; +import io.airbyte.api.client.model.generated.OperatorWebhook.WebhookTypeEnum; +import io.airbyte.api.client.model.generated.OperatorWebhookDbtCloud; import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.SourceDefinitionIdWithWorkspaceId; import io.airbyte.api.client.model.generated.SourceDefinitionRead; @@ -464,10 +467,9 @@ void testWebhookOperationExecutesSuccessfully() throws Exception { .operatorType(OperatorType.WEBHOOK) .webhook(new OperatorWebhook() .webhookConfigId(workspaceRead.getWebhookConfigs().get(0).getId()) - // NOTE: reqres.in is free service that hosts a REST API intended for testing frontend/client code. - // We use it here as an endpoint that will accept an HTTP POST. - .executionUrl("https://reqres.in/api/users") - .executionBody("{\"name\": \"morpheus\", \"job\": \"leader\"}")))); + // NOTE: this dbt Cloud config won't actually work, but the sync should still succeed. + .webhookType(WebhookTypeEnum.DBTCLOUD) + .dbtCloud(new OperatorWebhookDbtCloud().accountId(123).jobId(456))))); // create a connection with the new operation. final UUID sourceId = testHarness.createPostgresSource().getSourceId(); final UUID destinationId = testHarness.createPostgresDestination().getDestinationId(); @@ -641,6 +643,11 @@ void testIncrementalSync() throws Exception { final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitWhileJobHasStatus(apiClient.getJobsApi(), jobInfoRead.getJob(), Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); + // This is a band-aid to prevent some race conditions where the job status was updated but we may + // still be cleaning up some data in the reset table. 
This would be an argument for reworking the + // source of truth of the replication workflow state to be in DB rather than in Memory and + // serialized automagically by temporal + waitWhileJobIsRunning(apiClient.getJobsApi(), jobInfoRead.getJob(), Duration.ofMinutes(1)); LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); @@ -658,7 +665,6 @@ void testIncrementalSync() throws Exception { } - @Disabled @Test @Order(14) void testDeleteConnection() throws Exception { @@ -689,9 +695,6 @@ void testDeleteConnection() throws Exception { // connectionIds.remove(connectionId); // todo remove testHarness.removeConnection(connectionId); - LOGGER.info("Waiting for connection to be deleted..."); - Thread.sleep(5000); - ConnectionStatus connectionStatus = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)).getStatus(); assertEquals(ConnectionStatus.DEPRECATED, connectionStatus); @@ -711,9 +714,6 @@ void testDeleteConnection() throws Exception { // we should still be able to delete the connection when the temporal workflow is in this state apiClient.getConnectionApi().deleteConnection(new ConnectionIdRequestBody().connectionId(connectionId)); - LOGGER.info("Waiting for connection to be deleted..."); - Thread.sleep(5000); - connectionStatus = apiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody().connectionId(connectionId)).getStatus(); assertEquals(ConnectionStatus.DEPRECATED, connectionStatus); } @@ -940,6 +940,11 @@ void testSyncAfterUpgradeToPerStreamState(final TestInfo testInfo) throws Except final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitWhileJobHasStatus(apiClient.getJobsApi(), jobInfoRead.getJob(), Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); + // This is a band-aid to prevent some race conditions where the job status was updated but we may + // still be cleaning up some data in the reset table. This would be an argument for reworking the + // source of truth of the replication workflow state to be in DB rather than in Memory and + // serialized automagically by temporal + waitWhileJobIsRunning(apiClient.getJobsApi(), jobInfoRead.getJob(), Duration.ofMinutes(1)); LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); @@ -1245,6 +1250,11 @@ void testIncrementalSyncMultipleStreams() throws Exception { final JobInfoRead jobInfoRead = apiClient.getConnectionApi().resetConnection(new ConnectionIdRequestBody().connectionId(connectionId)); waitWhileJobHasStatus(apiClient.getJobsApi(), jobInfoRead.getJob(), Sets.newHashSet(JobStatus.PENDING, JobStatus.RUNNING, JobStatus.INCOMPLETE, JobStatus.FAILED)); + // This is a band-aid to prevent some race conditions where the job status was updated but we may + // still be cleaning up some data in the reset table. 
This would be an argument for reworking the + // source of truth of the replication workflow state to be in DB rather than in Memory and + // serialized automagically by temporal + waitWhileJobIsRunning(apiClient.getJobsApi(), jobInfoRead.getJob(), Duration.ofMinutes(1)); LOGGER.info("state after reset: {}", apiClient.getStateApi().getState(new ConnectionIdRequestBody().connectionId(connectionId))); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts index 3e5d5446273f4..2ea56269f4434 100644 --- a/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts +++ b/airbyte-webapp-e2e-tests/cypress/integration/onboarding.spec.ts @@ -6,7 +6,7 @@ describe("Preferences actions", () => { initialSetupCompleted(false); }); - it("Should redirect to onboarding after email is entered", () => { + it("Should redirect to connections page after email is entered", () => { cy.visit("/preferences"); cy.url().should("include", `/preferences`); @@ -15,6 +15,6 @@ describe("Preferences actions", () => { submitButtonClick(); - cy.url().should("match", /.*\/onboarding/); + cy.url().should("match", /.*\/connections/); }); }); diff --git a/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts b/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts index 939f030830483..07995c845d95b 100644 --- a/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts +++ b/airbyte-webapp-e2e-tests/cypress/integration/source.spec.ts @@ -1,16 +1,20 @@ -import { appendRandomString } from "commands/common"; +import { appendRandomString, submitButtonClick } from "commands/common"; import { createPostgresSource, deleteSource, updateSource } from "commands/source"; import { initialSetupCompleted } from "commands/workspaces"; +import { goToSourcePage, openNewSourceForm } from "pages/sourcePage"; +import { openHomepage } from "pages/sidebar"; +import { selectServiceType } from "pages/createConnectorPage"; +import { fillPokeAPIForm } from "commands/connector"; describe("Source main actions", () => { - beforeEach(() => { - initialSetupCompleted(); - }); + beforeEach(() => initialSetupCompleted()); it("Create new source", () => { + cy.intercept("/api/v1/sources/create").as("createSource"); createPostgresSource("Test source cypress"); - cy.url().should("include", `/source/`); + cy.wait("@createSource", {timeout: 30000}).then((interception) => { + assert("include", `/source/${interception.response?.body.Id}`)}); }); //TODO: add update source on some other connector or create 1 more user for pg @@ -32,3 +36,60 @@ describe("Source main actions", () => { cy.get("div").contains(sourceName).should("not.exist"); }); }); + +describe("Unsaved changes modal", () => { + beforeEach(() => initialSetupCompleted()); + + it("Check leaving Source page without any changes", () => { + goToSourcePage(); + openNewSourceForm(); + + openHomepage(); + + cy.url().should("include", "/connections"); + cy.get("[data-testid='confirmationModal']").should("not.exist"); + }); + + it("Check leaving Source page without any changes after selection type", () => { + goToSourcePage(); + openNewSourceForm(); + selectServiceType("PokeAPI"); + + openHomepage(); + + cy.url().should("include", "/connections"); + cy.get("[data-testid='confirmationModal']").should("not.exist"); + }); + + it("Check leaving Source page without any changes", () => { + goToSourcePage(); + openNewSourceForm(); + fillPokeAPIForm("testName", "ditto"); + + openHomepage(); + + 
cy.get("[data-testid='confirmationModal']").should("exist"); + cy.get("[data-testid='confirmationModal']").contains("Discard changes"); + cy.get("[data-testid='confirmationModal']") + .contains("There are unsaved changes. Are you sure you want to discard your changes?"); + }); + + //BUG - https://github.com/airbytehq/airbyte/issues/18246 + it.skip("Check leaving Source page after failing testing", () => { + cy.intercept("/api/v1/scheduler/sources/check_connection").as("checkSourceUpdateConnection"); + + goToSourcePage(); + openNewSourceForm(); + fillPokeAPIForm("testName", "name"); + submitButtonClick(); + + cy.wait("@checkSourceUpdateConnection", {timeout: 5000}); + + openHomepage(); + + cy.get("[data-testid='confirmationModal']").should("exist"); + cy.get("[data-testid='confirmationModal']").contains("Discard changes"); + cy.get("[data-testid='confirmationModal']") + .contains("There are unsaved changes. Are you sure you want to discard your changes?"); + }); +}); diff --git a/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts b/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts index 127c3efd5cc5f..da603c01b3ac6 100644 --- a/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts +++ b/airbyte-webapp-e2e-tests/cypress/pages/destinationPage.ts @@ -1,5 +1,5 @@ const newDestination = "button[data-id='new-destination'"; -const addSourceButton = "div[data-testid='select-source']"; +const addSourceButton = "button[data-id='select-source']"; export const goToDestinationPage = () => { cy.intercept("/api/v1/destinations/list").as("getDestinationsList"); diff --git a/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts b/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts index 31ff3cb2742f4..c19b45ef907d6 100644 --- a/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts +++ b/airbyte-webapp-e2e-tests/cypress/pages/sidebar.ts @@ -1,5 +1,10 @@ const setting = "nav a[href*='settings']"; +const homepage = "[aria-label='Homepage']"; export const openSettings = () => { cy.get(setting).click(); }; + +export const openHomepage = () => { + cy.get(homepage).click(); +}; diff --git a/airbyte-webapp/.env b/airbyte-webapp/.env index 4e35f5bc0deb8..b6cf64f3beafe 100644 --- a/airbyte-webapp/.env +++ b/airbyte-webapp/.env @@ -3,3 +3,4 @@ REACT_APP_FULL_STORY_ORG=13AXQ4 REACT_APP_SENTRY_DSN= REACT_APP_INTERCOM_APP_ID=nj1oam7s REACT_APP_OSANO=16A0CTTE7vE8m1Qif/67beec9b-e563-4736-bdb4-4fe4adc39d48 +REACT_APP_CONNECTOR_BUILDER_API=/connector-builder-api/ diff --git a/airbyte-webapp/.storybook/withProvider.tsx b/airbyte-webapp/.storybook/withProvider.tsx index 8f5d0e54c2e80..83cf558677a8c 100644 --- a/airbyte-webapp/.storybook/withProvider.tsx +++ b/airbyte-webapp/.storybook/withProvider.tsx @@ -12,17 +12,14 @@ import { FeatureService } from "../src/hooks/services/Feature"; import { ConfigServiceProvider, defaultConfig } from "../src/config"; import { DocumentationPanelProvider } from "../src/views/Connector/ConnectorDocumentationLayout/DocumentationPanelContext"; import { ServicesProvider } from "../src/core/servicesProvider"; -import { analyticsServiceContext, AnalyticsServiceProviderValue } from "../src/hooks/services/Analytics"; +import { analyticsServiceContext } from "../src/hooks/services/Analytics"; +import type { AnalyticsService } from "../src/core/analytics"; -const AnalyticsContextMock: AnalyticsServiceProviderValue = { - analyticsContext: {}, - setContext: () => {}, - addContextProps: () => {}, - removeContextProps: () => {}, - service: { +const analyticsContextMock: AnalyticsService = { track: 
() => {}, - }, -} as unknown as AnalyticsServiceProviderValue; + setContext: () => {}, + removeFromContext: () => {}, +} as unknown as AnalyticsService; const queryClient = new QueryClient({ defaultOptions: { @@ -35,7 +32,7 @@ const queryClient = new QueryClient({ export const withProviders = (getStory) => ( - + diff --git a/airbyte-webapp/nginx/default.conf.template b/airbyte-webapp/nginx/default.conf.template index 95981627154a5..25de5efe4ad34 100644 --- a/airbyte-webapp/nginx/default.conf.template +++ b/airbyte-webapp/nginx/default.conf.template @@ -2,6 +2,10 @@ upstream api-server { server $INTERNAL_API_HOST; } +upstream connector-builder-server { + server $CONNECTOR_BUILDER_API_HOST; +} + server { listen 80; listen [::]:80; @@ -40,4 +44,11 @@ server { client_max_body_size 200M; proxy_pass http://api-server/api/; } + + location /connector-builder-api/ { + fastcgi_read_timeout 1h; + proxy_read_timeout 1h; + client_max_body_size 200M; + proxy_pass http://connector-builder-server/; + } } diff --git a/airbyte-webapp/orval.config.ts b/airbyte-webapp/orval.config.ts index daac7be19c0c3..f859eaa5e4e21 100644 --- a/airbyte-webapp/orval.config.ts +++ b/airbyte-webapp/orval.config.ts @@ -23,7 +23,7 @@ export default defineConfig({ }, }, connectorBuilder: { - input: "../connector-builder-server/src/main/openapi/openapi.yaml", + input: "../airbyte-connector-builder-server/src/main/openapi/openapi.yaml", output: { target: "./src/core/request/ConnectorBuilderClient.ts", prettier: true, diff --git a/airbyte-webapp/package-lock.json b/airbyte-webapp/package-lock.json index 4b9bff7b1037e..0073dbfe7812a 100644 --- a/airbyte-webapp/package-lock.json +++ b/airbyte-webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "airbyte-webapp", - "version": "0.40.18", + "version": "0.40.19", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "airbyte-webapp", - "version": "0.40.18", + "version": "0.40.19", "dependencies": { "@datadog/browser-rum": "^4.21.2", "@floating-ui/react-dom": "^1.0.0", @@ -93,6 +93,7 @@ "@types/unist": "^2.0.5", "@typescript-eslint/eslint-plugin": "^5.27.1", "@typescript-eslint/parser": "^5.27.1", + "dotenv": "^16.0.3", "eslint-config-prettier": "^8.5.0", "eslint-config-react-app": "^7.0.1", "eslint-plugin-css-modules": "^2.11.0", @@ -20660,12 +20661,12 @@ } }, "node_modules/dotenv": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", - "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz", + "integrity": "sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==", "dev": true, "engines": { - "node": ">=10" + "node": ">=12" } }, "node_modules/dotenv-expand": { @@ -21592,18 +21593,6 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true }, - "node_modules/eslint-plugin-jsx-a11y/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/eslint-plugin-prettier": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-4.0.0.tgz", @@ 
-30037,6 +30026,15 @@ "yarn": ">=1.0.0" } }, + "node_modules/lazy-universal-dotenv/node_modules/dotenv": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", + "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "dev": true, + "engines": { + "node": ">=10" + } + }, "node_modules/leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -32801,9 +32799,9 @@ "dev": true }, "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dependencies": { "brace-expansion": "^1.1.7" }, @@ -49425,7 +49423,7 @@ "ignore": "^4.0.6", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "strip-json-comments": "^3.1.1" }, "dependencies": { @@ -50300,7 +50298,7 @@ "requires": { "@humanwhocodes/object-schema": "^1.2.1", "debug": "^4.1.1", - "minimatch": "^3.0.4" + "minimatch": "^3.0.5" } }, "@humanwhocodes/object-schema": { @@ -52383,7 +52381,7 @@ "jsonpath-plus": "6.0.1", "lodash": "~4.17.21", "lodash.topath": "^4.5.2", - "minimatch": "3.0.4", + "minimatch": "^3.0.5", "nimma": "0.1.7", "simple-eval": "1.0.0", "tslib": "^2.3.0" @@ -53398,7 +53396,7 @@ "@babel/code-frame": "^7.5.5", "chalk": "^2.4.1", "micromatch": "^3.1.10", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "semver": "^5.6.0", "tapable": "^1.0.0", "worker-rpc": "^0.1.0" @@ -63414,9 +63412,9 @@ } }, "dotenv": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", - "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz", + "integrity": "sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==", "dev": true }, "dotenv-expand": { @@ -63894,7 +63892,7 @@ "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "natural-compare": "^1.4.0", "optionator": "^0.9.1", "regexpp": "^3.2.0", @@ -64184,7 +64182,7 @@ "has": "^1.0.3", "is-core-module": "^2.8.0", "is-glob": "^4.0.3", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "object.values": "^1.1.5", "resolve": "^1.20.0", "tsconfig-paths": "^3.12.0" @@ -64253,7 +64251,7 @@ "has": "^1.0.3", "jsx-ast-utils": "^3.3.2", "language-tags": "^1.0.5", - "minimatch": "^3.1.2", + "minimatch": "^3.0.5", "semver": "^6.3.0" }, "dependencies": { @@ -64262,15 +64260,6 @@ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "dev": true - }, - "minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } } } }, @@ -64294,7 +64283,7 @@ "doctrine": "^2.1.0", "estraverse": 
"^5.3.0", "jsx-ast-utils": "^2.4.1 || ^3.0.0", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "object.entries": "^1.1.5", "object.fromentries": "^2.0.5", "object.hasown": "^1.1.0", @@ -65113,7 +65102,7 @@ "integrity": "sha512-LwjCsruLWQULGYKy7TX0OPtrL9kLpojOFKc5VCTxdFTV7w5zbsgqVKfnkKG7Qgjtq50gKfO56hJv88OfcGb70Q==", "dev": true, "requires": { - "minimatch": "^5.0.1" + "minimatch": "^3.0.5" }, "dependencies": { "brace-expansion": { @@ -65126,8 +65115,7 @@ } }, "minimatch": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "version": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", "dev": true, "requires": { @@ -65341,7 +65329,7 @@ "fs-extra": "^9.0.0", "glob": "^7.1.6", "memfs": "^3.1.2", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "schema-utils": "2.7.0", "semver": "^7.3.2", "tapable": "^1.0.0" @@ -65845,7 +65833,7 @@ "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", - "minimatch": "^3.0.4", + "minimatch": "^3.0.5", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } @@ -67652,7 +67640,7 @@ "async": "^3.2.3", "chalk": "^4.0.2", "filelist": "^1.0.1", - "minimatch": "^3.0.4" + "minimatch": "^3.0.5" }, "dependencies": { "ansi-styles": { @@ -70657,6 +70645,14 @@ "core-js": "^3.0.4", "dotenv": "^8.0.0", "dotenv-expand": "^5.1.0" + }, + "dependencies": { + "dotenv": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz", + "integrity": "sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==", + "dev": true + } } }, "leven": { @@ -72623,9 +72619,9 @@ "dev": true }, "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "requires": { "brace-expansion": "^1.1.7" } @@ -72982,7 +72978,7 @@ "integrity": "sha1-X1Zl2TNRM1yqvvjxxVRRbPXx5OU=", "dev": true, "requires": { - "minimatch": "^3.0.2" + "minimatch": "^3.0.5" } }, "node-fetch": { @@ -77618,7 +77614,7 @@ "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", "dev": true, "requires": { - "minimatch": "3.0.4" + "minimatch": "^3.0.5" } }, "redent": { @@ -81260,7 +81256,7 @@ "requires": { "@istanbuljs/schema": "^0.1.2", "glob": "^7.1.4", - "minimatch": "^3.0.4" + "minimatch": "^3.0.5" } }, "text-encoding-utf-8": { diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 9c35073c50227..b32f4fb232579 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -1,6 +1,6 @@ { "name": "airbyte-webapp", - "version": "0.40.18", + "version": "0.40.19", "private": true, "engines": { "node": ">=16.0.0" @@ -9,6 +9,8 @@ "prepare": "cd .. 
&& husky install airbyte-webapp/.husky", "prestart": "npm run generate-client", "start": "craco start", + "prestart:cloud": "npm run generate-client", + "start:cloud": "AB_ENV=frontend-dev node -r ./scripts/environment.js ./node_modules/.bin/craco start", "prebuild": "npm run generate-client", "build": "BUILD_PATH='./build/app' craco build", "pretest": "npm run generate-client", @@ -110,6 +112,7 @@ "@types/unist": "^2.0.5", "@typescript-eslint/eslint-plugin": "^5.27.1", "@typescript-eslint/parser": "^5.27.1", + "dotenv": "^16.0.3", "eslint-config-prettier": "^8.5.0", "eslint-config-react-app": "^7.0.1", "eslint-plugin-css-modules": "^2.11.0", @@ -138,6 +141,9 @@ "ts-node": "^10.8.1", "typescript": "^4.7.3" }, + "overrides": { + "minimatch": "^3.0.5" + }, "lint-staged": { "src/**/*.{js,jsx,ts,tsx}": [ "eslint --fix" diff --git a/airbyte-webapp/public/images/external/dbt-bit_tm.png b/airbyte-webapp/public/images/external/dbt-bit_tm.png deleted file mode 100644 index b20774dced5ac..0000000000000 Binary files a/airbyte-webapp/public/images/external/dbt-bit_tm.png and /dev/null differ diff --git a/airbyte-webapp/public/index.html b/airbyte-webapp/public/index.html index 9d9d4611d7f2f..b04f783d9e6e1 100644 --- a/airbyte-webapp/public/index.html +++ b/airbyte-webapp/public/index.html @@ -17,7 +17,7 @@
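The new `start:cloud` script in airbyte-webapp/package.json runs `AB_ENV=frontend-dev node -r ./scripts/environment.js ./node_modules/.bin/craco start`, and `dotenv` (^16.0.3) is added as a dev dependency alongside it. The preload script itself is not part of this diff, so the sketch below is only an assumption of how a `node -r` preload could use dotenv to layer an `AB_ENV`-specific env file onto the defaults; the `.env.<AB_ENV>` naming and the fallback logic are illustrative, not taken from the repository.

```js
// Hypothetical sketch of a `node -r` preload (not the actual scripts/environment.js):
// load an AB_ENV-specific dotenv file before the craco dev server starts.
const path = require("path");
const dotenv = require("dotenv");

const abEnv = process.env.AB_ENV; // e.g. "frontend-dev" when run via `npm run start:cloud`
if (abEnv) {
  // Assumed naming convention: .env.<AB_ENV> sitting next to package.json.
  const envFile = path.resolve(__dirname, "..", `.env.${abEnv}`);
  const result = dotenv.config({ path: envFile });
  if (result.error) {
    console.warn(`Could not load ${envFile}: ${result.error.message}`);
  }
}

// Fall back to the default .env; dotenv will not overwrite variables already set above.
dotenv.config();
```

Loading the environment-specific file first matters because dotenv, by default, does not override variables that are already present in `process.env`, so values from `.env.<AB_ENV>` win over the shared `.env` defaults in this sketch.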