diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
index 2f28ca09d9081..5252aa52e4901 100644
--- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml
@@ -668,3 +668,9 @@
dockerImageTag: 0.1.0
documentationUrl: https://docs.airbyte.io/integrations/sources/azure-table
sourceType: database
+- sourceDefinitionId: f1e4c7f6-db5c-4035-981f-d35ab4998794
+ name: Zenloop
+ dockerRepository: airbyte/source-zenloop
+ dockerImageTag: 0.1.0
+ documentationUrl: https://docs.airbyte.io/integrations/sources/zenloop
+ sourceType: api
diff --git a/airbyte-config/init/src/main/resources/seed/source_specs.yaml b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
index 4f0063d8e2c79..1dde0aec94165 100644
--- a/airbyte-config/init/src/main/resources/seed/source_specs.yaml
+++ b/airbyte-config/init/src/main/resources/seed/source_specs.yaml
@@ -6486,3 +6486,39 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
+- dockerImage: "airbyte/source-zenloop:0.1.0"
+ spec:
+ documentationUrl: "https://docsurl.com"
+ connectionSpecification:
+ $schema: "http://json-schema.org/draft-07/schema#"
+ title: "Zenloop Spec"
+ type: "object"
+ required:
+ - "api_token"
+ additionalProperties: false
+ properties:
+ api_token:
+ type: "string"
+ description: "Zenloop API Token. You can get the API token in settings page\
+ \ here "
+ airbyte_secret: true
+ date_from:
+ type: "string"
+ description: "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24.\
+ \ Leave empty if only data from current data should be synced"
+ examples:
+ - "2021-10-24T03:30:30Z"
+ survey_id:
+ type: "string"
+ description: "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys"
+ airbyte_secret: true
+ survey_group_id:
+ type: "string"
+ description: "Zenloop Survey Group ID. Can be found by pulling All Survey\
+ \ Groups via SurveyGroups stream. Leave empty to pull answers from all\
+ \ survey groups"
+ airbyte_secret: true
+ supportsNormalization: false
+ supportsDBT: false
+ supported_destination_sync_modes: []
diff --git a/airbyte-integrations/connectors/source-zenloop/.dockerignore b/airbyte-integrations/connectors/source-zenloop/.dockerignore
new file mode 100644
index 0000000000000..19bdf12d9d7a6
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/.dockerignore
@@ -0,0 +1,7 @@
+*
+!Dockerfile
+!Dockerfile.test
+!main.py
+!source_zenloop
+!setup.py
+!secrets
diff --git a/airbyte-integrations/connectors/source-zenloop/Dockerfile b/airbyte-integrations/connectors/source-zenloop/Dockerfile
new file mode 100644
index 0000000000000..67385332a6d82
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/Dockerfile
@@ -0,0 +1,38 @@
+FROM python:3.7.11-alpine3.14 as base
+
+# build and load all requirements
+FROM base as builder
+WORKDIR /airbyte/integration_code
+
+# upgrade pip to the latest version
+RUN apk --no-cache upgrade \
+ && pip install --upgrade pip \
+ && apk --no-cache add tzdata build-base
+
+
+COPY setup.py ./
+# install necessary packages to a temporary folder
+RUN pip install --prefix=/install .
+
+# build a clean environment
+FROM base
+WORKDIR /airbyte/integration_code
+
+# copy all loaded and built libraries to a pure basic image
+COPY --from=builder /install /usr/local
+# add default timezone settings
+COPY --from=builder /usr/share/zoneinfo/Etc/UTC /etc/localtime
+RUN echo "Etc/UTC" > /etc/timezone
+
+# bash is installed for more convenient debugging.
+RUN apk --no-cache add bash
+
+# copy payload code only
+COPY main.py ./
+COPY source_zenloop ./source_zenloop
+
+ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
+ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]
+
+LABEL io.airbyte.version=0.1.0
+LABEL io.airbyte.name=airbyte/source-zenloop
diff --git a/airbyte-integrations/connectors/source-zenloop/README.md b/airbyte-integrations/connectors/source-zenloop/README.md
new file mode 100644
index 0000000000000..c93616b0c476f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/README.md
@@ -0,0 +1,132 @@
+# Zenloop Source
+
+This is the repository for the Zenloop source connector, written in Python.
+For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/zenloop).
+
+## Local development
+
+### Prerequisites
+**To iterate on this connector, make sure to complete this prerequisites section.**
+
+#### Minimum Python version required `= 3.7.0`
+
+#### Build & Activate Virtual Environment and install dependencies
+From this connector directory, create a virtual environment:
+```
+python -m venv .venv
+```
+
+This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your
+development environment of choice. To activate it from the terminal, run:
+```
+source .venv/bin/activate
+pip install -r requirements.txt
+pip install '.[tests]'
+```
+If you are in an IDE, follow your IDE's instructions to activate the virtualenv.
+
+Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
+used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
+If this is mumbo jumbo to you, don't worry about it, just put your deps in `setup.py` but install using `pip install -r requirements.txt` and everything
+should work as you expect.
+
+#### Building via Gradle
+You can also build the connector in Gradle. This is typically used in CI and not needed for your development workflow.
+
+To build using Gradle, from the Airbyte repository root, run:
+```
+./gradlew :airbyte-integrations:connectors:source-zenloop:build
+```
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/zenloop)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_zenloop/spec.json` file.
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
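+
+For reference, a minimal `secrets/config.json` for this connector looks like the following (values are placeholders; only `api_token` is required by the spec):
+```
+{
+  "api_token": "<your-zenloop-api-token>",
+  "date_from": "2021-01-01",
+  "survey_id": "",
+  "survey_group_id": ""
+}
+```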
+
+**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source zenloop test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector
+```
+python main.py spec
+python main.py check --config secrets/config.json
+python main.py discover --config secrets/config.json
+python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Locally running the connector docker image
+
+#### Build
+First, make sure you build the latest Docker image:
+```
+docker build . -t airbyte/source-zenloop:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-zenloop:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-zenloop:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zenloop:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-zenloop:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-zenloop:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
+First install test dependencies into your virtual environment:
+```
+pip install .[tests]
+```
+### Unit Tests
+To run unit tests locally, from the connector directory run:
+```
+python -m pytest unit_tests
+```
+
+### Integration Tests
+There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
+#### Custom Integration tests
+Place custom tests inside `integration_tests/` folder, then, from the connector root, run
+```
+python -m pytest integration_tests
+```
+#### Acceptance Tests
+Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference) for more information.
+If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py.
+To run your integration tests with acceptance tests, from the connector root, run
+```
+python -m pytest integration_tests -p integration_tests.acceptance
+```
+To run your acceptance tests in Docker, use the `acceptance-test-docker.sh` script in this connector directory.
+
+### Using gradle to run tests
+All commands should be run from airbyte project root.
+To run unit tests:
+```
+./gradlew :airbyte-integrations:connectors:source-zenloop:unitTest
+```
+To run acceptance and custom integration tests:
+```
+./gradlew :airbyte-integrations:connectors:source-zenloop:integrationTest
+```
+
+## Dependency Management
+All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development.
+We split dependencies between two groups:
+* dependencies required for your connector to work go in the `MAIN_REQUIREMENTS` list.
+* dependencies required for testing go in the `TEST_REQUIREMENTS` list.
+
+### Publishing a new version of the connector
+You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what?
+1. Make sure your changes are passing unit and integration tests.
+1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)).
+1. Create a Pull Request.
+1. Pat yourself on the back for being an awesome contributor.
+1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master.
diff --git a/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml
new file mode 100644
index 0000000000000..bb7a5757b6d6c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/acceptance-test-config.yml
@@ -0,0 +1,24 @@
+# See [Source Acceptance Tests](https://docs.airbyte.io/connector-development/testing-connectors/source-acceptance-tests-reference)
+# for more information about how to configure these tests
+connector_image: airbyte/source-zenloop:dev
+tests:
+ spec:
+ - spec_path: "source_zenloop/spec.json"
+ connection:
+ - config_path: "secrets/config.json"
+ status: "succeed"
+ - config_path: "integration_tests/invalid_config.json"
+ status: "failed"
+ discovery:
+ - config_path: "secrets/config.json"
+ basic_read:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
+ empty_streams: []
+ incremental:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
+ future_state_path: "integration_tests/abnormal_state.json"
+ full_refresh:
+ - config_path: "secrets/config.json"
+ configured_catalog_path: "integration_tests/configured_catalog.json"
diff --git a/airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh
new file mode 100644
index 0000000000000..e4d8b1cef8961
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/acceptance-test-docker.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env sh
+
+# Build latest connector image
+docker build . -t $(cat acceptance-test-config.yml | grep "connector_image" | head -n 1 | cut -d: -f2)
+
+# Pull latest acctest image
+docker pull airbyte/source-acceptance-test:latest
+
+# Run
+docker run --rm -it \
+ -v /var/run/docker.sock:/var/run/docker.sock \
+ -v /tmp:/tmp \
+ -v $(pwd):/test_input \
+ airbyte/source-acceptance-test \
+ --acceptance-test-config /test_input
+
diff --git a/airbyte-integrations/connectors/source-zenloop/bootstrap.md b/airbyte-integrations/connectors/source-zenloop/bootstrap.md
new file mode 100644
index 0000000000000..fd0f7097c8d8b
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/bootstrap.md
@@ -0,0 +1,19 @@
+# Zenloop
+
+## Overview
+
+Zenloop is an integrated experience management platform (IXM). The SaaS solution automatically collects customer feedback through various channels along the customer journey, analyzes and clusters it, and derives tailored measures to retain satisfied customers as well as win back churning customers. The Zenloop REST API allows a developer to retrieve survey and answer information from the Zenloop platform.
+
+## Endpoints
+
+The Zenloop API consists of four endpoints from which data can be extracted:
+
+1. **Surveys**: This endpoint is used for getting basic data for all surveys from the user's organization.
+2. **Answers**: This endpoint is used for fetching survey answers along with basic survey data and aggregated NPS scores.
+3. **Survey Groups**: This endpoint is used for getting basic data for all survey groups from the user's organization.
+4. **Survey Group Answers**: This endpoint is used for fetching survey group answers along with basic survey data and aggregated NPS scores.
+
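+The connector reads each endpoint with authenticated GET requests and pages through results using the `meta` object (`page`, `per_page`, `total`) returned by the API. The sketch below shows roughly how the Answers endpoint is consumed; the token and survey `public_hash_id` are placeholders, and the date filters the connector normally sends are omitted for brevity.
+
+```python
+import math
+
+import requests
+
+API_TOKEN = "<zenloop-api-token>"           # placeholder
+SURVEY_HASH_ID = "<survey-public-hash-id>"  # placeholder
+BASE_URL = "https://api.zenloop.com/v1/"
+
+headers = {"Authorization": f"Bearer {API_TOKEN}"}
+page = 1
+while True:
+    response = requests.get(
+        f"{BASE_URL}surveys/{SURVEY_HASH_ID}/answers",
+        headers=headers,
+        params={"page": page},
+    )
+    response.raise_for_status()
+    body = response.json()
+    for answer in body.get("answers", []):
+        print(answer["id"], answer.get("score"))
+    # stop once all pages have been read (total / per_page)
+    meta = body["meta"]
+    if page >= math.ceil(meta["total"] / meta["per_page"]):
+        break
+    page += 1
+```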
+
+## API Reference
+
+The API reference documentation: [https://docs.zenloop.com/reference](https://docs.zenloop.com/reference)
diff --git a/airbyte-integrations/connectors/source-zenloop/build.gradle b/airbyte-integrations/connectors/source-zenloop/build.gradle
new file mode 100644
index 0000000000000..637af6a16d64a
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/build.gradle
@@ -0,0 +1,14 @@
+plugins {
+ id 'airbyte-python'
+ id 'airbyte-docker'
+ id 'airbyte-source-acceptance-test'
+}
+
+airbytePython {
+ moduleDirectory 'source_zenloop'
+}
+
+dependencies {
+ implementation files(project(':airbyte-integrations:bases:source-acceptance-test').airbyteDocker.outputs)
+ implementation files(project(':airbyte-integrations:bases:base-python').airbyteDocker.outputs)
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py b/airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py
new file mode 100644
index 0000000000000..46b7376756ec6
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json
new file mode 100644
index 0000000000000..681b893354634
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/abnormal_state.json
@@ -0,0 +1,8 @@
+{
+ "answers": {
+ "inserted_at": "2099-08-18T08:35:49.540Z"
+ },
+ "answers_survey_group": {
+ "inserted_at": "2099-08-18T08:35:49.540Z"
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py
new file mode 100644
index 0000000000000..0347f2a0b143d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/acceptance.py
@@ -0,0 +1,14 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+import pytest
+
+pytest_plugins = ("source_acceptance_test.plugin",)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def connector_setup():
+ """This fixture is a placeholder for external resources that acceptance test might require."""
+ yield
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json
new file mode 100644
index 0000000000000..14c918c25287d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/catalog.json
@@ -0,0 +1,30 @@
+{
+ "streams": [
+ {
+ "name": "answers",
+ "supported_sync_modes": ["full_refresh", "incremental"],
+ "source_defined_cursor": true,
+ "default_cursor_field": "test",
+ "json_schema": {}
+ },
+ {
+ "name": "surveys",
+ "supported_sync_modes": ["full_refresh"],
+ "source_defined_cursor": true,
+ "json_schema": {}
+ },
+ {
+ "name": "survey_groups",
+ "supported_sync_modes": ["full_refresh"],
+ "source_defined_cursor": true,
+ "json_schema": {}
+ },
+ {
+ "name": "answers_survey_group",
+ "supported_sync_modes": ["full_refresh", "incremental"],
+ "source_defined_cursor": true,
+ "default_cursor_field": "test",
+ "json_schema": {}
+ }
+ ]
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json
new file mode 100644
index 0000000000000..47fe90a48f233
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/configured_catalog.json
@@ -0,0 +1,40 @@
+{
+ "streams": [
+ {
+ "stream": {
+ "name": "answers",
+ "json_schema": {},
+ "supported_sync_modes": ["full_refresh", "incremental"]
+ },
+ "sync_mode": "incremental",
+ "destination_sync_mode": "append"
+ },
+ {
+ "stream": {
+ "name": "surveys",
+ "json_schema": {},
+ "supported_sync_modes": ["full_refresh"]
+ },
+ "sync_mode": "full_refresh",
+ "destination_sync_mode": "overwrite"
+ },
+ {
+ "stream": {
+ "name": "answers_survey_group",
+ "json_schema": {},
+ "supported_sync_modes": ["full_refresh", "incremental"]
+ },
+ "sync_mode": "incremental",
+ "destination_sync_mode": "append"
+ },
+ {
+ "stream": {
+ "name": "survey_groups",
+ "json_schema": {},
+ "supported_sync_modes": ["full_refresh"]
+ },
+ "sync_mode": "full_refresh",
+ "destination_sync_mode": "overwrite"
+ }
+ ]
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json
new file mode 100644
index 0000000000000..91f11b9b150a4
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/invalid_config.json
@@ -0,0 +1,6 @@
+{
+ "api_token": "wrong key",
+ "date_from": "2021-04-01T04:20:02Z",
+ "survey_id": "wrong key",
+ "survey_group_id": "wrong key"
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json
new file mode 100644
index 0000000000000..194d4b1404f45
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_config.json
@@ -0,0 +1,6 @@
+{
+ "api_token": "",
+ "date_from": "2021-01-01",
+ "survey_id": "",
+ "survey_group_id": ""
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json
new file mode 100644
index 0000000000000..cbc8e3dfa4ee1
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/integration_tests/sample_state.json
@@ -0,0 +1,8 @@
+{
+ "answers": {
+ "inserted_at": "2021-08-18T08:35:49.540Z"
+ },
+ "answers_survey_group": {
+ "inserted_at": "2021-08-18T08:35:49.540Z"
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/main.py b/airbyte-integrations/connectors/source-zenloop/main.py
new file mode 100644
index 0000000000000..4a69290ac2023
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/main.py
@@ -0,0 +1,13 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+import sys
+
+from airbyte_cdk.entrypoint import launch
+from source_zenloop import SourceZenloop
+
+if __name__ == "__main__":
+ source = SourceZenloop()
+ launch(source, sys.argv[1:])
diff --git a/airbyte-integrations/connectors/source-zenloop/requirements.txt b/airbyte-integrations/connectors/source-zenloop/requirements.txt
new file mode 100644
index 0000000000000..0411042aa0911
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/requirements.txt
@@ -0,0 +1,2 @@
+-e ../../bases/source-acceptance-test
+-e .
diff --git a/airbyte-integrations/connectors/source-zenloop/sample_files/config.json b/airbyte-integrations/connectors/source-zenloop/sample_files/config.json
new file mode 100644
index 0000000000000..194d4b1404f45
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/sample_files/config.json
@@ -0,0 +1,6 @@
+{
+ "api_token": "",
+ "date_from": "2021-01-01",
+ "survey_id": "",
+ "survey_group_id": ""
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/setup.py b/airbyte-integrations/connectors/source-zenloop/setup.py
new file mode 100644
index 0000000000000..92509be18873d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/setup.py
@@ -0,0 +1,30 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from setuptools import find_packages, setup
+
+MAIN_REQUIREMENTS = [
+ "airbyte-cdk",
+]
+
+TEST_REQUIREMENTS = [
+ "pytest~=6.1",
+ "pytest-mock~=3.6.1",
+ "source-acceptance-test",
+ "responses~=0.13.3",
+]
+
+setup(
+ name="source_zenloop",
+ description="Source implementation for Zenloop.",
+ author="Alexander Batoulis",
+ author_email="alexander.batoulis@hometogo.com",
+ packages=find_packages(),
+ install_requires=MAIN_REQUIREMENTS,
+ package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]},
+ extras_require={
+ "tests": TEST_REQUIREMENTS,
+ },
+)
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py
new file mode 100644
index 0000000000000..222f86afc829f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/__init__.py
@@ -0,0 +1,8 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from .source import SourceZenloop
+
+__all__ = ["SourceZenloop"]
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json
new file mode 100644
index 0000000000000..86c9fd63cee63
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers.json
@@ -0,0 +1,77 @@
+{
+ "type": ["null", "object"],
+ "properties": {
+ "id": {
+ "type": ["null", "string"]
+ },
+ "score_type": {
+ "type": ["null", "string"]
+ },
+ "score": {
+ "type": ["null", "number"]
+ },
+ "sentiment": {
+ "type": ["null", "string"]
+ },
+ "sentiment_per_label_name": {
+ "type": ["null", "object"]
+ },
+ "name": {
+ "type": ["null", "string"]
+ },
+ "recipient_id": {
+ "type": ["null", "string"]
+ },
+ "property_ids": {
+ "type": ["null", "array"]
+ },
+ "metatags": {
+ "type": ["null", "object"]
+ },
+ "labels": {
+ "type": ["null", "array"]
+ },
+ "labels_with_keywords": {
+ "type": ["null", "object"]
+ },
+ "inserted_at": {
+ "type": ["null", "string"],
+ "format": "date-time"
+ },
+ "email": {
+ "type": ["null", "string"]
+ },
+ "identity": {
+ "type": ["null", "string"]
+ },
+ "identity_type": {
+ "type": ["null", "string"]
+ },
+ "comment": {
+ "type": ["null", "string"]
+ },
+ "translated_comment": {
+ "type": ["null", "string"]
+ },
+ "additional_answers": {
+ "type": ["null", "array"],
+ "items": {
+ "properties": {
+ "additional_question_id": {
+ "type": ["null", "string"]
+ },
+ "answer": {
+ "type": ["null", "string"]
+ },
+ "inserted_at": {
+ "type": ["null", "string"],
+ "format": "date-time"
+ }
+ }
+ }
+ },
+ "additional_questions": {
+ "type": ["null", "object"]
+ }
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json
new file mode 100644
index 0000000000000..b871dbe6ee145
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/answers_survey_group.json
@@ -0,0 +1,63 @@
+{
+ "type": ["null", "object"],
+ "properties": {
+ "id": {
+ "type": ["null", "string"]
+ },
+ "survey_public_hash_id": {
+ "type": ["null", "string"]
+ },
+ "score_type": {
+ "type": ["null", "string"]
+ },
+ "score": {
+ "type": ["null", "number"]
+ },
+ "sentiment": {
+ "type": ["null", "string"]
+ },
+ "sentiment_per_label_name": {
+ "type": ["null", "object"]
+ },
+ "name": {
+ "type": ["null", "string"]
+ },
+ "recipient_id": {
+ "type": ["null", "string"]
+ },
+ "property_ids": {
+ "type": ["null", "array"]
+ },
+ "metatags": {
+ "type": ["null", "object"]
+ },
+ "labels": {
+ "type": ["null", "array"]
+ },
+ "labels_with_keywords": {
+ "type": ["null", "object"]
+ },
+ "inserted_at": {
+ "type": ["null", "string"],
+ "format": "date-time"
+ },
+ "email": {
+ "type": ["null", "string"]
+ },
+ "identity": {
+ "type": ["null", "string"]
+ },
+ "identity_type": {
+ "type": ["null", "string"]
+ },
+ "comment": {
+ "type": ["null", "string"]
+ },
+ "translated_comment": {
+ "type": ["null", "string"]
+ },
+ "additional_questions": {
+ "type": ["null", "object"]
+ }
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json
new file mode 100644
index 0000000000000..e22e0f5bf32f4
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/survey_groups.json
@@ -0,0 +1,35 @@
+{
+ "type": ["null", "object"],
+ "properties": {
+ "surveys": {
+ "type": ["null", "array"],
+ "items": {
+ "properties": {
+ "title": {
+ "type": ["null", "string"]
+ },
+ "status": {
+ "type": ["null", "string"]
+ },
+ "public_hash_id": {
+ "type": ["null", "string"]
+ },
+ "inserted_at": {
+ "type": ["null", "string"],
+ "format": "date-time"
+ }
+ }
+ }
+ },
+ "name": {
+ "type": ["null", "string"]
+ },
+ "public_hash_id": {
+ "type": ["null", "string"]
+ },
+ "inserted_at": {
+ "type": ["null", "string"],
+ "format": "date-time"
+ }
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json
new file mode 100644
index 0000000000000..96e5988801b31
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/schemas/surveys.json
@@ -0,0 +1,18 @@
+{
+ "type": ["null", "object"],
+ "properties": {
+ "title": {
+ "type": ["null", "string"]
+ },
+ "status": {
+ "type": ["null", "string"]
+ },
+ "public_hash_id": {
+ "type": ["null", "string"]
+ },
+ "inserted_at": {
+ "type": ["null", "string"],
+ "format": "date-time"
+ }
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py
new file mode 100644
index 0000000000000..e9d5f4cf6741e
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/source.py
@@ -0,0 +1,231 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+import math
+from abc import ABC
+from datetime import datetime, timedelta
+from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple
+
+import requests
+from airbyte_cdk.sources import AbstractSource
+from airbyte_cdk.sources.streams import Stream
+from airbyte_cdk.sources.streams.http import HttpStream
+from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator
+
+
+class ZenloopStream(HttpStream, ABC):
+
+ url_base = "https://api.zenloop.com/v1/"
+ extra_params = None
+ has_date_param = False
+
+    def __init__(self, api_token: str, date_from: Optional[str], survey_id: Optional[str], survey_group_id: Optional[str], **kwargs):
+ super().__init__(authenticator=api_token)
+ self.api_token = api_token
+ self.date_from = date_from or datetime.today().strftime("%Y-%m-%d")
+ self.survey_id = survey_id or None
+ self.survey_group_id = survey_group_id or None
+
+ def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
+ decoded_response = response.json()
+ page = decoded_response["meta"]["page"]
+ per_page = decoded_response["meta"]["per_page"]
+ total = decoded_response["meta"]["total"]
+
+ if page < math.ceil(total / per_page):
+ return {"page": page + 1}
+ else:
+ return None
+
+ def request_params(
+ self,
+ stream_state: Mapping[str, Any],
+ stream_slice: Mapping[str, Any] = None,
+ next_page_token: Mapping[str, Any] = None,
+ ) -> MutableMapping[str, Any]:
+ if self.has_date_param:
+ params = {"date_from": self.date_from}
+ else:
+ params = {}
+ if self.extra_params:
+ params.update(self.extra_params)
+ if next_page_token:
+ params.update(**next_page_token)
+ return params
+
+ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+ response_json = response.json()
+ yield response_json
+
+
+class ChildStreamMixin:
+
+ parent_stream_class: Optional[ZenloopStream] = None
+
+ def stream_slices(self, sync_mode, stream_state: Mapping[str, Any] = None, **kwargs) -> Iterable[Optional[Mapping[str, any]]]:
+ # determine if parent_stream_class is Surveys or SurveyGroups
+ if self.parent_stream_class.__name__ == "Surveys":
+ public_hash_id = self.survey_id
+ else:
+ public_hash_id = self.survey_group_id
+        # loop through all surveys / survey groups if no id was provided
+        # otherwise yield a single None slice
+ if not public_hash_id:
+ for item in self.parent_stream_class(
+ api_token=self.api_token, date_from=self.date_from, survey_id=self.survey_id, survey_group_id=self.survey_group_id
+ ).read_records(sync_mode=sync_mode):
+ # set date_from to most current cursor_field or date_from if not incremental
+ if stream_state:
+ date_from = stream_state[self.cursor_field]
+ else:
+ date_from = self.date_from
+ yield {"survey_slice": item["public_hash_id"], "date_from": date_from}
+ else:
+ yield None
+
+
+class IncrementalZenloopStream(ZenloopStream, ABC):
+ # checkpoint stream reads after 1000 records.
+ state_checkpoint_interval = 1000
+ cursor_field = "inserted_at"
+
+ def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
+ # latest_record has objects in answers
+ if latest_record:
+ # add 1 second to not pull latest_record again
+ latest_record_date = (
+ datetime.strptime(latest_record[self.cursor_field], "%Y-%m-%dT%H:%M:%S.%fZ") + timedelta(seconds=1)
+ ).isoformat() + str("Z")
+ else:
+ latest_record_date = ""
+ max_record = max(latest_record_date, current_stream_state.get(self.cursor_field, ""))
+ return {self.cursor_field: max_record}
+
+ def request_params(
+ self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
+ ) -> MutableMapping[str, Any]:
+ params = super().request_params(stream_state, stream_slice, next_page_token)
+ if stream_state:
+ # if looped through all slices take its date_from parameter
+ # else no survey_id or survey_group_id provided -> take cursor_field
+ if stream_slice:
+ params["date_from"] = stream_slice["date_from"]
+ else:
+ params["date_from"] = stream_state[self.cursor_field]
+ return params
+
+
+class Surveys(ZenloopStream):
+ # API Doc: https://docs.zenloop.com/reference#get-list-of-surveys
+ primary_key = None
+ has_date_param = False
+ extra_params = {"page": "1"}
+ use_cache = True
+
+ def path(
+ self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
+ ) -> str:
+ return "surveys"
+
+ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+ response_json = response.json()
+ yield from response_json.get("surveys", [])
+
+
+class Answers(ChildStreamMixin, IncrementalZenloopStream):
+ # API Doc: https://docs.zenloop.com/reference#get-answers
+ primary_key = "id"
+ has_date_param = True
+ parent_stream_class = Surveys
+ extra_params = {
+ "page": "1",
+ "order_type": "desc",
+ "order_by": "inserted_at",
+ "date_shortcut": "custom",
+ "date_to": datetime.today().strftime("%Y-%m-%d"),
+ }
+
+ def path(
+ self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
+ ) -> str:
+ # take optional survey_id if entered
+ if self.survey_id:
+ return f"surveys/{self.survey_id}/answers"
+ # slice all survey_id's if nothing provided
+ else:
+ return f"surveys/{stream_slice['survey_slice']}/answers"
+
+ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+ response_json = response.json()
+ # select answers and surveys to be able to link answer to a survey
+ yield from response_json.get("answers", [])
+
+
+class SurveyGroups(ZenloopStream):
+ # API Doc: https://docs.zenloop.com/reference#get-list-of-survey-groups
+ primary_key = None
+ has_date_param = False
+ extra_params = {"page": "1"}
+ use_cache = True
+
+ def path(
+ self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
+ ) -> str:
+ return "survey_groups"
+
+ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+ response_json = response.json()
+ yield from response_json.get("survey_groups", [])
+
+
+class AnswersSurveyGroup(ChildStreamMixin, IncrementalZenloopStream):
+ # API Doc: https://docs.zenloop.com/reference#get-answers-for-survey-group
+ primary_key = "id"
+ has_date_param = True
+ parent_stream_class = SurveyGroups
+ extra_params = {
+ "page": "1",
+ "order_type": "desc",
+ "order_by": "inserted_at",
+ "date_shortcut": "custom",
+ "date_to": datetime.today().strftime("%Y-%m-%d"),
+ }
+
+ def path(
+ self, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
+ ) -> str:
+ # take optional survey_group_id if entered
+ if self.survey_group_id:
+ return f"survey_groups/{self.survey_group_id}/answers"
+ # slice all survey_group_id's if nothing provided
+ else:
+ return f"survey_groups/{stream_slice['survey_slice']}/answers"
+
+ def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
+ response_json = response.json()
+ # select answers and surveys to be able to link answer to a survey
+ yield from response_json.get("answers", [])
+
+
+class SourceZenloop(AbstractSource):
+ def check_connection(self, logger, config) -> Tuple[bool, any]:
+ try:
+ authenticator = TokenAuthenticator(config["api_token"])
+ url = f"{ZenloopStream.url_base}surveys"
+
+ session = requests.get(url, headers=authenticator.get_auth_header())
+ session.raise_for_status()
+ return True, None
+ except Exception as error:
+ return False, f"Unable to connect to Zenloop API with the provided credentials - {error}"
+
+ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+ args = {
+ "api_token": TokenAuthenticator(token=config["api_token"]),
+ "date_from": config["date_from"],
+ "survey_id": config.get("survey_id"),
+ "survey_group_id": config.get("survey_group_id"),
+ }
+ return [Surveys(**args), Answers(**args), SurveyGroups(**args), AnswersSurveyGroup(**args)]
diff --git a/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json b/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json
new file mode 100644
index 0000000000000..cbf078676d325
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/source_zenloop/spec.json
@@ -0,0 +1,32 @@
+{
+ "documentationUrl": "https://docsurl.com",
+ "connectionSpecification": {
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Zenloop Spec",
+ "type": "object",
+ "required": ["api_token"],
+ "additionalProperties": false,
+ "properties": {
+ "api_token": {
+ "type": "string",
+ "description": "Zenloop API Token. You can get the API token in settings page here ",
+ "airbyte_secret": true
+ },
+ "date_from": {
+ "type": "string",
+ "description": "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24. Leave empty if only data from current data should be synced",
+ "examples": ["2021-10-24T03:30:30Z"]
+ },
+ "survey_id": {
+ "type": "string",
+ "description": "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys",
+ "airbyte_secret": true
+ },
+ "survey_group_id": {
+ "type": "string",
+ "description": "Zenloop Survey Group ID. Can be found by pulling All Survey Groups via SurveyGroups stream. Leave empty to pull answers from all survey groups",
+ "airbyte_secret": true
+ }
+ }
+ }
+}
diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py
new file mode 100644
index 0000000000000..46b7376756ec6
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py
new file mode 100644
index 0000000000000..9e213ecc565cd
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/conftest.py
@@ -0,0 +1,10 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+from pytest import fixture
+
+
+@fixture
+def config():
+ return {"api_token": "", "date_from": "2021-07-01", "survey_id": "", "survey_group_id": ""}
diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py
new file mode 100644
index 0000000000000..78b6d70b1d58d
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_incremental_streams.py
@@ -0,0 +1,101 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+
+from unittest.mock import MagicMock
+
+from airbyte_cdk.models import SyncMode
+from pytest import fixture
+from source_zenloop.source import Answers, AnswersSurveyGroup, IncrementalZenloopStream
+
+
+@fixture
+def patch_incremental_base_class(mocker):
+ # Mock abstract methods to enable instantiating abstract class
+ mocker.patch.object(IncrementalZenloopStream, "path", "v0/example_endpoint")
+ mocker.patch.object(IncrementalZenloopStream, "primary_key", "test_primary_key")
+ mocker.patch.object(IncrementalZenloopStream, "__abstractmethods__", set())
+
+
+def test_cursor_field(patch_incremental_base_class, config):
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"])
+ expected_cursor_field = "inserted_at"
+ assert stream.cursor_field == expected_cursor_field
+
+
+def test_get_updated_state(patch_incremental_base_class, config):
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"])
+ expected_cursor_field = "inserted_at"
+ inputs = {
+ "current_stream_state": {expected_cursor_field: "2021-07-24T03:30:30.038549Z"},
+ "latest_record": {"inserted_at": "2021-10-20T03:30:30.038549Z"},
+ }
+ expected_state = {expected_cursor_field: "2021-10-20T03:30:31.038549Z"}
+ assert stream.get_updated_state(**inputs) == expected_state
+
+
+def test_stream_slices(patch_incremental_base_class, config):
+ expected_cursor_field = "inserted_at"
+ inputs = {
+ "sync_mode": SyncMode.incremental,
+ "cursor_field": expected_cursor_field,
+ "stream_state": {expected_cursor_field: "2021-10-20T03:30:30Z"},
+ }
+ expected_stream_slice = [None]
+
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"])
+ assert list(stream.stream_slices(**inputs)) == expected_stream_slice
+
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], None)
+ assert list(stream.stream_slices(**inputs)) == expected_stream_slice
+
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], None, config["survey_group_id"])
+ assert list(stream.stream_slices(**inputs)) == expected_stream_slice
+
+
+def test_supports_incremental(patch_incremental_base_class, mocker, config):
+ mocker.patch.object(IncrementalZenloopStream, "cursor_field", "dummy_field")
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"])
+ assert stream.supports_incremental
+
+
+def test_source_defined_cursor(patch_incremental_base_class, config):
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"])
+ assert stream.source_defined_cursor
+
+
+def test_stream_checkpoint_interval(patch_incremental_base_class, config):
+ stream = IncrementalZenloopStream(config["api_token"], config["date_from"], config["survey_id"], config["survey_group_id"])
+ expected_checkpoint_interval = 1000
+ assert stream.state_checkpoint_interval == expected_checkpoint_interval
+
+
+def test_parse_response_answers(patch_incremental_base_class, config):
+ stream = Answers(**config)
+ response = MagicMock()
+ response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]}
+ inputs = {"response": response}
+ expected_parsed_object = {"id": 123, "name": "John Doe"}
+ assert next(stream.parse_response(**inputs)) == expected_parsed_object
+
+
+def test_parse_response_answers_survey_groups(patch_incremental_base_class, config):
+ stream = AnswersSurveyGroup(**config)
+ response = MagicMock()
+ response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]}
+ inputs = {"response": response}
+ expected_parsed_object = {"id": 123, "name": "John Doe"}
+ assert next(stream.parse_response(**inputs)) == expected_parsed_object
+
+
+def test_surveys_path(config):
+ stream = Answers(**config)
+ expected = "surveys//answers"
+ assert stream.path() == expected
+
+
+def test_survey_groups_path(config):
+ stream = AnswersSurveyGroup(**config)
+ expected = "survey_groups//answers"
+ assert stream.path() == expected
diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py
new file mode 100644
index 0000000000000..019577d9208cd
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_source.py
@@ -0,0 +1,38 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+from unittest.mock import MagicMock
+
+import responses
+from source_zenloop.source import SourceZenloop
+
+
+@responses.activate
+def test_check_connection_success(mocker, config):
+ responses.add(
+ responses.GET,
+ "https://api.zenloop.com/v1/surveys",
+ )
+ source = SourceZenloop()
+ logger_mock = MagicMock()
+ assert source.check_connection(logger_mock, config) == (True, None)
+
+
+@responses.activate
+def test_check_connection_fail(mocker, config):
+ responses.add(responses.GET, "https://api.zenloop.com/v1/surveys", json={"error": "Unauthorized"}, status=401)
+ source = SourceZenloop()
+ logger_mock = MagicMock()
+ assert source.check_connection(logger_mock, config) == (
+ False,
+ "Unable to connect to Zenloop API with the provided credentials - 401 Client Error: Unauthorized for url: https://api.zenloop.com/v1/surveys",
+ )
+
+
+def test_streams(mocker):
+ source = SourceZenloop()
+ config_mock = MagicMock()
+ streams = source.streams(config_mock)
+ expected_streams_number = 4
+ assert len(streams) == expected_streams_number
diff --git a/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py
new file mode 100644
index 0000000000000..07765274b448f
--- /dev/null
+++ b/airbyte-integrations/connectors/source-zenloop/unit_tests/test_streams.py
@@ -0,0 +1,107 @@
+#
+# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
+#
+
+from http import HTTPStatus
+from unittest.mock import MagicMock
+
+import pytest
+from source_zenloop.source import SurveyGroups, Surveys, ZenloopStream
+
+
+@pytest.fixture
+def patch_base_class(mocker):
+ # Mock abstract methods to enable instantiating abstract class
+ mocker.patch.object(ZenloopStream, "path", "v0/example_endpoint")
+ mocker.patch.object(ZenloopStream, "primary_key", "test_primary_key")
+ mocker.patch.object(ZenloopStream, "__abstractmethods__", set())
+
+
+def test_request_params(patch_base_class, config):
+ stream = ZenloopStream(**config)
+ inputs = {"stream_slice": None, "stream_state": None, "next_page_token": {"page": "1"}}
+ expected_params = {"page": "1"}
+ assert stream.request_params(**inputs) == expected_params
+
+
+def test_next_page_token(patch_base_class, config):
+ stream = ZenloopStream(**config)
+ inputs = {"response": MagicMock()}
+ inputs["response"].json.return_value = {"meta": {"page": 1, "per_page": 12, "total": 8}}
+ expected_token = None
+ assert stream.next_page_token(**inputs) == expected_token
+
+
+def test_parse_response(patch_base_class, config):
+ stream = ZenloopStream(**config)
+ response = MagicMock()
+ response.json.return_value = {"answers": [{"id": 123, "name": "John Doe"}]}
+ inputs = {"response": response}
+ expected_parsed_object = {"answers": [{"id": 123, "name": "John Doe"}]}
+ assert next(stream.parse_response(**inputs)) == expected_parsed_object
+
+
+def test_parse_response_surveys(patch_base_class, config):
+ stream = Surveys(**config)
+ response = MagicMock()
+ response.json.return_value = {"surveys": [{"id": 123, "name": "John Doe"}]}
+ inputs = {"response": response}
+ expected_parsed_object = {"id": 123, "name": "John Doe"}
+ assert next(stream.parse_response(**inputs)) == expected_parsed_object
+
+
+def test_parse_response_survey_groups(patch_base_class, config):
+ stream = SurveyGroups(**config)
+ response = MagicMock()
+ response.json.return_value = {"survey_groups": [{"id": 123, "name": "John Doe"}]}
+ inputs = {"response": response}
+ expected_parsed_object = {"id": 123, "name": "John Doe"}
+ assert next(stream.parse_response(**inputs)) == expected_parsed_object
+
+
+def test_surveys_path(config):
+ stream = Surveys(**config)
+ expected = "surveys"
+ assert stream.path() == expected
+
+
+def test_survey_groups_path(config):
+ stream = SurveyGroups(**config)
+ expected = "survey_groups"
+ assert stream.path() == expected
+
+
+def test_request_headers(patch_base_class, config):
+ stream = ZenloopStream(**config)
+ inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
+ expected_headers = {}
+ assert stream.request_headers(**inputs) == expected_headers
+
+
+def test_http_method(patch_base_class, config):
+ stream = ZenloopStream(**config)
+ expected_method = "GET"
+ assert stream.http_method == expected_method
+
+
+@pytest.mark.parametrize(
+ ("http_status", "should_retry"),
+ [
+ (HTTPStatus.OK, False),
+ (HTTPStatus.BAD_REQUEST, False),
+ (HTTPStatus.TOO_MANY_REQUESTS, True),
+ (HTTPStatus.INTERNAL_SERVER_ERROR, True),
+ ],
+)
+def test_should_retry(patch_base_class, config, http_status, should_retry):
+ response_mock = MagicMock()
+ response_mock.status_code = http_status
+ stream = ZenloopStream(**config)
+ assert stream.should_retry(response_mock) == should_retry
+
+
+def test_backoff_time(patch_base_class, config):
+ response_mock = MagicMock()
+ stream = ZenloopStream(**config)
+ expected_backoff_time = None
+ assert stream.backoff_time(response_mock) == expected_backoff_time
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md
index 297e7b986e5fe..1ed8b71ba23f0 100644
--- a/docs/SUMMARY.md
+++ b/docs/SUMMARY.md
@@ -150,6 +150,7 @@
* [Zendesk Sunshine](integrations/sources/zendesk-sunshine.md)
* [Zendesk Support](integrations/sources/zendesk-support.md)
* [Zendesk Talk](integrations/sources/zendesk-talk.md)
+ * [Zenloop](integrations/sources/zenloop.md)
* [Zoom](integrations/sources/zoom.md)
* [Zuora](integrations/sources/zuora.md)
* [Destinations](integrations/destinations/README.md)
diff --git a/docs/integrations/README.md b/docs/integrations/README.md
index 6fd2c1f128ef9..150f4b5106be5 100644
--- a/docs/integrations/README.md
+++ b/docs/integrations/README.md
@@ -125,6 +125,7 @@ Airbyte uses a grading system for connectors to help users understand what to ex
| [Zendesk Sunshine](sources/zendesk-sunshine.md) | Beta |
| [Zendesk Support](sources/zendesk-support.md) | Certified |
| [Zendesk Talk](sources/zendesk-talk.md) | Certified |
+| [Zenloop](sources/zenloop.md) | Alpha |
| [Zoom](sources/zoom.md) | Beta |
| [Zuora](sources/zuora.md) | Beta |
diff --git a/docs/integrations/sources/zenloop.md b/docs/integrations/sources/zenloop.md
new file mode 100644
index 0000000000000..5722246868464
--- /dev/null
+++ b/docs/integrations/sources/zenloop.md
@@ -0,0 +1,55 @@
+# Zenloop
+
+## Sync overview
+
+This source can sync data for the [Zenloop API](https://docs.zenloop.com/reference). It supports both Full Refresh and Incremental syncs for Answer endpoints. You can choose if this connector will copy only the new or updated data, or all rows in the tables and columns you set up for replication, every time a sync is run.
+
+### Output schema
+
+This Source is capable of syncing the following core Streams:
+
+* [Answers](https://docs.zenloop.com/reference#get-answers) \(Incremental\)
+* [Surveys](https://docs.zenloop.com/reference#get-list-of-surveys)
+* [AnswersSurveyGroup](https://docs.zenloop.com/reference#get-answers-for-survey-group) \(Incremental\)
+* [SurveyGroups](https://docs.zenloop.com/reference#get-list-of-survey-groups)
+
+The `Answers` and `AnswersSurveyGroup` streams can optionally be limited to a single survey or survey group by filling the `survey_id` or `survey_group_id` field of the connector configuration with the corresponding `public_hash_id`. If not provided, answers for all surveys (survey groups) will be pulled.
+
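+For example, to restrict the `Answers` stream to a single survey, the connector configuration could look like this (all values are placeholders):
+
+```json
+{
+  "api_token": "<your-zenloop-api-token>",
+  "date_from": "2021-10-24",
+  "survey_id": "<survey-public-hash-id>",
+  "survey_group_id": ""
+}
+```
+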
+### Data type mapping
+
+| Integration Type | Airbyte Type | Notes |
+| :--- | :--- | :--- |
+| `string` | `string` | |
+| `integer` | `integer` | |
+| `number` | `number` | |
+| `array` | `array` | |
+| `object` | `object` | |
+
+### Features
+
+| Feature | Supported?\(Yes/No\) | Notes |
+| :--- | :--- | :--- |
+| Full Refresh Sync | Yes | |
+| Incremental Sync | Yes | |
+| Namespaces | No | |
+
+### Performance considerations
+
+The Zenloop connector should not run into Zenloop API limitations under normal usage. Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see any rate limit issues that are not automatically retried successfully.
+
+## Getting started
+
+### Requirements
+
+* Zenloop account
+* Zenloop API token
+
+### Setup guide
+
+Please register on Zenloop and retrieve your API token [here](https://app.zenloop.com/settings/api).
+
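+To quickly verify the token outside of Airbyte, you can issue the same request the connector uses for its connection check (a sketch; the token is a placeholder):
+
+```python
+import requests
+
+API_TOKEN = "<your-zenloop-api-token>"  # placeholder
+
+response = requests.get(
+    "https://api.zenloop.com/v1/surveys",
+    headers={"Authorization": f"Bearer {API_TOKEN}"},
+)
+response.raise_for_status()  # raises if the token is not accepted
+print(response.json().get("surveys", []))
+```
+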
+## Changelog
+
+| Version | Date | Pull Request | Subject |
+| :--- | :--- | :--- | :--- |
+| 0.1.0 | 2021-10-26 | [7380](https://github.com/airbytehq/airbyte/pull/7380) | Initial Release |
diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh
index 472816124e584..c71a3d1ca32b9 100755
--- a/tools/bin/ci_credentials.sh
+++ b/tools/bin/ci_credentials.sh
@@ -293,6 +293,7 @@ read_secrets source-zendesk-sunshine "$ZENDESK_SUNSHINE_TEST_CREDS"
read_secrets source-zendesk-support "$ZENDESK_SUPPORT_TEST_CREDS"
read_secrets source-zendesk-support "$ZENDESK_SUPPORT_OAUTH_TEST_CREDS" "config_oauth.json"
read_secrets source-zendesk-talk "$ZENDESK_TALK_TEST_CREDS"
+read_secrets source-zenloop "$SOURCE_ZENLOOP_TEST_CREDS"
read_secrets source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS"
read_secrets source-zuora "$SOURCE_ZUORA_TEST_CREDS"