Merge branch 'master' into devin/1765913587-source-woocommerce-rename-integration-to-mock-server
@@ -139,8 +139,8 @@ runs:
CONNECTOR_VERSION_TAG="${{ inputs.tag-override }}"
echo "🏷 Using provided tag override: $CONNECTOR_VERSION_TAG"
elif [[ "${{ inputs.release-type }}" == "pre-release" ]]; then
-hash=$(git rev-parse --short=10 HEAD)
-CONNECTOR_VERSION_TAG="${CONNECTOR_VERSION}-dev.${hash}"
+hash=$(git rev-parse --short=7 HEAD)
+CONNECTOR_VERSION_TAG="${CONNECTOR_VERSION}-preview.${hash}"
echo "🏷 Using pre-release tag: $CONNECTOR_VERSION_TAG"
else
CONNECTOR_VERSION_TAG="$CONNECTOR_VERSION"

2 .github/pr-welcome-community.md vendored
@@ -21,7 +21,7 @@ As needed or by request, Airbyte Maintainers can execute the following slash com
- `/run-live-tests` - Runs live tests for the modified connector(s).
- `/run-regression-tests` - Runs regression tests for the modified connector(s).
- `/build-connector-images` - Builds and publishes a pre-release docker image for the modified connector(s).
-- `/publish-connectors-prerelease` - Publishes pre-release connector builds (tagged as `{version}-dev.{git-sha}`) for all modified connectors in the PR.
+- `/publish-connectors-prerelease` - Publishes pre-release connector builds (tagged as `{version}-preview.{git-sha}`) for all modified connectors in the PR.

If you have any questions, feel free to ask in the PR comments or join our [Slack community](https://airbytehq.slack.com/).

2 .github/pr-welcome-internal.md vendored
@@ -28,7 +28,7 @@ Airbyte Maintainers (that's you!) can execute the following slash commands on yo
- `/run-live-tests` - Runs live tests for the modified connector(s).
- `/run-regression-tests` - Runs regression tests for the modified connector(s).
- `/build-connector-images` - Builds and publishes a pre-release docker image for the modified connector(s).
-- `/publish-connectors-prerelease` - Publishes pre-release connector builds (tagged as `{version}-dev.{git-sha}`) for all modified connectors in the PR.
+- `/publish-connectors-prerelease` - Publishes pre-release connector builds (tagged as `{version}-preview.{git-sha}`) for all modified connectors in the PR.
- Connector release lifecycle (AI-powered):
  - `/ai-prove-fix` - Runs prerelease readiness checks, including testing against customer connections.
  - `/ai-canary-prerelease` - Rolls out prerelease to 5-10 connections for canary testing.

@@ -3,7 +3,7 @@ name: Publish Connectors Pre-release
# It can be triggered via the /publish-connectors-prerelease slash command from PR comments,
# or via the MCP tool `publish_connector_to_airbyte_registry`.
#
-# Pre-release versions are tagged with the format: {version}-dev.{10-char-git-sha}
+# Pre-release versions are tagged with the format: {version}-preview.{7-char-git-sha}
# These versions are NOT eligible for semver auto-advancement but ARE available
# for version pinning via the scoped_configuration API.
#
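As a minimal illustrative sketch of the tag format described above (not part of the workflow; the function and its inputs are hypothetical), the pre-release tag is simply the connector version plus a 7-character commit SHA:

```python
# Illustrative only: builds a tag in the documented {version}-preview.{7-char-git-sha} format.
# `connector_version` and `git_sha` are hypothetical inputs, not workflow variables.
def preview_tag(connector_version: str, git_sha: str) -> str:
    return f"{connector_version}-preview.{git_sha[:7]}"

assert preview_tag("3.17.0", "ea013c8741f0aabb") == "3.17.0-preview.ea013c8"
```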
@@ -66,7 +66,7 @@ jobs:
- name: Get short SHA
id: get-sha
run: |
-SHORT_SHA=$(git rev-parse --short=10 HEAD)
+SHORT_SHA=$(git rev-parse --short=7 HEAD)
echo "short-sha=$SHORT_SHA" >> $GITHUB_OUTPUT

- name: Get job variables
@@ -135,7 +135,7 @@ jobs:
> Publishing pre-release build for connector `${{ steps.resolve-connector.outputs.connector-name }}`.
> Branch: `${{ inputs.gitref }}`
>
-> Pre-release versions will be tagged as `{version}-dev.${{ steps.get-sha.outputs.short-sha }}`
+> Pre-release versions will be tagged as `{version}-preview.${{ steps.get-sha.outputs.short-sha }}`
> and are available for version pinning via the scoped_configuration API.
>
> [View workflow run](${{ steps.job-vars.outputs.run-url }})

4 .github/workflows/publish_connectors.yml vendored
@@ -305,8 +305,8 @@ jobs:
echo "connector-version=$(poe -qq get-version)" | tee -a $GITHUB_OUTPUT
CONNECTOR_VERSION=$(poe -qq get-version)
if [[ "${{ inputs.release-type }}" == "pre-release" ]]; then
-hash=$(git rev-parse --short=10 HEAD)
-echo "docker-image-tag=${CONNECTOR_VERSION}-dev.${hash}" | tee -a $GITHUB_OUTPUT
+hash=$(git rev-parse --short=7 HEAD)
+echo "docker-image-tag=${CONNECTOR_VERSION}-preview.${hash}" | tee -a $GITHUB_OUTPUT
echo "release-type-flag=--pre-release" | tee -a $GITHUB_OUTPUT
else
echo "docker-image-tag=${CONNECTOR_VERSION}" | tee -a $GITHUB_OUTPUT

@@ -75,7 +75,7 @@ This will copy the specified connector version to your development bucket. This
_💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage/docs/gsutil) installed and have run `gsutil auth login`_

```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-dev.ea013c8741" poetry run poe copy-connector-from-prod
+TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-preview.ea013c8" poetry run poe copy-connector-from-prod
```

### Promote Connector Version to Latest
@@ -87,5 +87,5 @@ _💡 Note: A prerequisite is you have [gsutil](https://cloud.google.com/storage
_⚠️ Warning: Its important to know that this will remove ANY existing files in the latest folder that are not in the versioned folder as it calls `gsutil rsync` with `-d` enabled._

```bash
-TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-dev.ea013c8741" poetry run poe promote-connector-to-latest
+TARGET_BUCKET=<YOUR-DEV_BUCKET> CONNECTOR="airbyte/source-stripe" VERSION="3.17.0-preview.ea013c8" poetry run poe promote-connector-to-latest
```

@@ -434,7 +434,7 @@ def generate_and_persist_registry_entry(
bucket_name (str): The name of the GCS bucket.
repo_metadata_file_path (pathlib.Path): The path to the spec file.
registry_type (str): The registry type.
-docker_image_tag (str): The docker image tag associated with this release. Typically a semver string (e.g. '1.2.3'), possibly with a suffix (e.g. '1.2.3-dev.abcde12345')
+docker_image_tag (str): The docker image tag associated with this release. Typically a semver string (e.g. '1.2.3'), possibly with a suffix (e.g. '1.2.3-preview.abcde12')
is_prerelease (bool): Whether this is a prerelease, or a main release.
"""
# Read the repo metadata dict to bootstrap ourselves. We need the docker repository,
@@ -444,7 +444,7 @@ def generate_and_persist_registry_entry(

try:
# Now that we have the docker repo, read the appropriate versioned metadata from GCS.
-# This metadata will differ in a few fields (e.g. in prerelease mode, dockerImageTag will contain the actual prerelease tag `1.2.3-dev.abcde12345`),
+# This metadata will differ in a few fields (e.g. in prerelease mode, dockerImageTag will contain the actual prerelease tag `1.2.3-preview.abcde12`),
# so we'll treat this as the source of truth (ish. See below for how we handle the registryOverrides field.)
gcs_client = get_gcs_storage_client(gcs_creds=os.environ.get("GCS_CREDENTIALS"))
bucket = gcs_client.bucket(bucket_name)
@@ -533,7 +533,9 @@ def generate_and_persist_registry_entry(

# For latest versions that are disabled, delete any existing registry entry to remove it from the registry
if (
-"-rc" not in metadata_dict["data"]["dockerImageTag"] and "-dev" not in metadata_dict["data"]["dockerImageTag"]
+"-rc" not in metadata_dict["data"]["dockerImageTag"]
+and "-dev" not in metadata_dict["data"]["dockerImageTag"]
+and "-preview" not in metadata_dict["data"]["dockerImageTag"]
) and not metadata_dict["data"]["registryOverrides"][registry_type]["enabled"]:
logger.info(
f"{registry_type} is not enabled: deleting existing {registry_type} registry entry for {metadata_dict['data']['dockerRepository']} at latest path."

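A rough sketch of the gate above, assuming only the suffix check matters (the helper below is hypothetical, not part of this module): a latest registry entry is only eligible for deletion when its tag carries none of the pre-release suffixes.

```python
# Hypothetical helper mirroring the condition above; not the module's API.
PRERELEASE_MARKERS = ("-rc", "-dev", "-preview")

def is_plain_release_tag(docker_image_tag: str) -> bool:
    # True only when the tag has no rc/dev/preview suffix.
    return not any(marker in docker_image_tag for marker in PRERELEASE_MARKERS)

assert is_plain_release_tag("2.1.18")
assert not is_plain_release_tag("2.0.0-preview.cf3628c")
```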
@@ -5,7 +5,7 @@ data:
connectorType: source
dockerRepository: airbyte/image-exists-1
githubIssueLabel: source-alloydb-strict-encrypt
-dockerImageTag: 2.0.0-dev.cf3628ccf3
+dockerImageTag: 2.0.0-preview.cf3628c
documentationUrl: https://docs.airbyte.com/integrations/sources/existingsource
connectorSubtype: database
releaseStage: generally_available

@@ -231,7 +231,7 @@ def test_upload_prerelease(mocker, valid_metadata_yaml_files, tmp_path):
mocker.patch.object(commands.click, "secho")
mocker.patch.object(commands, "upload_metadata_to_gcs")

-prerelease_tag = "0.3.0-dev.6d33165120"
+prerelease_tag = "0.3.0-preview.6d33165"
bucket = "my-bucket"
metadata_file_path = valid_metadata_yaml_files[0]
validator_opts = ValidatorOptions(docs_path=str(tmp_path), prerelease_tag=prerelease_tag)

@@ -582,7 +582,7 @@ def test_upload_metadata_to_gcs_invalid_docker_images(mocker, invalid_metadata_u
def test_upload_metadata_to_gcs_with_prerelease(mocker, valid_metadata_upload_files, tmp_path):
mocker.spy(gcs_upload, "_file_upload")
mocker.spy(gcs_upload, "upload_file_if_changed")
-prerelease_image_tag = "1.5.6-dev.f80318f754"
+prerelease_image_tag = "1.5.6-preview.f80318f"

for valid_metadata_upload_file in valid_metadata_upload_files:
tmp_metadata_file_path = tmp_path / "metadata.yaml"
@@ -701,7 +701,7 @@ def test_upload_metadata_to_gcs_release_candidate(mocker, get_fixture_path, tmp_
)
assert metadata.data.releases.rolloutConfiguration.enableProgressiveRollout

-prerelease_tag = "1.5.6-dev.f80318f754" if prerelease else None
+prerelease_tag = "1.5.6-preview.f80318f" if prerelease else None

upload_info = gcs_upload.upload_metadata_to_gcs(
"my_bucket",

@@ -110,14 +110,14 @@ class PublishConnectorContext(ConnectorContext):

@property
def pre_release_suffix(self) -> str:
-return self.git_revision[:10]
+return self.git_revision[:7]

@property
def docker_image_tag(self) -> str:
# get the docker image tag from the parent class
metadata_tag = super().docker_image_tag
if self.pre_release:
-return f"{metadata_tag}-dev.{self.pre_release_suffix}"
+return f"{metadata_tag}-preview.{self.pre_release_suffix}"
else:
return metadata_tag

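As a standalone sketch of how the property above resolves a tag (the free function below is an assumption for illustration, not the pipelines API):

```python
# Illustration only: mirrors the docker_image_tag logic shown above.
def docker_image_tag(metadata_tag: str, git_revision: str, pre_release: bool) -> str:
    pre_release_suffix = git_revision[:7]
    return f"{metadata_tag}-preview.{pre_release_suffix}" if pre_release else metadata_tag

assert docker_image_tag("1.2.3", "f80318f754c0ffee", pre_release=True) == "1.2.3-preview.f80318f"
assert docker_image_tag("1.2.3", "f80318f754c0ffee", pre_release=False) == "1.2.3"
```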
@@ -25,7 +25,7 @@ from pipelines.helpers.utils import raise_if_not_user
from pipelines.models.steps import STEP_PARAMS, Step, StepResult

# Pin the PyAirbyte version to avoid updates from breaking CI
-PYAIRBYTE_VERSION = "0.20.2"
+PYAIRBYTE_VERSION = "0.35.1"


class PytestStep(Step, ABC):

@@ -156,7 +156,8 @@ class TestPyAirbyteValidationTests:
result = await PyAirbyteValidation(context_for_valid_connector)._run(mocker.MagicMock())
assert isinstance(result, StepResult)
assert result.status == StepStatus.SUCCESS
-assert "Getting `spec` output from connector..." in result.stdout
+# Verify the connector name appears in output (stable across PyAirbyte versions)
+assert context_for_valid_connector.connector.technical_name in (result.stdout + result.stderr)

async def test__run_validation_skip_unpublished_connector(
self,

@@ -2,7 +2,7 @@ data:
connectorSubtype: database
connectorType: destination
definitionId: ce0d828e-1dc4-496c-b122-2da42e637e48
-dockerImageTag: 2.1.16
+dockerImageTag: 2.1.18
dockerRepository: airbyte/destination-clickhouse
githubIssueLabel: destination-clickhouse
icon: clickhouse.svg

@@ -1,62 +0,0 @@
/*
 * Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.integrations.destination.clickhouse.config

import io.airbyte.cdk.load.command.DestinationStream
import io.airbyte.cdk.load.data.Transformations.Companion.toAlphanumericAndUnderscore
import io.airbyte.cdk.load.schema.model.TableName
import io.airbyte.cdk.load.table.ColumnNameGenerator
import io.airbyte.cdk.load.table.FinalTableNameGenerator
import io.airbyte.integrations.destination.clickhouse.spec.ClickhouseConfiguration
import jakarta.inject.Singleton
import java.util.Locale
import java.util.UUID

@Singleton
class ClickhouseFinalTableNameGenerator(private val config: ClickhouseConfiguration) :
    FinalTableNameGenerator {
    override fun getTableName(streamDescriptor: DestinationStream.Descriptor) =
        TableName(
            namespace =
                (streamDescriptor.namespace ?: config.resolvedDatabase)
                    .toClickHouseCompatibleName(),
            name = streamDescriptor.name.toClickHouseCompatibleName(),
        )
}

@Singleton
class ClickhouseColumnNameGenerator : ColumnNameGenerator {
    override fun getColumnName(column: String): ColumnNameGenerator.ColumnName {
        return ColumnNameGenerator.ColumnName(
            column.toClickHouseCompatibleName(),
            column.lowercase(Locale.getDefault()).toClickHouseCompatibleName(),
        )
    }
}

/**
 * Transforms a string to be compatible with ClickHouse table and column names.
 *
 * @return The transformed string suitable for ClickHouse identifiers.
 */
fun String.toClickHouseCompatibleName(): String {
    // 1. Replace any character that is not a letter,
    // a digit (0-9), or an underscore (_) with a single underscore.
    var transformed = toAlphanumericAndUnderscore(this)

    // 2. Ensure the identifier does not start with a digit.
    // If it starts with a digit, prepend an underscore.
    if (transformed.isNotEmpty() && transformed[0].isDigit()) {
        transformed = "_$transformed"
    }

    // 3.Do not allow empty strings.
    if (transformed.isEmpty()) {
        return "default_name_${UUID.randomUUID()}" // A fallback name if the input results in an
        // empty string
    }

    return transformed
}
@@ -0,0 +1,33 @@
/*
 * Copyright (c) 2025 Airbyte, Inc., all rights reserved.
 */

package io.airbyte.integrations.destination.clickhouse.schema

import io.airbyte.cdk.load.data.Transformations.Companion.toAlphanumericAndUnderscore
import java.util.UUID

/**
 * Transforms a string to be compatible with ClickHouse table and column names.
 *
 * @return The transformed string suitable for ClickHouse identifiers.
 */
fun String.toClickHouseCompatibleName(): String {
    // 1. Replace any character that is not a letter,
    // a digit (0-9), or an underscore (_) with a single underscore.
    var transformed = toAlphanumericAndUnderscore(this)

    // 2.Do not allow empty strings.
    if (transformed.isEmpty()) {
        return "default_name_${UUID.randomUUID()}" // A fallback name if the input results in an
        // empty string
    }

    // 3. Ensure the identifier does not start with a digit.
    // If it starts with a digit, prepend an underscore.
    if (transformed[0].isDigit()) {
        transformed = "_$transformed"
    }

    return transformed
}
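For readers who do not follow Kotlin, a rough Python re-expression of the same sanitization rules (purely illustrative; the connector's actual implementation is the Kotlin extension above, and `toAlphanumericAndUnderscore` may differ in edge cases):

```python
# Illustrative Python mirror of toClickHouseCompatibleName; not part of the connector.
import re
import uuid

def to_clickhouse_compatible_name(name: str) -> str:
    # Replace every character that is not a letter, digit, or underscore with an underscore.
    transformed = re.sub(r"[^A-Za-z0-9_]", "_", name)
    # Do not allow empty identifiers; fall back to a generated name.
    if not transformed:
        return f"default_name_{uuid.uuid4()}"
    # Identifiers must not start with a digit.
    if transformed[0].isdigit():
        transformed = f"_{transformed}"
    return transformed

assert to_clickhouse_compatible_name("hello world") == "hello_world"
assert to_clickhouse_compatible_name("123abc") == "_123abc"
```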
@@ -30,7 +30,6 @@ import io.airbyte.cdk.load.schema.model.TableName
import io.airbyte.cdk.load.table.TempTableNameGenerator
import io.airbyte.integrations.destination.clickhouse.client.ClickhouseSqlTypes
import io.airbyte.integrations.destination.clickhouse.client.isValidVersionColumn
-import io.airbyte.integrations.destination.clickhouse.config.toClickHouseCompatibleName
import io.airbyte.integrations.destination.clickhouse.spec.ClickhouseConfiguration
import jakarta.inject.Singleton

@@ -16,7 +16,7 @@ import io.airbyte.cdk.load.data.TimestampWithTimezoneValue
import io.airbyte.cdk.load.data.TimestampWithoutTimezoneValue
import io.airbyte.cdk.load.test.util.ExpectedRecordMapper
import io.airbyte.cdk.load.test.util.OutputRecord
-import io.airbyte.integrations.destination.clickhouse.config.toClickHouseCompatibleName
+import io.airbyte.integrations.destination.clickhouse.schema.toClickHouseCompatibleName
import java.math.RoundingMode
import java.time.LocalTime
import java.time.ZoneOffset

@@ -30,8 +30,8 @@ import io.airbyte.cdk.load.write.UnknownTypesBehavior
import io.airbyte.integrations.destination.clickhouse.ClickhouseConfigUpdater
import io.airbyte.integrations.destination.clickhouse.ClickhouseContainerHelper
import io.airbyte.integrations.destination.clickhouse.Utils
-import io.airbyte.integrations.destination.clickhouse.config.toClickHouseCompatibleName
import io.airbyte.integrations.destination.clickhouse.fixtures.ClickhouseExpectedRecordMapper
+import io.airbyte.integrations.destination.clickhouse.schema.toClickHouseCompatibleName
import io.airbyte.integrations.destination.clickhouse.spec.ClickhouseConfiguration
import io.airbyte.integrations.destination.clickhouse.spec.ClickhouseConfigurationFactory
import io.airbyte.integrations.destination.clickhouse.spec.ClickhouseSpecificationOss

@@ -21,7 +21,6 @@ import io.airbyte.cdk.load.schema.model.StreamTableSchema
import io.airbyte.cdk.load.schema.model.TableName
import io.airbyte.cdk.load.table.ColumnNameMapping
import io.airbyte.cdk.load.table.TempTableNameGenerator
-import io.airbyte.integrations.destination.clickhouse.config.ClickhouseFinalTableNameGenerator
import io.mockk.coEvery
import io.mockk.coVerify
import io.mockk.coVerifyOrder
@@ -39,8 +38,6 @@ class ClickhouseAirbyteClientTest {
// Mocks
private val client: ClickHouseClientRaw = mockk(relaxed = true)
private val clickhouseSqlGenerator: ClickhouseSqlGenerator = mockk(relaxed = true)
-private val clickhouseFinalTableNameGenerator: ClickhouseFinalTableNameGenerator =
-    mockk(relaxed = true)
private val tempTableNameGenerator: TempTableNameGenerator = mockk(relaxed = true)

// Client
@@ -105,7 +102,6 @@ class ClickhouseAirbyteClientTest {
alterTableStatement
coEvery { clickhouseAirbyteClient.execute(alterTableStatement) } returns
mockk(relaxed = true)
-every { clickhouseFinalTableNameGenerator.getTableName(any()) } returns mockTableName

mockCHSchemaWithAirbyteColumns()

@@ -172,7 +168,6 @@ class ClickhouseAirbyteClientTest {

coEvery { clickhouseAirbyteClient.execute(any()) } returns mockk(relaxed = true)
every { tempTableNameGenerator.generate(any()) } returns tempTableName
-every { clickhouseFinalTableNameGenerator.getTableName(any()) } returns finalTableName

mockCHSchemaWithAirbyteColumns()

@@ -226,8 +221,6 @@ class ClickhouseAirbyteClientTest {
fun `test ensure schema matches fails if no airbyte columns`() = runTest {
val finalTableName = TableName("fin", "al")

-every { clickhouseFinalTableNameGenerator.getTableName(any()) } returns finalTableName

val columnMapping = ColumnNameMapping(mapOf())
val stream =
mockk<DestinationStream> {

@@ -2,13 +2,13 @@
* Copyright (c) 2025 Airbyte, Inc., all rights reserved.
*/

-package io.airbyte.integrations.destination.clickhouse.config
+package io.airbyte.integrations.destination.clickhouse.schema

import java.util.UUID
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test

-class ClickhouseNameGeneratorTest {
+class ClickhouseNamingUtilsTest {
@Test
fun `toClickHouseCompatibleName replaces special characters with underscores`() {
Assertions.assertEquals("hello_world", "hello world".toClickHouseCompatibleName())
@@ -11,7 +11,7 @@ data:
connectorSubtype: api
connectorType: source
definitionId: 253487c0-2246-43ba-a21f-5116b20a2c50
-dockerImageTag: 4.1.3
+dockerImageTag: 4.1.4-rc.1
dockerRepository: airbyte/source-google-ads
documentationUrl: https://docs.airbyte.com/integrations/sources/google-ads
externalDocumentationUrls:
@@ -37,7 +37,7 @@ data:
releaseStage: generally_available
releases:
rolloutConfiguration:
-enableProgressiveRollout: false
+enableProgressiveRollout: true
breakingChanges:
1.0.0:
message: This release introduces fixes to custom query schema creation. Users should refresh the source schema and reset affected streams after upgrading to ensure uninterrupted syncs.

@@ -2,14 +2,14 @@

[[package]]
name = "airbyte-cdk"
-version = "7.4.5"
+version = "7.5.1.post3.dev19705070276"
description = "A framework for writing Airbyte Connectors."
optional = false
python-versions = "<3.14,>=3.10"
groups = ["main"]
files = [
-    {file = "airbyte_cdk-7.4.5-py3-none-any.whl", hash = "sha256:91694c099744b966dc8ba8468317c7ff553cd64cc777cf19981d58808350c87b"},
-    {file = "airbyte_cdk-7.4.5.tar.gz", hash = "sha256:100ed9f5d7ba5ba4d0d95e93d838ae9569a0d747686979399868cf1f7c2c7d9c"},
+    {file = "airbyte_cdk-7.5.1.post3.dev19705070276-py3-none-any.whl", hash = "sha256:842c405e7be07ed4ad608c00c3abd96ed550b47e9faf5be3c036d7f16ec30679"},
+    {file = "airbyte_cdk-7.5.1.post3.dev19705070276.tar.gz", hash = "sha256:c524ffa077ec1d2863336696fbe7fb2c3a3cffe8d5bbc2a4079310ff0105d0d7"},
]

[package.dependencies]
@@ -159,14 +159,14 @@ files = [

[[package]]
name = "cachetools"
-version = "6.2.1"
+version = "6.2.2"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
-    {file = "cachetools-6.2.1-py3-none-any.whl", hash = "sha256:09868944b6dde876dfd44e1d47e18484541eaf12f26f29b7af91b26cc892d701"},
-    {file = "cachetools-6.2.1.tar.gz", hash = "sha256:3f391e4bd8f8bf0931169baf7456cc822705f4e2a31f840d218f445b9a854201"},
+    {file = "cachetools-6.2.2-py3-none-any.whl", hash = "sha256:6c09c98183bf58560c97b2abfcedcbaf6a896a490f534b031b661d3723b45ace"},
+    {file = "cachetools-6.2.2.tar.gz", hash = "sha256:8e6d266b25e539df852251cfd6f990b4bc3a141db73b939058d809ebd2590fc6"},
]

[[package]]
@@ -431,14 +431,14 @@ files = [

[[package]]
name = "click"
-version = "8.3.0"
+version = "8.3.1"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
-    {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"},
-    {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"},
+    {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"},
+    {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"},
]

[package.dependencies]
@@ -569,15 +569,15 @@ packaging = ">=20.9"

[[package]]
name = "exceptiongroup"
-version = "1.3.0"
+version = "1.3.1"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
markers = "python_version == \"3.10\""
files = [
-    {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"},
-    {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"},
+    {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"},
+    {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"},
]

[package.dependencies]
@@ -1217,87 +1217,87 @@ files = [

[[package]]
name = "numpy"
-version = "2.3.4"
+version = "2.3.5"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.11"
groups = ["main"]
markers = "python_version == \"3.11\""
files = [
{file = "numpy-2.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e78aecd2800b32e8347ce49316d3eaf04aed849cd5b38e0af39f829a4e59f5eb"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fd09cc5d65bda1e79432859c40978010622112e9194e581e3415a3eccc7f43f"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1b219560ae2c1de48ead517d085bc2d05b9433f8e49d0955c82e8cd37bd7bf36"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:bafa7d87d4c99752d07815ed7a2c0964f8ab311eb8168f41b910bd01d15b6032"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36dc13af226aeab72b7abad501d370d606326a0029b9f435eacb3b8c94b8a8b7"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7b2f9a18b5ff9824a6af80de4f37f4ec3c2aab05ef08f51c77a093f5b89adda"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9984bd645a8db6ca15d850ff996856d8762c51a2239225288f08f9050ca240a0"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:64c5825affc76942973a70acf438a8ab618dbd692b84cd5ec40a0a0509edc09a"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-win32.whl", hash = "sha256:ed759bf7a70342f7817d88376eb7142fab9fef8320d6019ef87fae05a99874e1"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:faba246fb30ea2a526c2e9645f61612341de1a83fb1e0c5edf4ddda5a9c10996"},
|
||||
{file = "numpy-2.3.4-cp311-cp311-win_arm64.whl", hash = "sha256:4c01835e718bcebe80394fd0ac66c07cbb90147ebbdad3dcecd3f25de2ae7e2c"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32"},
|
||||
{file = "numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c090d4860032b857d94144d1a9976b8e36709e40386db289aaf6672de2a81966"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a13fc473b6db0be619e45f11f9e81260f7302f8d180c49a22b6e6120022596b3"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:3634093d0b428e6c32c3a69b78e554f0cd20ee420dcad5a9f3b2a63762ce4197"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:043885b4f7e6e232d7df4f51ffdef8c36320ee9d5f227b380ea636722c7ed12e"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ee6a571d1e4f0ea6d5f22d6e5fbd6ed1dc2b18542848e1e7301bd190500c9d7"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fc8a63918b04b8571789688b2780ab2b4a33ab44bfe8ccea36d3eba51228c953"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:40cc556d5abbc54aabe2b1ae287042d7bdb80c08edede19f0c0afb36ae586f37"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ecb63014bb7f4ce653f8be7f1df8cbc6093a5a2811211770f6606cc92b5a78fd"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-win32.whl", hash = "sha256:e8370eb6925bb8c1c4264fec52b0384b44f675f191df91cbe0140ec9f0955646"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-win_amd64.whl", hash = "sha256:56209416e81a7893036eea03abcb91c130643eb14233b2515c90dcac963fe99d"},
|
||||
{file = "numpy-2.3.4-cp313-cp313-win_arm64.whl", hash = "sha256:a700a4031bc0fd6936e78a752eefb79092cecad2599ea9c8039c548bc097f9bc"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:86966db35c4040fdca64f0816a1c1dd8dbd027d90fca5a57e00e1ca4cd41b879"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:838f045478638b26c375ee96ea89464d38428c69170360b23a1a50fa4baa3562"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d7315ed1dab0286adca467377c8381cd748f3dc92235f22a7dfc42745644a96a"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:84f01a4d18b2cc4ade1814a08e5f3c907b079c847051d720fad15ce37aa930b6"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:817e719a868f0dacde4abdfc5c1910b301877970195db9ab6a5e2c4bd5b121f7"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85e071da78d92a214212cacea81c6da557cab307f2c34b5f85b628e94803f9c0"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2ec646892819370cf3558f518797f16597b4e4669894a2ba712caccc9da53f1f"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:035796aaaddfe2f9664b9a9372f089cfc88bd795a67bd1bfe15e6e770934cf64"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-win32.whl", hash = "sha256:fea80f4f4cf83b54c3a051f2f727870ee51e22f0248d3114b8e755d160b38cfb"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-win_amd64.whl", hash = "sha256:15eea9f306b98e0be91eb344a94c0e630689ef302e10c2ce5f7e11905c704f9c"},
|
||||
{file = "numpy-2.3.4-cp313-cp313t-win_arm64.whl", hash = "sha256:b6c231c9c2fadbae4011ca5e7e83e12dc4a5072f1a1d85a0a7b3ed754d145a40"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:81c3e6d8c97295a7360d367f9f8553973651b76907988bb6066376bc2252f24e"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7c26b0b2bf58009ed1f38a641f3db4be8d960a417ca96d14e5b06df1506d41ff"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:62b2198c438058a20b6704351b35a1d7db881812d8512d67a69c9de1f18ca05f"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:9d729d60f8d53a7361707f4b68a9663c968882dd4f09e0d58c044c8bf5faee7b"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bd0c630cf256b0a7fd9d0a11c9413b42fef5101219ce6ed5a09624f5a65392c7"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d5e081bc082825f8b139f9e9fe42942cb4054524598aaeb177ff476cc76d09d2"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:15fb27364ed84114438fff8aaf998c9e19adbeba08c0b75409f8c452a8692c52"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:85d9fb2d8cd998c84d13a79a09cc0c1091648e848e4e6249b0ccd7f6b487fa26"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-win32.whl", hash = "sha256:e73d63fd04e3a9d6bc187f5455d81abfad05660b212c8804bf3b407e984cd2bc"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-win_amd64.whl", hash = "sha256:3da3491cee49cf16157e70f607c03a217ea6647b1cea4819c4f48e53d49139b9"},
|
||||
{file = "numpy-2.3.4-cp314-cp314-win_arm64.whl", hash = "sha256:6d9cd732068e8288dbe2717177320723ccec4fb064123f0caf9bbd90ab5be868"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:22758999b256b595cf0b1d102b133bb61866ba5ceecf15f759623b64c020c9ec"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9cb177bc55b010b19798dc5497d540dea67fd13a8d9e882b2dae71de0cf09eb3"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0f2bcc76f1e05e5ab58893407c63d90b2029908fa41f9f1cc51eecce936c3365"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:8dc20bde86802df2ed8397a08d793da0ad7a5fd4ea3ac85d757bf5dd4ad7c252"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e199c087e2aa71c8f9ce1cb7a8e10677dc12457e7cc1be4798632da37c3e86e"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85597b2d25ddf655495e2363fe044b0ae999b75bc4d630dc0d886484b03a5eb0"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04a69abe45b49c5955923cf2c407843d1c85013b424ae8a560bba16c92fe44a0"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e1708fac43ef8b419c975926ce1eaf793b0c13b7356cfab6ab0dc34c0a02ac0f"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-win32.whl", hash = "sha256:863e3b5f4d9915aaf1b8ec79ae560ad21f0b8d5e3adc31e73126491bb86dee1d"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-win_amd64.whl", hash = "sha256:962064de37b9aef801d33bc579690f8bfe6c5e70e29b61783f60bcba838a14d6"},
|
||||
{file = "numpy-2.3.4-cp314-cp314t-win_arm64.whl", hash = "sha256:8b5a9a39c45d852b62693d9b3f3e0fe052541f804296ff401a72a1b60edafb29"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6e274603039f924c0fe5cb73438fa9246699c78a6df1bd3decef9ae592ae1c05"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d149aee5c72176d9ddbc6803aef9c0f6d2ceeea7626574fc68518da5476fa346"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:6d34ed9db9e6395bb6cd33286035f73a59b058169733a9db9f85e650b88df37e"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:fdebe771ca06bb8d6abce84e51dca9f7921fe6ad34a0c914541b063e9a68928b"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e92defe6c08211eb77902253b14fe5b480ebc5112bc741fd5e9cd0608f847"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13b9062e4f5c7ee5c7e5be96f29ba71bc5a37fed3d1d77c37390ae00724d296d"},
|
||||
{file = "numpy-2.3.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:81b3a59793523e552c4a96109dde028aa4448ae06ccac5a76ff6532a85558a7f"},
|
||||
{file = "numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de5672f4a7b200c15a4127042170a694d4df43c992948f5e1af57f0174beed10"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acfd89508504a19ed06ef963ad544ec6664518c863436306153e13e94605c218"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:ffe22d2b05504f786c867c8395de703937f934272eb67586817b46188b4ded6d"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:872a5cf366aec6bb1147336480fef14c9164b154aeb6542327de4970282cd2f5"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3095bdb8dd297e5920b010e96134ed91d852d81d490e787beca7e35ae1d89cf7"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cba086a43d54ca804ce711b2a940b16e452807acebe7852ff327f1ecd49b0d4"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6cf9b429b21df6b99f4dee7a1218b8b7ffbbe7df8764dc0bd60ce8a0708fed1e"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:396084a36abdb603546b119d96528c2f6263921c50df3c8fd7cb28873a237748"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-win32.whl", hash = "sha256:b0c7088a73aef3d687c4deef8452a3ac7c1be4e29ed8bf3b366c8111128ac60c"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-win_amd64.whl", hash = "sha256:a414504bef8945eae5f2d7cb7be2d4af77c5d1cb5e20b296c2c25b61dff2900c"},
|
||||
{file = "numpy-2.3.5-cp311-cp311-win_arm64.whl", hash = "sha256:0cd00b7b36e35398fa2d16af7b907b65304ef8bb4817a550e06e5012929830fa"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa"},
|
||||
{file = "numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b"},
|
||||
{file = "numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234"},
|
||||
{file = "numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e"},
|
||||
{file = "numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf"},
|
||||
{file = "numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f0963b55cdd70fad460fa4c1341f12f976bb26cb66021a5580329bd498988310"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f4255143f5160d0de972d28c8f9665d882b5f61309d8362fdd3e103cf7bf010c"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:a4b9159734b326535f4dd01d947f919c6eefd2d9827466a696c44ced82dfbc18"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2feae0d2c91d46e59fcd62784a3a83b3fb677fead592ce51b5a6fbb4f95965ff"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffac52f28a7849ad7576293c0cb7b9f08304e8f7d738a8cb8a90ec4c55a998eb"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63c0e9e7eea69588479ebf4a8a270d5ac22763cc5854e9a7eae952a3908103f7"},
|
||||
{file = "numpy-2.3.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f16417ec91f12f814b10bafe79ef77e70113a2f5f7018640e7425ff979253425"},
|
||||
{file = "numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0"},
]

[[package]]
@@ -2407,127 +2407,127 @@ docs = ["markdown-include (>=0.8.1)", "mike (>=2.1.3)", "mkdocs-github-admonitio

[[package]]
name = "rpds-py"
-version = "0.28.0"
+version = "0.29.0"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "rpds_py-0.28.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7b6013db815417eeb56b2d9d7324e64fcd4fa289caeee6e7a78b2e11fc9b438a"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a4c6b05c685c0c03f80dabaeb73e74218c49deea965ca63f76a752807397207"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4794c6c3fbe8f9ac87699b131a1f26e7b4abcf6d828da46a3a52648c7930eba"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e8456b6ee5527112ff2354dd9087b030e3429e43a74f480d4a5ca79d269fd85"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:beb880a9ca0a117415f241f66d56025c02037f7c4efc6fe59b5b8454f1eaa50d"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6897bebb118c44b38c9cb62a178e09f1593c949391b9a1a6fe777ccab5934ee7"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b553dd06e875249fd43efd727785efb57a53180e0fde321468222eabbeaafa"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:f0b2044fdddeea5b05df832e50d2a06fe61023acb44d76978e1b060206a8a476"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05cf1e74900e8da73fa08cc76c74a03345e5a3e37691d07cfe2092d7d8e27b04"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:efd489fec7c311dae25e94fe7eeda4b3d06be71c68f2cf2e8ef990ffcd2cd7e8"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ada7754a10faacd4f26067e62de52d6af93b6d9542f0df73c57b9771eb3ba9c4"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c2a34fd26588949e1e7977cfcbb17a9a42c948c100cab890c6d8d823f0586457"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-win32.whl", hash = "sha256:f9174471d6920cbc5e82a7822de8dfd4dcea86eb828b04fc8c6519a77b0ee51e"},
|
||||
{file = "rpds_py-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:6e32dd207e2c4f8475257a3540ab8a93eff997abfa0a3fdb287cae0d6cd874b8"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa"},
|
||||
{file = "rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6b4f28583a4f247ff60cd7bdda83db8c3f5b05a7a82ff20dd4b078571747708f"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d678e91b610c29c4b3d52a2c148b641df2b4676ffe47c59f6388d58b99cdc424"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e819e0e37a44a78e1383bf1970076e2ccc4dc8c2bbaa2f9bd1dc987e9afff628"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ee514e0f0523db5d3fb171f397c54875dbbd69760a414dccf9d4d7ad628b5bd"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3fa06d27fdcee47f07a39e02862da0100cb4982508f5ead53ec533cd5fe55e"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46959ef2e64f9e4a41fc89aa20dbca2b85531f9a72c21099a3360f35d10b0d5a"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8455933b4bcd6e83fde3fefc987a023389c4b13f9a58c8d23e4b3f6d13f78c84"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ad50614a02c8c2962feebe6012b52f9802deec4263946cddea37aaf28dd25a66"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5deca01b271492553fdb6c7fd974659dce736a15bae5dad7ab8b93555bceb28"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:735f8495a13159ce6a0d533f01e8674cec0c57038c920495f87dcb20b3ddb48a"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:961ca621ff10d198bbe6ba4957decca61aa2a0c56695384c1d6b79bf61436df5"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2374e16cc9131022e7d9a8f8d65d261d9ba55048c78f3b6e017971a4f5e6353c"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-win32.whl", hash = "sha256:d15431e334fba488b081d47f30f091e5d03c18527c325386091f31718952fe08"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:a410542d61fc54710f750d3764380b53bf09e8c4edbf2f9141a82aa774a04f7c"},
|
||||
{file = "rpds_py-0.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:1f0cfd1c69e2d14f8c892b893997fa9a60d890a0c8a603e88dca4955f26d1edd"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e9e184408a0297086f880556b6168fa927d677716f83d3472ea333b42171ee3b"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edd267266a9b0448f33dc465a97cfc5d467594b600fe28e7fa2f36450e03053a"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85beb8b3f45e4e32f6802fb6cd6b17f615ef6c6a52f265371fb916fae02814aa"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d2412be8d00a1b895f8ad827cc2116455196e20ed994bb704bf138fe91a42724"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf128350d384b777da0e68796afdcebc2e9f63f0e9f242217754e647f6d32491"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2036d09b363aa36695d1cc1a97b36865597f4478470b0697b5ee9403f4fe399"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e1e9be4fa6305a16be628959188e4fd5cd6f1b0e724d63c6d8b2a8adf74ea6"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0a403460c9dd91a7f23fc3188de6d8977f1d9603a351d5db6cf20aaea95b538d"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7366b6553cdc805abcc512b849a519167db8f5e5c3472010cd1228b224265cb"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b43c6a3726efd50f18d8120ec0551241c38785b68952d240c45ea553912ac41"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0cb7203c7bc69d7c1585ebb33a2e6074492d2fc21ad28a7b9d40457ac2a51ab7"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a52a5169c664dfb495882adc75c304ae1d50df552fbd68e100fdc719dee4ff9"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-win32.whl", hash = "sha256:2e42456917b6687215b3e606ab46aa6bca040c77af7df9a08a6dcfe8a4d10ca5"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:e0a0311caedc8069d68fc2bf4c9019b58a2d5ce3cd7cb656c845f1615b577e1e"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:04c1b207ab8b581108801528d59ad80aa83bb170b35b0ddffb29c20e411acdc1"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f296ea3054e11fc58ad42e850e8b75c62d9a93a9f981ad04b2e5ae7d2186ff9c"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5a7306c19b19005ad98468fcefeb7100b19c79fc23a5f24a12e06d91181193fa"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d9b86aa501fed9862a443c5c3116f6ead8bc9296185f369277c42542bd646b"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5bbc701eff140ba0e872691d573b3d5d30059ea26e5785acba9132d10c8c31d"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5690671cd672a45aa8616d7374fdf334a1b9c04a0cac3c854b1136e92374fe"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f1d92ecea4fa12f978a367c32a5375a1982834649cdb96539dcdc12e609ab1a"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d252db6b1a78d0a3928b6190156042d54c93660ce4d98290d7b16b5296fb7cc"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d61b355c3275acb825f8777d6c4505f42b5007e357af500939d4a35b19177259"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acbe5e8b1026c0c580d0321c8aae4b0a1e1676861d48d6e8c6586625055b606a"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8aa23b6f0fc59b85b4c7d89ba2965af274346f738e8d9fc2455763602e62fd5f"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7b14b0c680286958817c22d76fcbca4800ddacef6f678f3a7c79a1fe7067fe37"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bcf1d210dfee61a6c86551d67ee1031899c0fdbae88b2d44a569995d43797712"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-win32.whl", hash = "sha256:3aa4dc0fdab4a7029ac63959a3ccf4ed605fee048ba67ce89ca3168da34a1342"},
|
||||
{file = "rpds_py-0.28.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7b7d9d83c942855e4fdcfa75d4f96f6b9e272d42fffcb72cd4bb2577db2e2907"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dcdcb890b3ada98a03f9f2bb108489cdc7580176cb73b4f2d789e9a1dac1d472"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f274f56a926ba2dc02976ca5b11c32855cbd5925534e57cfe1fda64e04d1add2"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe0438ac4a29a520ea94c8c7f1754cdd8feb1bc490dfda1bfd990072363d527"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a358a32dd3ae50e933347889b6af9a1bdf207ba5d1a3f34e1a38cd3540e6733"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e80848a71c78aa328fefaba9c244d588a342c8e03bda518447b624ea64d1ff56"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f586db2e209d54fe177e58e0bc4946bea5fb0102f150b1b2f13de03e1f0976f8"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae8ee156d6b586e4292491e885d41483136ab994e719a13458055bec14cf370"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:a805e9b3973f7e27f7cab63a6b4f61d90f2e5557cff73b6e97cd5b8540276d3d"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5d3fd16b6dc89c73a4da0b4ac8b12a7ecc75b2864b95c9e5afed8003cb50a728"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6796079e5d24fdaba6d49bda28e2c47347e89834678f2bc2c1b4fc1489c0fb01"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:76500820c2af232435cbe215e3324c75b950a027134e044423f59f5b9a1ba515"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bbdc5640900a7dbf9dd707fe6388972f5bbd883633eb68b76591044cfe346f7e"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-win32.whl", hash = "sha256:adc8aa88486857d2b35d75f0640b949759f79dc105f50aa2c27816b2e0dd749f"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:66e6fa8e075b58946e76a78e69e1a124a21d9a48a5b4766d15ba5b06869d1fa1"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314-win_arm64.whl", hash = "sha256:a6fe887c2c5c59413353b7c0caff25d0e566623501ccfff88957fa438a69377d"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7a69df082db13c7070f7b8b1f155fa9e687f1d6aefb7b0e3f7231653b79a067b"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b1cde22f2c30ebb049a9e74c5374994157b9b70a16147d332f89c99c5960737a"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5338742f6ba7a51012ea470bd4dc600a8c713c0c72adaa0977a1b1f4327d6592"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1460ebde1bcf6d496d80b191d854adedcc619f84ff17dc1c6d550f58c9efbba"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3eb248f2feba84c692579257a043a7699e28a77d86c77b032c1d9fbb3f0219c"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3bbba5def70b16cd1c1d7255666aad3b290fbf8d0fe7f9f91abafb73611a91"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3114f4db69ac5a1f32e7e4d1cbbe7c8f9cf8217f78e6e002cedf2d54c2a548ed"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:4b0cb8a906b1a0196b863d460c0222fb8ad0f34041568da5620f9799b83ccf0b"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf681ac76a60b667106141e11a92a3330890257e6f559ca995fbb5265160b56e"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1e8ee6413cfc677ce8898d9cde18cc3a60fc2ba756b0dec5b71eb6eb21c49fa1"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b3072b16904d0b5572a15eb9d31c1954e0d3227a585fc1351aa9878729099d6c"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b670c30fd87a6aec281c3c9896d3bae4b205fd75d79d06dc87c2503717e46092"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-win32.whl", hash = "sha256:8014045a15b4d2b3476f0a287fcc93d4f823472d7d1308d47884ecac9e612be3"},
|
||||
{file = "rpds_py-0.28.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a4e59c90d9c27c561eb3160323634a9ff50b04e4f7820600a2beb0ac90db578"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829"},
|
||||
{file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f"},
|
||||
{file = "rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4ae4b88c6617e1b9e5038ab3fccd7bac0842fdda2b703117b2aa99bc85379113"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7d9128ec9d8cecda6f044001fde4fb71ea7c24325336612ef8179091eb9596b9"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37812c3da8e06f2bb35b3cf10e4a7b68e776a706c13058997238762b4e07f4f"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66786c3fb1d8de416a7fa8e1cb1ec6ba0a745b2b0eee42f9b7daa26f1a495545"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58f5c77f1af888b5fd1876c9a0d9858f6f88a39c9dd7c073a88e57e577da66d"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:799156ef1f3529ed82c36eb012b5d7a4cf4b6ef556dd7cc192148991d07206ae"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:453783477aa4f2d9104c4b59b08c871431647cb7af51b549bbf2d9eb9c827756"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:24a7231493e3c4a4b30138b50cca089a598e52c34cf60b2f35cebf62f274fdea"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7033c1010b1f57bb44d8067e8c25aa6fa2e944dbf46ccc8c92b25043839c3fd2"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0248b19405422573621172ab8e3a1f29141362d13d9f72bafa2e28ea0cdca5a2"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f9f436aee28d13b9ad2c764fc273e0457e37c2e61529a07b928346b219fcde3b"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24a16cb7163933906c62c272de20ea3c228e4542c8c45c1d7dc2b9913e17369a"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-win32.whl", hash = "sha256:1a409b0310a566bfd1be82119891fefbdce615ccc8aa558aff7835c27988cbef"},
|
||||
{file = "rpds_py-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5523b0009e7c3c1263471b69d8da1c7d41b3ecb4cb62ef72be206b92040a950"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b9c764a11fd637e0322a488560533112837f5334ffeb48b1be20f6d98a7b437"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fd2164d73812026ce970d44c3ebd51e019d2a26a4425a5dcbdfa93a34abc383"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a097b7f7f7274164566ae90a221fd725363c0e9d243e2e9ed43d195ccc5495c"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cdc0490374e31cedefefaa1520d5fe38e82fde8748cbc926e7284574c714d6b"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89ca2e673ddd5bde9b386da9a0aac0cab0e76f40c8f0aaf0d6311b6bbf2aa311"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5d9da3ff5af1ca1249b1adb8ef0573b94c76e6ae880ba1852f033bf429d4588"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8238d1d310283e87376c12f658b61e1ee23a14c0e54c7c0ce953efdbdc72deed"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2d6fb2ad1c36f91c4646989811e84b1ea5e0c3cf9690b826b6e32b7965853a63"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:534dc9df211387547267ccdb42253aa30527482acb38dd9b21c5c115d66a96d2"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d456e64724a075441e4ed648d7f154dc62e9aabff29bcdf723d0c00e9e1d352f"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a738f2da2f565989401bd6fd0b15990a4d1523c6d7fe83f300b7e7d17212feca"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a110e14508fd26fd2e472bb541f37c209409876ba601cf57e739e87d8a53cf95"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-win32.whl", hash = "sha256:923248a56dd8d158389a28934f6f69ebf89f218ef96a6b216a9be6861804d3f4"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:539eb77eb043afcc45314d1be09ea6d6cafb3addc73e0547c171c6d636957f60"},
|
||||
{file = "rpds_py-0.29.0-cp311-cp311-win_arm64.whl", hash = "sha256:bdb67151ea81fcf02d8f494703fb728d4d34d24556cbff5f417d74f6f5792e7c"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977"},
|
||||
{file = "rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1585648d0760b88292eecab5181f5651111a69d90eff35d6b78aa32998886a61"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:521807963971a23996ddaf764c682b3e46459b3c58ccd79fefbe16718db43154"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8896986efaa243ab713c69e6491a4138410f0fe36f2f4c71e18bd5501e8014"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d24564a700ef41480a984c5ebed62b74e6ce5860429b98b1fede76049e953e6"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6596b93c010d386ae46c9fba9bfc9fc5965fa8228edeac51576299182c2e31c"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5cc58aac218826d054c7da7f95821eba94125d88be673ff44267bb89d12a5866"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de73e40ebc04dd5d9556f50180395322193a78ec247e637e741c1b954810f295"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:295ce5ac7f0cf69a651ea75c8f76d02a31f98e5698e82a50a5f4d4982fbbae3b"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ea59b23ea931d494459c8338056fe7d93458c0bf3ecc061cd03916505369d55"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f49d41559cebd608042fdcf54ba597a4a7555b49ad5c1c0c03e0af82692661cd"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:05a2bd42768ea988294ca328206efbcc66e220d2d9b7836ee5712c07ad6340ea"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33ca7bdfedd83339ca55da3a5e1527ee5870d4b8369456b5777b197756f3ca22"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-win32.whl", hash = "sha256:20c51ae86a0bb9accc9ad4e6cdeec58d5ebb7f1b09dd4466331fc65e1766aae7"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-win_amd64.whl", hash = "sha256:6410e66f02803600edb0b1889541f4b5cc298a5ccda0ad789cc50ef23b54813e"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313-win_arm64.whl", hash = "sha256:56838e1cd9174dc23c5691ee29f1d1be9eab357f27efef6bded1328b23e1ced2"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:37d94eadf764d16b9a04307f2ab1d7af6dc28774bbe0535c9323101e14877b4c"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d472cf73efe5726a067dce63eebe8215b14beabea7c12606fd9994267b3cfe2b"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72fdfd5ff8992e4636621826371e3ac5f3e3b8323e9d0e48378e9c13c3dac9d0"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2549d833abdf8275c901313b9e8ff8fba57e50f6a495035a2a4e30621a2f7cc4"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4448dad428f28a6a767c3e3b80cde3446a22a0efbddaa2360f4bb4dc836d0688"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:115f48170fd4296a33938d8c11f697f5f26e0472e43d28f35624764173a60e4d"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5bb73ffc029820f4348e9b66b3027493ae00bca6629129cd433fd7a76308ee"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:b1581fcde18fcdf42ea2403a16a6b646f8eb1e58d7f90a0ce693da441f76942e"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16e9da2bda9eb17ea318b4c335ec9ac1818e88922cbe03a5743ea0da9ecf74fb"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:28fd300326dd21198f311534bdb6d7e989dd09b3418b3a91d54a0f384c700967"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2aba991e041d031c7939e1358f583ae405a7bf04804ca806b97a5c0e0af1ea5e"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f437026dbbc3f08c99cc41a5b2570c6e1a1ddbe48ab19a9b814254128d4ea7a"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-win32.whl", hash = "sha256:6e97846e9800a5d0fe7be4d008f0c93d0feeb2700da7b1f7528dabafb31dfadb"},
|
||||
{file = "rpds_py-0.29.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f49196aec7c4b406495f60e6f947ad71f317a765f956d74bbd83996b9edc0352"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:394d27e4453d3b4d82bb85665dc1fcf4b0badc30fc84282defed71643b50e1a1"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55d827b2ae95425d3be9bc9a5838b6c29d664924f98146557f7715e331d06df8"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc31a07ed352e5462d3ee1b22e89285f4ce97d5266f6d1169da1142e78045626"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4695dd224212f6105db7ea62197144230b808d6b2bba52238906a2762f1d1e7"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcae1770b401167f8b9e1e3f566562e6966ffa9ce63639916248a9e25fa8a244"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90f30d15f45048448b8da21c41703b31c61119c06c216a1bf8c245812a0f0c17"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a91e0ab77bdc0004b43261a4b8cd6d6b451e8d443754cfda830002b5745b32"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:4aa195e5804d32c682e453b34474f411ca108e4291c6a0f824ebdc30a91c973c"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7971bdb7bf4ee0f7e6f67fa4c7fbc6019d9850cc977d126904392d363f6f8318"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8ae33ad9ce580c7a47452c3b3f7d8a9095ef6208e0a0c7e4e2384f9fc5bf8212"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c661132ab2fb4eeede2ef69670fd60da5235209874d001a98f1542f31f2a8a94"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb78b3a0d31ac1bde132c67015a809948db751cb4e92cdb3f0b242e430b6ed0d"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-win32.whl", hash = "sha256:f475f103488312e9bd4000bc890a95955a07b2d0b6e8884aef4be56132adbbf1"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-win_amd64.whl", hash = "sha256:b9cf2359a4fca87cfb6801fae83a76aedf66ee1254a7a151f1341632acf67f1b"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314-win_arm64.whl", hash = "sha256:9ba8028597e824854f0f1733d8b964e914ae3003b22a10c2c664cb6927e0feb9"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:e71136fd0612556b35c575dc2726ae04a1669e6a6c378f2240312cf5d1a2ab10"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:76fe96632d53f3bf0ea31ede2f53bbe3540cc2736d4aec3b3801b0458499ef3a"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9459a33f077130dbb2c7c3cea72ee9932271fb3126404ba2a2661e4fe9eb7b79"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c9546cfdd5d45e562cc0444b6dddc191e625c62e866bf567a2c69487c7ad28a"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12597d11d97b8f7e376c88929a6e17acb980e234547c92992f9f7c058f1a7310"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28de03cf48b8a9e6ec10318f2197b83946ed91e2891f651a109611be4106ac4b"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7951c964069039acc9d67a8ff1f0a7f34845ae180ca542b17dc1456b1f1808"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:c07d107b7316088f1ac0177a7661ca0c6670d443f6fe72e836069025e6266761"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de2345af363d25696969befc0c1688a6cb5e8b1d32b515ef84fc245c6cddba3"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:00e56b12d2199ca96068057e1ae7f9998ab6e99cda82431afafd32f3ec98cca9"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3919a3bbecee589300ed25000b6944174e07cd20db70552159207b3f4bbb45b8"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7fa2ccc312bbd91e43aa5e0869e46bc03278a3dddb8d58833150a18b0f0283a"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-win32.whl", hash = "sha256:97c817863ffc397f1e6a6e9d2d89fe5408c0a9922dac0329672fb0f35c867ea5"},
|
||||
{file = "rpds_py-0.29.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2023473f444752f0f82a58dfcbee040d0a1b3d1b3c2ec40e884bd25db6d117d2"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:acd82a9e39082dc5f4492d15a6b6c8599aa21db5c35aaf7d6889aea16502c07d"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:715b67eac317bf1c7657508170a3e011a1ea6ccb1c9d5f296e20ba14196be6b3"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b1b87a237cb2dba4db18bcfaaa44ba4cd5936b91121b62292ff21df577fc43"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3c3e8101bb06e337c88eb0c0ede3187131f19d97d43ea0e1c5407ea74c0cbf"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8e54d6e61f3ecd3abe032065ce83ea63417a24f437e4a3d73d2f85ce7b7cfe"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fbd4e9aebf110473a420dea85a238b254cf8a15acb04b22a5a6b5ce8925b760"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fdf53d36e6c72819993e35d1ebeeb8e8fc688d0c6c2b391b55e335b3afba5a"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:ea7173df5d86f625f8dde6d5929629ad811ed8decda3b60ae603903839ac9ac0"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:76054d540061eda273274f3d13a21a4abdde90e13eaefdc205db37c05230efce"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9f84c549746a5be3bc7415830747a3a0312573afc9f95785eb35228bb17742ec"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:0ea962671af5cb9a260489e311fa22b2e97103e3f9f0caaea6f81390af96a9ed"},
|
||||
{file = "rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f7728653900035fb7b8d06e1e5900545d8088efc9d5d4545782da7df03ec803f"},
|
||||
{file = "rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359"},
|
||||
]

[[package]]
@@ -2943,4 +2943,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = "^3.10,<3.12"
content-hash = "edf4d29a4e9bfc41d341b7fd08e31d621b69b9fe516b4446200da3de0f325aaa"
content-hash = "8cb4584a1c2360f62df03bfcab503c4028f0d0d2f12823913af87522b17732e7"

@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",]
build-backend = "poetry.core.masonry.api"

[tool.poetry]
version = "4.1.3"
version = "4.1.4-rc.1"
name = "source-google-ads"
description = "Source implementation for Google Ads."
authors = [ "Airbyte <contact@airbyte.io>",]
@@ -20,7 +20,7 @@ python = "^3.10,<3.12"
google-ads = "==27.0.0"
protobuf = "==4.25.2"
pendulum = "<3.0.0"
airbyte-cdk = "^7.4.1"
airbyte-cdk = "^7.5.1.post3.dev19705070276"

[tool.poetry.scripts]
source-google-ads = "source_google_ads.run:run"

@@ -697,6 +697,7 @@ class CustomGAQueryHttpRequester(HttpRequester):
def __post_init__(self, parameters: Mapping[str, Any]):
super().__post_init__(parameters=parameters)
self.query = GAQL.parse(parameters.get("query"))
self.stream_response = True

@staticmethod
def is_metrics_in_custom_query(query: GAQL) -> bool:
@@ -761,6 +762,17 @@ class CustomGAQueryHttpRequester(HttpRequester):
return self.query[from_index + 4 :].strip()


class CustomGAQueryClickViewHttpRequester(CustomGAQueryHttpRequester):
@staticmethod
def _insert_segments_date_expr(query: GAQL, start_date: str, end_date: str) -> GAQL:
if "segments.date" not in query.fields:
query = query.append_field("segments.date")
condition = f"segments.date ='{start_date}'"
if query.where:
return query.set_where(query.where + " AND " + condition)
return query.set_where(condition)


@dataclass()
class CustomGAQuerySchemaLoader(SchemaLoader):
"""

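The new _insert_segments_date_expr helper above only builds a per-slice date filter: it makes sure segments.date is selected and then ANDs an equality condition onto any existing WHERE clause (the end_date argument is accepted but unused in this hunk). A minimal standalone sketch of the same idea, using plain strings instead of the connector's GAQL helper, so every name below is illustrative rather than the real API:

# Simplified stand-in for CustomGAQueryClickViewHttpRequester._insert_segments_date_expr above.
# The real helper works on a GAQL object; plain strings are used here purely for illustration.
def insert_segments_date(query: str, start_date: str) -> str:
    select_part, _, where_part = query.partition(" WHERE ")
    if "segments.date" not in select_part:
        # make sure the cursor field is selected so it shows up in the records
        select_part = select_part.replace(" FROM ", ", segments.date FROM ", 1)
    condition = f"segments.date = '{start_date}'"
    if where_part:
        return f"{select_part} WHERE {where_part} AND {condition}"
    return f"{select_part} WHERE {condition}"


print(insert_segments_date("SELECT click_view.gclid FROM click_view", "2024-10-03"))
# SELECT click_view.gclid, segments.date FROM click_view WHERE segments.date = '2024-10-03'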
@@ -65,27 +65,15 @@ definitions:
field_path:
- results

cursor_paginator:
type: DefaultPaginator
pagination_strategy:
type: CursorPagination
cursor_value: "{{ response.get('nextPageToken', '') }}"
stop_condition: "{{ response.get('nextPageToken', '') is none }}"
page_token_option:
type: RequestOption
inject_into: body_json
field_path: ["page_token"]

base_retriever:
type: SimpleRetriever
requester:
$ref: "#/definitions/stream_requester"
record_selector:
$ref: "#/definitions/base_selector"
paginator:
$ref: "#/definitions/cursor_paginator"
decoder:
type: JsonDecoder
type: CustomDecoder
class_name: "source_google_ads.components.GoogleAdsStreamingDecoder"

stream_base:
type: DeclarativeStream
@@ -199,7 +187,7 @@ definitions:
$ref: "#/schemas"
authenticator:
$ref: "#/definitions/authenticator"
url_base: "https://googleads.googleapis.com/v20/{{ stream_partition['customer_id'] }}/googleAds:search"
url_base: "https://googleads.googleapis.com/v20/{{ stream_partition['customer_id'] }}/googleAds:searchStream"
http_method: POST
error_handler:
$ref: "#/definitions/base_error_handler"
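For context on the googleAds:search -> googleAds:searchStream switch above: searchStream returns a JSON array of response chunks, each carrying its own "results" list, rather than a single paginated page, which is why the JsonDecoder and cursor paginator give way to a custom streaming decoder. A rough sketch of that flattening step, assuming the chunked response shape described here (the function name and shape handling are illustrative, not the connector's actual GoogleAdsStreamingDecoder):

import json
from typing import Any, Iterable, Mapping


def decode_search_stream(body: bytes) -> Iterable[Mapping[str, Any]]:
    # Each element of the streamed array is a chunk with its own "results" list;
    # flattening them yields the same records the paginated endpoint used to return.
    for chunk in json.loads(body):
        yield from chunk.get("results", [])


sample = b'[{"results": [{"campaign": {"id": "1"}}]}, {"results": [{"campaign": {"id": "2"}}]}]'
print(list(decode_search_stream(sample)))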
@@ -224,7 +212,7 @@ definitions:
incremental_stream:
$ref: "#/definitions/incremental_stream_base"
$parameters:
url_base: "https://googleads.googleapis.com/v20/{{ stream_partition['parent_slice']['customer_id'] }}/googleAds:search"
url_base: "https://googleads.googleapis.com/v20/{{ stream_partition['parent_slice']['customer_id'] }}/googleAds:searchStream"
retriever:
type: CustomRetriever
class_name: "source_google_ads.components.CriterionRetriever"
@@ -240,8 +228,6 @@ definitions:
http_method: POST
error_handler:
$ref: "#/definitions/base_error_handler"
paginator:
$ref: "#/definitions/cursor_paginator"
record_selector:
type: RecordSelector
extractor:
@@ -297,8 +283,6 @@ definitions:
$ref: "#/definitions/base_requester"
url_base: "https://googleads.googleapis.com/v20/customers:listAccessibleCustomers"
http_method: GET
paginator:
type: NoPagination
record_selector:
extractor:
type: CustomRecordExtractor
@@ -422,11 +406,6 @@ definitions:
parent_key: "clientCustomer"
partition_field: "customer_id"
stream: "#/definitions/customer_client"
decoder:
type: CustomDecoder
class_name: "source_google_ads.components.GoogleAdsStreamingDecoder"
paginator:
type: NoPagination
transformations:
- type: CustomTransformation
class_name: "source_google_ads.components.KeysToSnakeCaseGoogleAdsTransformation"
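The KeysToSnakeCaseGoogleAdsTransformation referenced above maps API-style keys such as clientCustomer onto the snake_case names the schemas use. A one-line sketch of that key conversion (the real component may also handle nested values and Google Ads specific edge cases, which are ignored here):

import re


def to_snake_case(key: str) -> str:
    # insert "_" before each interior capital letter, then lowercase
    return re.sub(r"(?<!^)(?=[A-Z])", "_", key).lower()


print(to_snake_case("clientCustomer"))  # client_customer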
@@ -487,13 +466,6 @@ definitions:

ad_group_ad_stream:
$ref: "#/definitions/incremental_stream_base"
retriever:
$ref: "#/definitions/incremental_stream_base/retriever"
paginator:
type: NoPagination
decoder:
type: CustomDecoder
class_name: "source_google_ads.components.GoogleAdsStreamingDecoder"
name: ad_group_ad
primary_key:
- ad_group.id
@@ -665,12 +637,10 @@ definitions:
$ref: "#/schemas/click_view"
authenticator:
$ref: "#/definitions/authenticator"
url_base: "https://googleads.googleapis.com/v20/{{ stream_partition['customer_id'] }}/googleAds:search"
url_base: "https://googleads.googleapis.com/v20/{{ stream_partition['customer_id'] }}/googleAds:searchStream"
http_method: POST
error_handler:
$ref: "#/definitions/base_error_handler"
paginator:
$ref: "#/definitions/cursor_paginator"
incremental_sync:
type: DatetimeBasedCursor
cursor_field: segments.date
@@ -859,8 +829,6 @@ definitions:
error_handler:
$ref: "#/definitions/base_error_handler"
name: change_status
paginator:
$ref: "#/definitions/cursor_paginator"
pagination_reset:
type: PaginationReset
action: SPLIT_USING_CURSOR
@@ -1030,14 +998,6 @@ definitions:
)
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- retriever
- requester
- $parameters
- query
value: "{{ components_values.get('query', None) }}"
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- retriever
@@ -1085,6 +1045,130 @@ definitions:
)
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- incremental_sync
- step
value: "P1D"
condition: >-
{{
(
(
components_values.get('query', '').count('segments.date') == 1
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
)
or
(
components_values.get('query', '').count('segments.date') == 2
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
and (components_values.get('query') | regex_search('(?i)(\\bORDER\\s+BY\\b[\\s\\S]*?segments\\.date)'))
)
) and components_values.get('query', '') | regex_search('(?i)(\\bFROM\\s+click_view\\b)')
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- incremental_sync
- start_datetime
value: >-
{
"type": "MinMaxDatetime",
"datetime": "{{ max(config.get('start_date', day_delta(-90, format='%Y-%m-%d')), day_delta(-90, format='%Y-%m-%d')) }}",
"datetime_format": "%Y-%m-%d"
}
condition: >-
{{
(
(
components_values.get('query', '').count('segments.date') == 1
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
)
or
(
components_values.get('query', '').count('segments.date') == 2
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
and (components_values.get('query') | regex_search('(?i)(\\bORDER\\s+BY\\b[\\s\\S]*?segments\\.date)'))
)
) and components_values.get('query', '') | regex_search('(?i)(\\bFROM\\s+click_view\\b)')
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- incremental_sync
- end_datetime
value: >-
{
"type": "MinMaxDatetime",
"datetime": "{{ format_datetime((str_to_datetime(config.get('end_date')) if config.get('end_date') else now_utc()) + duration('P1D'), '%Y-%m-%d') }}",
"datetime_format": "%Y-%m-%d"
}
condition: >-
{{
(
(
components_values.get('query', '').count('segments.date') == 1
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
)
or
(
components_values.get('query', '').count('segments.date') == 2
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
and (components_values.get('query') | regex_search('(?i)(\\bORDER\\s+BY\\b[\\s\\S]*?segments\\.date)'))
)
) and components_values.get('query', '') | regex_search('(?i)(\\bFROM\\s+click_view\\b)')
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- incremental_sync
- cursor_granularity
value: P1D
condition: >-
{{
(
(
components_values.get('query', '').count('segments.date') == 1
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
)
or
(
components_values.get('query', '').count('segments.date') == 2
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
and (components_values.get('query') | regex_search('(?i)(\\bORDER\\s+BY\\b[\\s\\S]*?segments\\.date)'))
)
) and components_values.get('query', '') | regex_search('(?i)(\\bFROM\\s+click_view\\b)')
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- retriever
- requester
- class_name
value: "source_google_ads.components.CustomGAQueryClickViewHttpRequester"
condition: >-
{{
(
(
components_values.get('query', '').count('segments.date') == 1
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
)
or
(
components_values.get('query', '').count('segments.date') == 2
and (components_values.get('query') | regex_search('(?i)(\\bSELECT\\b[\\s\\S]*?segments\\.date[\\s\\S]*?\\bFROM\\b)'))
and (components_values.get('query') | regex_search('(?i)(\\bORDER\\s+BY\\b[\\s\\S]*?segments\\.date)'))
)
) and components_values.get('query', '') | regex_search('(?i)(\\bFROM\\s+click_view\\b)')
}}
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- retriever
- requester
- $parameters
- query
value: "{{ components_values.get('query', None) }}"
create_or_update: true
- type: ComponentMappingDefinition
field_path:
- retriever

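The condition repeated in the ComponentMappingDefinition entries above is dense Jinja; restated in plain terms, a custom query is treated as incremental when segments.date appears once inside the SELECT ... FROM clause, or twice with the second occurrence in an ORDER BY, and the click_view overrides apply only when the query also reads FROM click_view. A sketch of that logic (function names are illustrative; the regexes are the ones from the manifest):

import re

SELECT_DATE_FROM = re.compile(r"(?i)\bSELECT\b[\s\S]*?segments\.date[\s\S]*?\bFROM\b")
ORDER_BY_DATE = re.compile(r"(?i)\bORDER\s+BY\b[\s\S]*?segments\.date")
FROM_CLICK_VIEW = re.compile(r"(?i)\bFROM\s+click_view\b")


def is_incremental(query: str) -> bool:
    occurrences = query.count("segments.date")
    if occurrences == 1:
        return bool(SELECT_DATE_FROM.search(query))
    if occurrences == 2:
        return bool(SELECT_DATE_FROM.search(query)) and bool(ORDER_BY_DATE.search(query))
    return False


def uses_click_view_overrides(query: str) -> bool:
    return is_incremental(query) and bool(FROM_CLICK_VIEW.search(query))


print(is_incremental("SELECT ad.id, segments.date FROM ad"))                                # True
print(uses_click_view_overrides("SELECT click_view.gclid, segments.date FROM click_view"))  # True
print(is_incremental("SELECT ad_group_ad.ad.name FROM ad_group_ad"))                        # False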
@@ -1,16 +1,27 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.

import json
import re
from unittest.mock import MagicMock

import pytest
from source_google_ads.components import GAQL
from source_google_ads.source import SourceGoogleAds
from freezegun import freeze_time

from airbyte_cdk.test.state_builder import StateBuilder

from .conftest import Obj, find_stream, get_source, read_full_refresh


def test_query_shopping_performance_view_stream(customers, config, requests_mock):
"""
Test that shopping_performance_view stream correctly processes and transforms data.

Verifies:
- OAuth token refresh
- Customer account fetching
- GAQL query generation with date filtering
- Record transformation (PascalCase -> snake_case, flattening)
"""
config["end_date"] = "2021-01-10"
config["conversion_window_days"] = 3
config["credentials"]["access_token"] = "access_token"
@@ -108,6 +119,16 @@ def test_query_shopping_performance_view_stream(customers, config, requests_mock


def test_custom_query_stream(customers, config_for_custom_query_tests, requests_mock, mocker):
"""
Test that custom query streams correctly generate schemas and execute queries.

Verifies:
- CustomGAQuerySchemaLoader dynamically generates JSON schema from Google Ads API metadata
- Enum types are properly handled with all possible values
- Date fields get the correct "format": "date" annotation
- Incremental queries are properly transformed with date range filters
- Record transformation matches expectations
"""
config_for_custom_query_tests["end_date"] = "2021-01-10"
config_for_custom_query_tests["conversion_window_days"] = 1
config_for_custom_query_tests["credentials"]["access_token"] = "access_token"
@@ -238,7 +259,7 @@ def test_custom_query_stream(customers, config_for_custom_query_tests, requests_
@pytest.mark.parametrize(
"query, expected_incremental_sync",
[
("\n select ad.id, segments.date, ad.resource_name\nfrom\nad", True),
("\tselect\rad.id,\tsegments.date,\tad.resource_name\nfrom\nad", True),
("\nselect ad.id, segments.date from ad", True),
("select ad.id, segments.date\nfrom\nad\norder\n by segments.date", True),
("\nselect\nad.id,\nsegments.date\nfrom\nad\norder\n by segments.date", True),
@@ -266,21 +287,216 @@ def test_custom_query_stream(customers, config_for_custom_query_tests, requests_
"SELECT ad_group_ad.ad.name, segments.date FROM ad_group_ad WHERE segments.date DURING LAST_30_DAYS ORDER BY ad_group_ad.ad.name",
False,
),
# Click view queries - incremental detection only (step override tested in test_custom_query_click_view_retention_and_step)
("SELECT click_view.gclid, segments.date FROM click_view", True),
("select click_view.gclid, segments.date from click_view", True),
("SELECT click_view.gclid, segments.date FROM click_view ORDER BY segments.date", True),
("SELECT click_view.gclid, segments.date FROM click_view ORDER BY segments.date ASC", True),
(
"""SELECT
click_view.gclid,
segments.date
FROM
click_view""",
True,
),
(
"SELECT click_view.gclid, click_view.ad_group_ad, segments.date FROM click_view WHERE segments.date BETWEEN '2025-10-21' AND '2025-10-21'",
False,
),
],
)
def test_custom_query_stream_with_different_queries(query, expected_incremental_sync, config_for_custom_query_tests, requests_mock):
def test_custom_query_stream_with_different_queries(query, expected_incremental_sync, config_for_custom_query_tests):
"""
Test that the manifest regex correctly identifies incremental queries and assigns correct requester class.

Verifies that queries with segments.date are correctly detected by the ComponentMappingDefinition
regex patterns and configured as incremental streams. The condition matches:
- 1 segments.date with SELECT...FROM pattern, OR
- 2 segments.date with SELECT...FROM AND ORDER BY patterns

Also verifies that incremental click_view queries use CustomGAQueryClickViewHttpRequester.

Note: Step override behavior is tested in test_custom_query_click_view_retention_and_step.
"""
config = config_for_custom_query_tests
config["custom_queries_array"][0]["query"] = query

streams = get_source(config=config).streams(config=config)
stream = next(filter(lambda s: s.name == "custom_ga_query", streams))

# Verify that the regex matching in the manifest correctly applies incremental sync
# by checking the stream_cursor_field which is set by the ComponentMappingDefinition
# The condition matches:
# - 1 segments.date with SELECT...FROM pattern, OR
# - 2 segments.date with SELECT...FROM AND ORDER BY...LIMIT patterns
# Verify that the regex matching correctly identifies incremental vs full-refresh queries
if expected_incremental_sync:
assert stream.cursor_field == "segments.date", f"Stream cursor field should be 'segments.date' for query: {query}"
else:
assert stream.cursor_field != "segments.date", f"Stream should not have segments.date as cursor field for query: {query}"

# Check if this is a click_view query using regex (case-insensitive)
# Matches patterns like: "FROM click_view", "from CLICK_VIEW", etc.
is_click_view = bool(re.search(r"\bFROM\s+click_view\b", query, re.IGNORECASE))

# Verify the requester class for incremental queries
# Access chain: stream -> partition_generator -> partition_factory -> retriever -> requester
# This retrieves the HTTP requester instance to verify its class type
requester_class_name = stream._stream_partition_generator._partition_factory._retriever.requester.__class__.__name__
if expected_incremental_sync and is_click_view:
assert requester_class_name == "CustomGAQueryClickViewHttpRequester", (
f"Click view incremental queries should use CustomGAQueryClickViewHttpRequester.\n"
f"Query: {query}\n"
f"Actual requester class: {requester_class_name}"
)
else:
assert requester_class_name == "CustomGAQueryHttpRequester", (
f"Regular queries should use CustomGAQueryHttpRequester.\n"
f"Query: {query}\n"
f"Actual requester class: {requester_class_name}"
)


@pytest.mark.parametrize(
"query, has_metrics",
[
("SELECT campaign.id, metrics.clicks, segments.date FROM campaign", True),
("SELECT ad_group.name, metrics.impressions, segments.date FROM ad_group", True),
("SELECT campaign.name, metrics.cost_micros FROM campaign", True),
("SELECT campaign.id, campaign.name, segments.date FROM campaign", False),
("SELECT ad_group.id, segments.date FROM ad_group", False),
],
ids=["metrics_clicks", "metrics_impressions", "metrics_cost", "no_metrics_1", "no_metrics_2"],
)
def test_custom_query_partition_router_for_metrics(query, has_metrics, config_for_custom_query_tests):
"""
Test that partition router is correctly added for queries with metrics.

Verifies that the ComponentMappingDefinition in manifest correctly
adds the customer_client_non_manager partition router when the query contains 'metrics'.
"""
config = config_for_custom_query_tests.copy()
stream_name = "test_partition"
config["custom_queries_array"] = [
{
"query": query,
"table_name": stream_name,
}
]

streams = get_source(config=config).streams(config=config)
stream = next(filter(lambda s: s.name == stream_name, streams))

# Navigate through the stream's partition routing structure to get the parent stream query
# When metrics are present, the ComponentMappingDefinition adds a partition router with
# customer_client_non_manager as the parent stream, which filters to non-manager accounts
stream_slicer = stream._stream_partition_generator._stream_slicer
partition_router = stream_slicer._partition_router if hasattr(stream_slicer, "_partition_router") else stream_slicer
parent_stream = partition_router.parent_stream_configs[0].stream
parent_stream_requester = parent_stream._stream_partition_generator._partition_factory._retriever.requester
parent_query = parent_stream_requester.request_options_provider.request_body_json["query"]

# Verify the parent stream query differs based on whether metrics are present
# Metrics queries need customer partitioning (manager = FALSE filter)
if has_metrics:
assert (
parent_query
== "SELECT customer_client.client_customer, customer_client.level, customer_client.id, customer_client.manager, customer_client.time_zone, customer_client.status FROM customer_client WHERE customer_client.manager = FALSE"
)
else:
assert (
parent_query
== "SELECT\n customer_client.client_customer,\n customer_client.level,\n customer_client.id,\n customer_client.manager,\n customer_client.time_zone,\n customer_client.status\nFROM\n customer_client\n"
)


@pytest.mark.parametrize(
"query, is_click_view",
[
# Click view queries should have 90-day retention and P1D step
("SELECT click_view.gclid, segments.date FROM click_view", True),
("SELECT\tclick_view.gclid,\tsegments.date\tFROM\tclick_view\tORDER\tBY\tsegments.date", True),
("select click_view.ad_group_ad, segments.date from click_view", True),
# Regular queries should use config.start_date and P14D step
("SELECT ad_group.id, segments.date FROM ad_group", False),
("SELECT campaign.name, segments.date FROM campaign ORDER BY segments.date", False),
],
)
@pytest.mark.parametrize(
"state_date, expected_start_click_view, expected_start_regular",
[
# No state - use retention dates
# click_view: 2025-01-01 minus 90 days = 2024-10-03
# regular: config.start_date = 2023-06-01
(None, "2024-10-03", "2023-06-01"),
# State within retention - use state date
# Both use state date since it's within the allowed range
("2024-12-01", "2024-12-01", "2024-12-01"),
# State before retention - click_view enforces retention, regular uses state
# click_view: Ignores old state, uses 2024-10-03 (90-day limit)
# regular: Uses state date 2024-01-01
("2024-01-01", "2024-10-03", "2024-01-01"),
],
ids=["no_state", "state_within_retention", "state_before_retention"],
)
@freeze_time("2025-01-01")
def test_custom_query_click_view_retention_and_step(
query, is_click_view, state_date, expected_start_click_view, expected_start_regular, config_for_custom_query_tests
):
"""
Test that click_view custom queries have correct step override and retention.

This test freezes time to 2025-01-01 and verifies:
- click_view queries: P1D step (1 day) - verifies step override in manifest (lines 1033-1053)
- click_view queries: 90-day retention via start_datetime override in manifest (lines 1054-1079)
- regular queries: P14D step (14 days) - default for incremental queries
- regular queries: use config.start_date for retention

Tests three state scenarios:
1. No state - uses retention dates
2. State within retention - uses state date
3. State before retention - click_view enforces retention, regular uses state
"""
config = config_for_custom_query_tests.copy()
config["start_date"] = "2023-06-01"
stream_name = "test_query"
config["custom_queries_array"] = [
{
"query": query,
"table_name": stream_name,
}
]

# Create source with or without state
if state_date:
state = StateBuilder().with_stream_state(stream_name, {"state": {"segments.date": state_date}}).build()
streams = get_source(config=config, state=state).streams(config=config)
else:
streams = get_source(config=config).streams(config=config)

stream = next(filter(lambda s: s.name == stream_name, streams))

# Verify incremental sync is enabled (all these queries have segments.date)
assert stream.cursor_field == "segments.date", f"Stream cursor field should be 'segments.date' for: {query}"

# Verify step override (P1D for click_view, P14D for regular)
cursor = stream.cursor._create_cursor(stream.cursor._global_cursor)
actual_step_days = cursor._slice_range.days
expected_step_days = 1 if is_click_view else 14

assert actual_step_days == expected_step_days, (
f"Step days mismatch.\n"
f"Query: {query}\n"
f"State: {state_date}\n"
f"Expected: {expected_step_days} days\n"
f"Actual: {actual_step_days} days"
)

# Verify start date (retention behavior)
expected_start_date = expected_start_click_view if is_click_view else expected_start_regular
actual_start_date = cursor.state["segments.date"]

assert actual_start_date == expected_start_date, (
f"Start date mismatch.\n"
f"Query: {query}\n"
f"State: {state_date}\n"
f"Expected start date: {expected_start_date}\n"
f"Actual start date: {actual_start_date}\n"
f"Click view should enforce 90-day retention (2024-10-03), regular queries use config.start_date or state."
)

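A quick check of the retention arithmetic the parameters above rely on: with time frozen to 2025-01-01, the 90-day click_view lookback resolves to 2024-10-03.

from datetime import date, timedelta

print(date(2025, 1, 1) - timedelta(days=90))  # 2024-10-03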
@@ -13,8 +13,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_NOW = datetime.now(timezone.utc)
@@ -12,9 +12,9 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from integration.response_builder import HarvestPaginatedResponseBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder
from mock_server.response_builder import HarvestPaginatedResponseBuilder


_NOW = datetime.now(timezone.utc)
@@ -11,7 +11,7 @@ from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse
from integration.config import ConfigBuilder
from mock_server.config import ConfigBuilder


_NOW = datetime.now(timezone.utc)
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_NOW = datetime.now(timezone.utc)
@@ -11,8 +11,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_STREAM_NAME = "cost_rates"
@@ -11,8 +11,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_STREAM_NAME = "estimate_item_categories"
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_STREAM_NAME = "estimate_messages"
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_NOW = datetime.now(timezone.utc)
@@ -11,8 +11,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_STREAM_NAME = "expense_categories"
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import HarvestRequestBuilder


_NOW = datetime.now(timezone.utc)
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from airbyte_cdk.test.state_builder import StateBuilder
from integration.config import ConfigBuilder
from integration.request_builder import HarvestRequestBuilder
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "expenses_categories"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "expenses_clients"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "expenses_projects"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "expenses_team"
|
||||
@@ -11,8 +11,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "invoice_item_categories"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "invoice_messages"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -13,8 +13,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -11,8 +11,8 @@ from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "project_budget"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "time_clients"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "time_projects"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "time_tasks"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "time_team"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "uninvoiced"
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -12,8 +12,8 @@ from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
from integration.request_builder import HarvestRequestBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import HarvestRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
@@ -1,158 +0,0 @@
|
||||
#
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
#
|
||||
import json
|
||||
import unittest
|
||||
from unittest import TestCase
|
||||
|
||||
import pytest
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.mock_http.response_builder import (
|
||||
FieldPath,
|
||||
HttpResponseBuilder,
|
||||
RecordBuilder,
|
||||
create_record_builder,
|
||||
create_response_builder,
|
||||
find_template,
|
||||
)
|
||||
|
||||
from .config import BUSINESS_ACCOUNT_ID, ConfigBuilder
|
||||
from .pagination import NEXT_PAGE_TOKEN, InstagramPaginationStrategy
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import get_account_response
|
||||
from .utils import config, read_output
|
||||
|
||||
|
||||
PARENT_FIELDS = [
|
||||
"caption",
|
||||
"id",
|
||||
"ig_id",
|
||||
"like_count",
|
||||
"media_type",
|
||||
"media_product_type",
|
||||
"media_url",
|
||||
"owner",
|
||||
"permalink",
|
||||
"shortcode",
|
||||
"thumbnail_url",
|
||||
"timestamp",
|
||||
"username",
|
||||
]
|
||||
_PARENT_STREAM_NAME = "stories"
|
||||
_STREAM_NAME = "story_insights"
|
||||
|
||||
STORIES_ID = "3874523487643"
|
||||
STORIES_ID_ERROR_CODE_10 = "3874523487644"
|
||||
|
||||
HAPPY_PATH = "story_insights_happy_path"
|
||||
ERROR_10 = "story_insights_error_code_10"
|
||||
|
||||
_METRICS = ["reach", "replies", "follows", "profile_visits", "shares", "total_interactions"]
|
||||
|
||||
|
||||
def _get_parent_request() -> RequestBuilder:
|
||||
return RequestBuilder.get_stories_endpoint(item_id=BUSINESS_ACCOUNT_ID).with_limit(100).with_fields(PARENT_FIELDS)
|
||||
|
||||
|
||||
def _get_child_request(media_id, metric) -> RequestBuilder:
|
||||
return RequestBuilder.get_media_insights_endpoint(item_id=media_id).with_custom_param("metric", metric, with_format=True)
|
||||
|
||||
|
||||
def _get_response(stream_name: str, test: str = None, with_pagination_strategy: bool = True) -> HttpResponseBuilder:
|
||||
scenario = ""
|
||||
if test:
|
||||
scenario = f"_for_{test}"
|
||||
kwargs = {
|
||||
"response_template": find_template(f"{stream_name}{scenario}", __file__),
|
||||
"records_path": FieldPath("data"),
|
||||
"pagination_strategy": InstagramPaginationStrategy(request=_get_parent_request().build(), next_page_token=NEXT_PAGE_TOKEN),
|
||||
}
|
||||
if with_pagination_strategy:
|
||||
kwargs["pagination_strategy"] = InstagramPaginationStrategy(request=_get_parent_request().build(), next_page_token=NEXT_PAGE_TOKEN)
|
||||
|
||||
return create_response_builder(**kwargs)
|
||||
|
||||
|
||||
def _record(stream_name: str, test: str = None) -> RecordBuilder:
|
||||
scenario = ""
|
||||
if test:
|
||||
scenario = f"_for_{test}"
|
||||
return create_record_builder(
|
||||
response_template=find_template(f"{stream_name}{scenario}", __file__),
|
||||
records_path=FieldPath("data"),
|
||||
record_id_path=FieldPath("id"),
|
||||
)
|
||||
|
||||
|
||||
class TestFullRefresh(TestCase):
|
||||
@staticmethod
|
||||
def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput:
|
||||
return read_output(
|
||||
config_builder=config_,
|
||||
stream_name=_STREAM_NAME,
|
||||
sync_mode=SyncMode.full_refresh,
|
||||
expecting_exception=expecting_exception,
|
||||
)
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_story_insights(self, http_mocker: HttpMocker) -> None:
|
||||
test = HAPPY_PATH
|
||||
# Mocking API stream
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
# Mocking parent stream
|
||||
http_mocker.get(
|
||||
_get_parent_request().build(),
|
||||
_get_response(stream_name=_PARENT_STREAM_NAME, test=test)
|
||||
.with_record(_record(stream_name=_PARENT_STREAM_NAME, test=test))
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{test}", __file__)), 200),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["page_id"]
|
||||
assert output.records[0].record.data["business_account_id"]
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS:
|
||||
assert metric in output.records[0].record.data
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_story_insights_for_error_code_30(self, http_mocker: HttpMocker) -> None:
|
||||
test = ERROR_10
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
# Mocking parent stream
|
||||
http_mocker.get(
|
||||
_get_parent_request().build(), HttpResponse(json.dumps(find_template(f"{_PARENT_STREAM_NAME}_for_{test}", __file__)), 200)
|
||||
)
|
||||
# Good response
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{HAPPY_PATH}", __file__)), 200),
|
||||
)
|
||||
# error 10
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID_ERROR_CODE_10, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{test}", __file__)), 400),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# error was ignored and correct record was processed
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["page_id"]
|
||||
assert output.records[0].record.data["business_account_id"]
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS:
|
||||
assert metric in output.records[0].record.data
|
||||
@@ -24,5 +24,9 @@ class ConfigBuilder:
"start_date": START_DATE,
}

def with_start_date(self, start_date: str) -> "ConfigBuilder":
self._config["start_date"] = start_date
return self

def build(self) -> MutableMapping[str, Any]:
return self._config
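For context, the new builder method is meant to be chained like the other setters; a minimal usage sketch (the date value is illustrative):

config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z").build()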
@@ -6,7 +6,7 @@ from __future__ import annotations
from typing import List, Optional, Union

from airbyte_cdk.connector_builder.connector_builder_handler import resolve_manifest
from airbyte_cdk.test.mock_http.request import HttpRequest
from airbyte_cdk.test.mock_http.request import ANY_QUERY_PARAMS, HttpRequest

from ..conftest import get_source
from .config import ACCOUNTS_FIELDS
@@ -81,6 +81,14 @@ class RequestBuilder:
self._query_params[param] = value
return self

def with_any_query_params(self) -> RequestBuilder:
"""Set query params to ANY_QUERY_PARAMS to match any query parameters.

This is useful for streams with dynamic query parameters like datetime cursors.
"""
self._query_params = ANY_QUERY_PARAMS
return self

@staticmethod
def _get_formatted_fields(fields: List[str]) -> str:
return ",".join(fields)
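A short sketch of how the new helper changes request matching in these tests. Builder and endpoint names are taken from this changeset; the specific params are illustrative:

# Match a user_insights request regardless of its since/until/period/metric values
loose_request = RequestBuilder.get_user_lifetime_insights_endpoint(item_id=BUSINESS_ACCOUNT_ID).with_any_query_params().build()

# Versus pinning exact params when the cursor window is known in advance
exact_request = (
    RequestBuilder.get_user_lifetime_insights_endpoint(item_id=BUSINESS_ACCOUNT_ID)
    .with_custom_param("period", "day")
    .with_custom_param("metric", "follower_count,reach")
    .build()
)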
@@ -27,3 +27,19 @@ def get_account_response() -> HttpResponse:
"paging": {"cursors": {"before": "before_token"}},
}
return build_response(body=response, status_code=HTTPStatus.OK)


SECOND_PAGE_ID = "333333333333333"
SECOND_BUSINESS_ACCOUNT_ID = "444444444444444"


def get_multiple_accounts_response() -> HttpResponse:
"""Return a response with 2 accounts for testing substreams with multiple parent records."""
response = {
"data": [
{"id": PAGE_ID, "name": "AccountName", "instagram_business_account": {"id": BUSINESS_ACCOUNT_ID}},
{"id": SECOND_PAGE_ID, "name": "SecondAccount", "instagram_business_account": {"id": SECOND_BUSINESS_ACCOUNT_ID}},
],
"paging": {"cursors": {"before": "before_token"}},
}
return build_response(body=response, status_code=HTTPStatus.OK)
@@ -67,6 +67,11 @@ class TestFullRefresh(TestCase):
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
# Verify transformations are applied (page_id, business_account_id in account field)
|
||||
record = output.records[0].record.data
|
||||
assert "account" in record
|
||||
assert "page_id" in record["account"]
|
||||
assert "business_account_id" in record["account"]
|
||||
|
||||
@HttpMocker()
|
||||
def test_accounts_with_no_instagram_business_account_field(self, http_mocker: HttpMocker) -> None:
|
||||
@@ -20,7 +20,7 @@ from airbyte_cdk.test.mock_http.response_builder import (
|
||||
from .config import BUSINESS_ACCOUNT_ID, ConfigBuilder
|
||||
from .pagination import NEXT_PAGE_TOKEN, InstagramPaginationStrategy
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import get_account_response
|
||||
from .response_builder import SECOND_BUSINESS_ACCOUNT_ID, get_account_response, get_multiple_accounts_response
|
||||
from .utils import config, read_output
|
||||
|
||||
|
||||
@@ -96,6 +96,13 @@ class TestFullRefresh(TestCase):
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
# Verify transformations are applied
|
||||
record = output.records[0].record.data
|
||||
assert "page_id" in record
|
||||
assert "business_account_id" in record
|
||||
assert "media_insights_info" in record
|
||||
assert record["page_id"] is not None
|
||||
assert record["business_account_id"] is not None
|
||||
|
||||
@HttpMocker()
|
||||
def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: HttpMocker) -> None:
|
||||
@@ -158,3 +165,29 @@ class TestFullRefresh(TestCase):
|
||||
assert "ig_id" in child
|
||||
assert "media_type" in child
|
||||
assert "owner" in child
|
||||
|
||||
@HttpMocker()
|
||||
def test_substream_with_multiple_parent_accounts(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test media stream against 2+ parent accounts per playbook requirements."""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_multiple_accounts_response(),
|
||||
)
|
||||
# Mock media requests for both accounts
|
||||
http_mocker.get(
|
||||
_get_request().build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
http_mocker.get(
|
||||
RequestBuilder.get_media_endpoint(item_id=SECOND_BUSINESS_ACCOUNT_ID).with_limit(100).with_fields(_FIELDS).build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# Verify we get records from both accounts
|
||||
assert len(output.records) == 2
|
||||
# Verify transformations on all records
|
||||
for record in output.records:
|
||||
assert "page_id" in record.record.data
|
||||
assert "business_account_id" in record.record.data
|
||||
assert "media_insights_info" in record.record.data
|
||||
@@ -267,9 +267,87 @@ class TestFullRefresh(TestCase):
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS[MEDIA_ID_GENERAL_MEDIA]:
|
||||
assert metric in output.records[0].record.data
|
||||
# For IGNORE handlers, verify no ERROR logs are produced
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_substream_with_multiple_parent_records(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test media_insights substream against 2+ parent records per playbook requirements."""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
# Mock parent stream returning 2 media records (reels and general_media)
|
||||
parent_response = {
|
||||
"data": [
|
||||
{
|
||||
"caption": "a caption",
|
||||
"comments_count": 2,
|
||||
"id": MEDIA_ID_REELS,
|
||||
"ig_id": "3123724930722523505",
|
||||
"is_comment_enabled": True,
|
||||
"like_count": 12,
|
||||
"media_type": "VIDEO",
|
||||
"media_product_type": "REELS",
|
||||
"media_url": "https://fakecontent.com/path/to/content",
|
||||
"owner": {"id": "41408147298757123"},
|
||||
"permalink": "https://instagram.com/permalink/123",
|
||||
"shortcode": "HGagdsy38",
|
||||
"thumbnail_url": "https://fakecontent.cdninstagram.com/v/somepath/",
|
||||
"timestamp": "2023-06-12T19:20:02+0000",
|
||||
"username": "username",
|
||||
},
|
||||
{
|
||||
"caption": "another caption",
|
||||
"comments_count": 0,
|
||||
"id": MEDIA_ID_GENERAL_MEDIA,
|
||||
"ig_id": "2034885879374760912",
|
||||
"is_comment_enabled": True,
|
||||
"like_count": 52,
|
||||
"media_type": "IMAGE",
|
||||
"media_product_type": "FEED",
|
||||
"media_url": "https://fakecontent.com/path/to/content2",
|
||||
"owner": {"id": "41408147298757123"},
|
||||
"permalink": "https://instagram.com/permalink/456",
|
||||
"shortcode": "ABC123",
|
||||
"timestamp": "2019-05-02T11:42:01+0000",
|
||||
"username": "username",
|
||||
},
|
||||
],
|
||||
"paging": {"cursors": {"before": "cursor123"}},
|
||||
}
|
||||
http_mocker.get(
|
||||
_get_parent_request().build(),
|
||||
HttpResponse(json.dumps(parent_response), 200),
|
||||
)
|
||||
|
||||
# Mock child requests for both parent records
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=MEDIA_ID_REELS, metric=_METRICS[MEDIA_ID_REELS]).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{REELS}", __file__)), 200),
|
||||
)
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=MEDIA_ID_GENERAL_MEDIA, metric=_METRICS[MEDIA_ID_GENERAL_MEDIA]).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{GENERAL_MEDIA}", __file__)), 200),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# Verify we get records from both parent records
|
||||
assert len(output.records) == 2
|
||||
record_ids = {r.record.data["id"] for r in output.records}
|
||||
assert MEDIA_ID_REELS in record_ids
|
||||
assert MEDIA_ID_GENERAL_MEDIA in record_ids
|
||||
# Verify transformations on all records
|
||||
for record in output.records:
|
||||
assert record.record.data["page_id"]
|
||||
assert record.record.data["business_account_id"]
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_insights_error_posted_before_business(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error_subcode 2108006 (posted before business conversion) is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
test = ERROR_POSTED_BEFORE_BUSINESS
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
@@ -298,9 +376,18 @@ class TestFullRefresh(TestCase):
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS[MEDIA_ID_GENERAL_MEDIA]:
|
||||
assert metric in output.records[0].record.data
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any(
|
||||
"Insights error for business_account_id" in msg for msg in log_messages
|
||||
), f"Expected 'Insights error for business_account_id' in logs but got: {log_messages}"
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_insights_error_with_wrong_permissions(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error code 100 with subcode 33 (wrong permissions) is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
test = ERROR_WITH_WRONG_PERMISSIONS
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
@@ -323,16 +410,24 @@ class TestFullRefresh(TestCase):
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# error was ignored and correct record was processed
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["page_id"]
|
||||
assert output.records[0].record.data["business_account_id"]
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS[MEDIA_ID_GENERAL_MEDIA]:
|
||||
assert metric in output.records[0].record.data
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any(
|
||||
"Check provided permissions for" in msg for msg in log_messages
|
||||
), f"Expected 'Check provided permissions for' in logs but got: {log_messages}"
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_insights_error_with_wrong_permissions_code_10(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error code 10 with permission denied message is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
test = ERROR_WITH_WRONG_PERMISSIONS_CODE_10
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
@@ -355,10 +450,14 @@ class TestFullRefresh(TestCase):
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# error was ignored and correct record was processed
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["page_id"]
|
||||
assert output.records[0].record.data["business_account_id"]
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS[MEDIA_ID_GENERAL_MEDIA]:
|
||||
assert metric in output.records[0].record.data
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any(
|
||||
"Check provided permissions for" in msg for msg in log_messages
|
||||
), f"Expected 'Check provided permissions for' in logs but got: {log_messages}"
|
||||
@@ -19,7 +19,7 @@ from airbyte_cdk.test.mock_http.response_builder import (
|
||||
from .config import BUSINESS_ACCOUNT_ID, ConfigBuilder
|
||||
from .pagination import NEXT_PAGE_TOKEN, InstagramPaginationStrategy
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import get_account_response
|
||||
from .response_builder import SECOND_BUSINESS_ACCOUNT_ID, get_account_response, get_multiple_accounts_response
|
||||
from .utils import config, read_output
|
||||
|
||||
|
||||
@@ -85,6 +85,12 @@ class TestFullRefresh(TestCase):
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
# Verify transformations are applied (page_id, business_account_id, story_insights_info, timestamp)
|
||||
record = output.records[0].record.data
|
||||
assert "page_id" in record
|
||||
assert "business_account_id" in record
|
||||
assert "story_insights_info" in record
|
||||
assert "timestamp" in record
|
||||
|
||||
@HttpMocker()
|
||||
def test_given_multiple_pages_when_read_then_return_records(self, http_mocker: HttpMocker) -> None:
|
||||
@@ -104,3 +110,29 @@ class TestFullRefresh(TestCase):
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 3
|
||||
|
||||
@HttpMocker()
|
||||
def test_substream_with_multiple_parent_accounts(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test stories stream against 2+ parent accounts per playbook requirements."""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_multiple_accounts_response(),
|
||||
)
|
||||
# Mock stories requests for both accounts
|
||||
http_mocker.get(
|
||||
_get_request().build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
http_mocker.get(
|
||||
RequestBuilder.get_stories_endpoint(item_id=SECOND_BUSINESS_ACCOUNT_ID).with_limit(100).with_fields(FIELDS).build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# Verify we get records from both accounts
|
||||
assert len(output.records) == 2
|
||||
# Verify transformations on all records
|
||||
for record in output.records:
|
||||
assert "page_id" in record.record.data
|
||||
assert "business_account_id" in record.record.data
|
||||
assert "story_insights_info" in record.record.data
|
||||
@@ -0,0 +1,284 @@
|
||||
#
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
#
|
||||
import json
|
||||
from unittest import TestCase
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.mock_http.response_builder import (
|
||||
FieldPath,
|
||||
HttpResponseBuilder,
|
||||
RecordBuilder,
|
||||
create_record_builder,
|
||||
create_response_builder,
|
||||
find_template,
|
||||
)
|
||||
|
||||
from .config import BUSINESS_ACCOUNT_ID, ConfigBuilder
|
||||
from .pagination import NEXT_PAGE_TOKEN, InstagramPaginationStrategy
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import get_account_response
|
||||
from .utils import config, read_output
|
||||
|
||||
|
||||
PARENT_FIELDS = [
|
||||
"caption",
|
||||
"id",
|
||||
"ig_id",
|
||||
"like_count",
|
||||
"media_type",
|
||||
"media_product_type",
|
||||
"media_url",
|
||||
"owner",
|
||||
"permalink",
|
||||
"shortcode",
|
||||
"thumbnail_url",
|
||||
"timestamp",
|
||||
"username",
|
||||
]
|
||||
_PARENT_STREAM_NAME = "stories"
|
||||
_STREAM_NAME = "story_insights"
|
||||
|
||||
STORIES_ID = "3874523487643"
|
||||
STORIES_ID_ERROR_CODE_10 = "3874523487644"
|
||||
|
||||
HAPPY_PATH = "story_insights_happy_path"
|
||||
ERROR_10 = "story_insights_error_code_10"
|
||||
|
||||
_METRICS = ["reach", "replies", "follows", "profile_visits", "shares", "total_interactions"]
|
||||
|
||||
|
||||
def _get_parent_request() -> RequestBuilder:
|
||||
return RequestBuilder.get_stories_endpoint(item_id=BUSINESS_ACCOUNT_ID).with_limit(100).with_fields(PARENT_FIELDS)
|
||||
|
||||
|
||||
def _get_child_request(media_id, metric) -> RequestBuilder:
|
||||
return RequestBuilder.get_media_insights_endpoint(item_id=media_id).with_custom_param("metric", metric, with_format=True)
|
||||
|
||||
|
||||
def _get_response(stream_name: str, test: str = None, with_pagination_strategy: bool = True) -> HttpResponseBuilder:
|
||||
scenario = ""
|
||||
if test:
|
||||
scenario = f"_for_{test}"
|
||||
kwargs = {
"response_template": find_template(f"{stream_name}{scenario}", __file__),
"records_path": FieldPath("data"),
}
# Only attach a pagination strategy when requested, so the flag actually controls paging.
if with_pagination_strategy:
kwargs["pagination_strategy"] = InstagramPaginationStrategy(request=_get_parent_request().build(), next_page_token=NEXT_PAGE_TOKEN)

return create_response_builder(**kwargs)
|
||||
|
||||
|
||||
def _record(stream_name: str, test: str = None) -> RecordBuilder:
|
||||
scenario = ""
|
||||
if test:
|
||||
scenario = f"_for_{test}"
|
||||
return create_record_builder(
|
||||
response_template=find_template(f"{stream_name}{scenario}", __file__),
|
||||
records_path=FieldPath("data"),
|
||||
record_id_path=FieldPath("id"),
|
||||
)
|
||||
|
||||
|
||||
class TestFullRefresh(TestCase):
|
||||
@staticmethod
|
||||
def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput:
|
||||
return read_output(
|
||||
config_builder=config_,
|
||||
stream_name=_STREAM_NAME,
|
||||
sync_mode=SyncMode.full_refresh,
|
||||
expecting_exception=expecting_exception,
|
||||
)
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_story_insights(self, http_mocker: HttpMocker) -> None:
|
||||
test = HAPPY_PATH
|
||||
# Mocking API stream
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
# Mocking parent stream
|
||||
http_mocker.get(
|
||||
_get_parent_request().build(),
|
||||
_get_response(stream_name=_PARENT_STREAM_NAME, test=test)
|
||||
.with_record(_record(stream_name=_PARENT_STREAM_NAME, test=test))
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{test}", __file__)), 200),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["page_id"]
|
||||
assert output.records[0].record.data["business_account_id"]
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS:
|
||||
assert metric in output.records[0].record.data
|
||||
|
||||
@HttpMocker()
|
||||
def test_instagram_story_insights_for_error_code_10(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error code 10 is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
test = ERROR_10
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
# Mocking parent stream
|
||||
http_mocker.get(
|
||||
_get_parent_request().build(), HttpResponse(json.dumps(find_template(f"{_PARENT_STREAM_NAME}_for_{test}", __file__)), 200)
|
||||
)
|
||||
# Good response
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{HAPPY_PATH}", __file__)), 200),
|
||||
)
|
||||
# error 10
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID_ERROR_CODE_10, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{test}", __file__)), 400),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["page_id"]
|
||||
assert output.records[0].record.data["business_account_id"]
|
||||
assert output.records[0].record.data["id"]
|
||||
for metric in _METRICS:
|
||||
assert metric in output.records[0].record.data
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any("Insights error" in msg for msg in log_messages), f"Expected 'Insights error' in logs but got: {log_messages}"
|
||||
|
||||
@HttpMocker()
|
||||
def test_substream_with_multiple_parent_records(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test story_insights substream against 2+ parent records per playbook requirements."""
|
||||
STORIES_ID_2 = "3874523487645"
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
# Mock parent stream returning 2 story records
|
||||
parent_response = {
|
||||
"data": [
|
||||
{
|
||||
"id": STORIES_ID,
|
||||
"ig_id": "ig_id_1",
|
||||
"like_count": 0,
|
||||
"media_type": "VIDEO",
|
||||
"media_product_type": "STORY",
|
||||
"media_url": "https://fakecontent.cdninstagram.com/path1/path2/some_value",
|
||||
"owner": {"id": "owner_id"},
|
||||
"permalink": "https://placeholder.com/stories/username/some_id_value",
|
||||
"shortcode": "ERUY34867_3",
|
||||
"thumbnail_url": "https://content.cdnfaker.com/path1/path2/some_value",
|
||||
"timestamp": "2024-06-17T19:39:18+0000",
|
||||
"username": "username",
|
||||
},
|
||||
{
|
||||
"id": STORIES_ID_2,
|
||||
"ig_id": "ig_id_2",
|
||||
"like_count": 5,
|
||||
"media_type": "IMAGE",
|
||||
"media_product_type": "STORY",
|
||||
"media_url": "https://fakecontent.cdninstagram.com/path1/path2/another_value",
|
||||
"owner": {"id": "owner_id"},
|
||||
"permalink": "https://placeholder.com/stories/username/another_id_value",
|
||||
"shortcode": "XYZ98765_4",
|
||||
"thumbnail_url": "https://content.cdnfaker.com/path1/path2/another_value",
|
||||
"timestamp": "2024-06-18T10:15:30+0000",
|
||||
"username": "username",
|
||||
},
|
||||
],
|
||||
"paging": {"cursors": {"before": "cursor123"}},
|
||||
}
|
||||
http_mocker.get(
|
||||
_get_parent_request().build(),
|
||||
HttpResponse(json.dumps(parent_response), 200),
|
||||
)
|
||||
|
||||
# Mock child requests for both parent records
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(find_template(f"{_STREAM_NAME}_for_{HAPPY_PATH}", __file__)), 200),
|
||||
)
|
||||
# Build response for second story with different ID
|
||||
story_insights_response_2 = {
|
||||
"data": [
|
||||
{
|
||||
"name": "reach",
|
||||
"period": "lifetime",
|
||||
"values": [{"value": 150}],
|
||||
"title": "Reach",
|
||||
"description": "desc",
|
||||
"id": f"{STORIES_ID_2}/insights/reach/lifetime",
|
||||
},
|
||||
{
|
||||
"name": "replies",
|
||||
"period": "lifetime",
|
||||
"values": [{"value": 3}],
|
||||
"title": "Replies",
|
||||
"description": "desc",
|
||||
"id": f"{STORIES_ID_2}/insights/replies/lifetime",
|
||||
},
|
||||
{
|
||||
"name": "follows",
|
||||
"period": "lifetime",
|
||||
"values": [{"value": 2}],
|
||||
"title": "Follows",
|
||||
"description": "desc",
|
||||
"id": f"{STORIES_ID_2}/insights/follows/lifetime",
|
||||
},
|
||||
{
|
||||
"name": "profile_visits",
|
||||
"period": "lifetime",
|
||||
"values": [{"value": 10}],
|
||||
"title": "Profile Visits",
|
||||
"description": "desc",
|
||||
"id": f"{STORIES_ID_2}/insights/profile_visits/lifetime",
|
||||
},
|
||||
{
|
||||
"name": "shares",
|
||||
"period": "lifetime",
|
||||
"values": [{"value": 1}],
|
||||
"title": "Shares",
|
||||
"description": "desc",
|
||||
"id": f"{STORIES_ID_2}/insights/shares/lifetime",
|
||||
},
|
||||
{
|
||||
"name": "total_interactions",
|
||||
"period": "lifetime",
|
||||
"values": [{"value": 16}],
|
||||
"title": "Total Interactions",
|
||||
"description": "desc",
|
||||
"id": f"{STORIES_ID_2}/insights/total_interactions/lifetime",
|
||||
},
|
||||
]
|
||||
}
|
||||
http_mocker.get(
|
||||
_get_child_request(media_id=STORIES_ID_2, metric=_METRICS).build(),
|
||||
HttpResponse(json.dumps(story_insights_response_2), 200),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# Verify we get records from both parent records
|
||||
assert len(output.records) == 2
|
||||
record_ids = {r.record.data["id"] for r in output.records}
|
||||
assert STORIES_ID in record_ids
|
||||
assert STORIES_ID_2 in record_ids
|
||||
# Verify transformations on all records
|
||||
for record in output.records:
|
||||
assert record.record.data["page_id"]
|
||||
assert record.record.data["business_account_id"]
|
||||
for metric in _METRICS:
|
||||
assert metric in record.record.data
|
||||
@@ -0,0 +1,400 @@
|
||||
#
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
#
|
||||
|
||||
import json
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
|
||||
from .config import BUSINESS_ACCOUNT_ID, PAGE_ID, ConfigBuilder
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import SECOND_BUSINESS_ACCOUNT_ID, SECOND_PAGE_ID, get_account_response, get_multiple_accounts_response
|
||||
from .utils import read_output
|
||||
|
||||
|
||||
_STREAM_NAME = "user_insights"

_FROZEN_TIME = "2024-01-15T12:00:00Z"


def _get_user_insights_request_any_params(business_account_id: str) -> RequestBuilder:
"""Create a request builder for user_insights with any query params.

The user_insights stream uses DatetimeBasedCursor with step P1D and QueryProperties
with 4 chunks (day/follower_count,reach; week/reach; days_28/reach; lifetime/online_followers).
This creates multiple time slices and query property combinations.
Using with_any_query_params() allows matching all these requests when the exact
parameters are not predictable or when testing behavior that doesn't depend on
specific request parameters.
"""
return RequestBuilder.get_user_lifetime_insights_endpoint(item_id=business_account_id).with_any_query_params()
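As a rough illustration of the fan-out the docstring describes, requests multiply along two axes: QueryProperties chunks and daily time slices. A sketch follows; the chunk definitions are copied from the docstring, and the slice count is an assumption about the P1D step:

from datetime import date

# The four period/metric chunks named in the docstring above.
QUERY_PROPERTY_CHUNKS = [
    ("day", "follower_count,reach"),
    ("week", "reach"),
    ("days_28", "reach"),
    ("lifetime", "online_followers"),
]

def approximate_request_count(start: date, end: date) -> int:
    # Roughly one daily slice per day in the window, times one request per chunk.
    daily_slices = (end - start).days + 1
    return daily_slices * len(QUERY_PROPERTY_CHUNKS)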
def _get_user_insights_request_with_params(business_account_id: str, since: str, until: str, period: str, metric: str) -> RequestBuilder:
|
||||
"""Create a request builder for user_insights with specific query params."""
|
||||
return (
|
||||
RequestBuilder.get_user_lifetime_insights_endpoint(item_id=business_account_id)
|
||||
.with_custom_param("since", since)
|
||||
.with_custom_param("until", until)
|
||||
.with_custom_param("period", period)
|
||||
.with_custom_param("metric", metric)
|
||||
)
|
||||
|
||||
|
||||
def _build_user_insights_response() -> HttpResponse:
|
||||
"""Build a successful user_insights response inline."""
|
||||
body = {
|
||||
"data": [
|
||||
{
|
||||
"name": "follower_count",
|
||||
"period": "day",
|
||||
"values": [{"value": 1000, "end_time": "2024-01-15T07:00:00+0000"}],
|
||||
"title": "Follower Count",
|
||||
"description": "Total number of followers",
|
||||
"id": f"{BUSINESS_ACCOUNT_ID}/insights/follower_count/day",
|
||||
},
|
||||
{
|
||||
"name": "reach",
|
||||
"period": "day",
|
||||
"values": [{"value": 500, "end_time": "2024-01-15T07:00:00+0000"}],
|
||||
"title": "Reach",
|
||||
"description": "Total reach",
|
||||
"id": f"{BUSINESS_ACCOUNT_ID}/insights/reach/day",
|
||||
},
|
||||
]
|
||||
}
|
||||
return HttpResponse(json.dumps(body), 200)
|
||||
|
||||
|
||||
def _build_error_response(code: int, message: str, error_subcode: int = None) -> HttpResponse:
|
||||
"""Build an error response inline.
|
||||
|
||||
Args:
|
||||
code: The error code (e.g., 100, 10)
|
||||
message: The error message
|
||||
error_subcode: Optional error subcode (e.g., 2108006, 33)
|
||||
"""
|
||||
error = {
|
||||
"message": message,
|
||||
"type": "OAuthException",
|
||||
"code": code,
|
||||
"fbtrace_id": "ABC123",
|
||||
}
|
||||
if error_subcode is not None:
|
||||
error["error_subcode"] = error_subcode
|
||||
return HttpResponse(json.dumps({"error": error}), 400)
|
||||
|
||||
|
||||
class TestFullRefresh(TestCase):
|
||||
@staticmethod
|
||||
def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput:
|
||||
return read_output(
|
||||
config_builder=config_,
|
||||
stream_name=_STREAM_NAME,
|
||||
sync_mode=SyncMode.full_refresh,
|
||||
expecting_exception=expecting_exception,
|
||||
)
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_read_records_full_refresh(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test full refresh sync for user_insights stream.
|
||||
|
||||
The user_insights stream uses DatetimeBasedCursor with step P1D and QueryProperties
|
||||
with multiple chunks. We set start_date close to frozen time to minimize time slices.
|
||||
Using with_any_query_params() because the stream makes multiple requests with different
|
||||
period/metric combinations that are determined by the QueryProperties configuration.
|
||||
"""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z")
|
||||
output = self._read(config_=test_config)
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record.get("page_id") == PAGE_ID
|
||||
assert record.get("business_account_id") == BUSINESS_ACCOUNT_ID
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_substream_with_multiple_parent_accounts(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test user_insights stream against 2+ parent accounts per playbook requirements.
|
||||
|
||||
This test verifies that the stream correctly processes data from multiple parent accounts
|
||||
and applies transformations (page_id, business_account_id) to records from each account.
|
||||
"""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_multiple_accounts_response(),
|
||||
)
|
||||
|
||||
# Mock user_insights requests for both accounts
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(SECOND_BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z")
|
||||
output = self._read(config_=test_config)
|
||||
|
||||
# Verify we get records from both accounts
|
||||
assert len(output.records) == 2
|
||||
|
||||
# Verify transformations on all records
|
||||
business_account_ids = {record.record.data.get("business_account_id") for record in output.records}
|
||||
assert BUSINESS_ACCOUNT_ID in business_account_ids
|
||||
assert SECOND_BUSINESS_ACCOUNT_ID in business_account_ids
|
||||
|
||||
for record in output.records:
|
||||
assert "page_id" in record.record.data
|
||||
assert record.record.data["page_id"] is not None
|
||||
assert "business_account_id" in record.record.data
|
||||
assert record.record.data["business_account_id"] is not None
|
||||
|
||||
|
||||
class TestIncremental(TestCase):
|
||||
@staticmethod
|
||||
def _read(
|
||||
config_: ConfigBuilder,
|
||||
state: list = None,
|
||||
expecting_exception: bool = False,
|
||||
) -> EntrypointOutput:
|
||||
return read_output(
|
||||
config_builder=config_,
|
||||
stream_name=_STREAM_NAME,
|
||||
sync_mode=SyncMode.incremental,
|
||||
state=state,
|
||||
expecting_exception=expecting_exception,
|
||||
)
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_incremental_sync_first_sync_no_state(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test incremental sync with no prior state (first sync).
|
||||
|
||||
Using with_any_query_params() because without prior state, the stream starts from
|
||||
start_date and creates multiple time slices with different period/metric combinations.
|
||||
"""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z")
|
||||
output = self._read(config_=test_config)
|
||||
assert len(output.records) == 1
|
||||
assert len(output.state_messages) >= 1
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_incremental_sync_with_prior_state(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test incremental sync with prior state (subsequent sync).
|
||||
|
||||
With prior state at 2024-01-15T00:00:00+00:00 and frozen time at 2024-01-15T12:00:00Z,
|
||||
the stream should request data with since=2024-01-15T00:00:00Z.
|
||||
We verify the outbound request includes the expected since parameter derived from state
|
||||
by mocking specific query params for each QueryProperties chunk.
|
||||
|
||||
The DatetimeBasedCursor uses the state value as the starting point, and the frozen time
|
||||
determines the end datetime. With step P1D, there's only one time slice from state to now.
|
||||
"""
|
||||
prior_state_value = "2024-01-15T00:00:00+00:00"
|
||||
# Expected since value derived from state - the API uses the state value format directly
|
||||
expected_since = "2024-01-15T00:00:00+00:00"
|
||||
# Expected until value is the frozen time (in the same format as the API expects)
|
||||
expected_until = "2024-01-15T12:00:00+00:00"
|
||||
|
||||
state = (
|
||||
StateBuilder()
|
||||
.with_stream_state(
|
||||
_STREAM_NAME,
|
||||
{
|
||||
"states": [
|
||||
{
|
||||
"partition": {"business_account_id": BUSINESS_ACCOUNT_ID},
|
||||
"cursor": {"date": prior_state_value},
|
||||
}
|
||||
]
|
||||
},
|
||||
)
|
||||
.build()
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
|
||||
# Mock each QueryProperties chunk with specific params to validate the since parameter
|
||||
# Chunk 1: period=day, metric=follower_count,reach
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_with_params(
|
||||
BUSINESS_ACCOUNT_ID, since=expected_since, until=expected_until, period="day", metric="follower_count,reach"
|
||||
).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
# Chunk 2: period=week, metric=reach
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_with_params(
|
||||
BUSINESS_ACCOUNT_ID, since=expected_since, until=expected_until, period="week", metric="reach"
|
||||
).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
# Chunk 3: period=days_28, metric=reach
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_with_params(
|
||||
BUSINESS_ACCOUNT_ID, since=expected_since, until=expected_until, period="days_28", metric="reach"
|
||||
).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
# Chunk 4: period=lifetime, metric=online_followers
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_with_params(
|
||||
BUSINESS_ACCOUNT_ID, since=expected_since, until=expected_until, period="lifetime", metric="online_followers"
|
||||
).build(),
|
||||
_build_user_insights_response(),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-14T00:00:00Z")
|
||||
output = self._read(config_=test_config, state=state)
|
||||
|
||||
# With specific mocks for each chunk, we can now assert exact record count
|
||||
# The merge strategy groups by date, and all chunks return the same date (2024-01-15T07:00:00+0000)
|
||||
# so records should be merged into 1 record
|
||||
assert len(output.records) == 1
|
||||
assert len(output.state_messages) >= 1
|
||||
|
||||
# Verify the record has the expected business_account_id
|
||||
record = output.records[0].record.data
|
||||
assert record.get("business_account_id") == BUSINESS_ACCOUNT_ID
|
||||
|
||||
# Verify the record date matches the expected date from our response
|
||||
# Note: The date is normalized to RFC 3339 format (+00:00) by the schema normalization
|
||||
assert record.get("date") == "2024-01-15T07:00:00+00:00"
|
||||
|
||||
|
||||
class TestErrorHandling(TestCase):
|
||||
"""Test error handling for user_insights stream.
|
||||
|
||||
The user_insights stream has IGNORE error handlers for:
|
||||
- error_subcode 2108006: "Insights error for business_account_id: {message}"
|
||||
- code 100 with error_subcode 33: "Check provided permissions for: {message}"
|
||||
- code 10 with specific permission message: "Check provided permissions for: {message}"
|
||||
|
||||
For IGNORE handlers, we verify:
|
||||
1. No ERROR logs are produced
|
||||
2. The configured error_message appears in logs (proving the handler was triggered)
|
||||
3. Zero records are returned (graceful handling)
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput:
|
||||
return read_output(
|
||||
config_builder=config_,
|
||||
stream_name=_STREAM_NAME,
|
||||
sync_mode=SyncMode.full_refresh,
|
||||
expecting_exception=expecting_exception,
|
||||
)
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_error_subcode_2108006_is_ignored(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error_subcode 2108006 is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
|
||||
error_message = "Invalid parameter"
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_error_response(code=100, message=error_message, error_subcode=2108006),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z")
|
||||
output = self._read(config_=test_config)
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any(
|
||||
"Insights error for business_account_id" in msg for msg in log_messages
|
||||
), f"Expected 'Insights error for business_account_id' in logs but got: {log_messages}"
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_error_code_100_subcode_33_is_ignored(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error code 100 with subcode 33 is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
|
||||
error_message = "Unsupported get request"
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_error_response(code=100, message=error_message, error_subcode=33),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z")
|
||||
output = self._read(config_=test_config)
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any(
|
||||
"Check provided permissions for" in msg for msg in log_messages
|
||||
), f"Expected 'Check provided permissions for' in logs but got: {log_messages}"
|
||||
|
||||
@HttpMocker()
|
||||
@freezegun.freeze_time(_FROZEN_TIME)
|
||||
def test_error_code_10_permission_denied_is_ignored(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test that error code 10 with permission denied message is gracefully ignored.
|
||||
|
||||
Verifies both error code and error message assertion per playbook requirements.
|
||||
"""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_account_response(),
|
||||
)
|
||||
|
||||
error_message = "(#10) Application does not have permission for this action"
|
||||
http_mocker.get(
|
||||
_get_user_insights_request_any_params(BUSINESS_ACCOUNT_ID).build(),
|
||||
_build_error_response(code=10, message=error_message),
|
||||
)
|
||||
|
||||
test_config = ConfigBuilder().with_start_date("2024-01-15T00:00:00Z")
|
||||
output = self._read(config_=test_config)
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
log_messages = [log.log.message for log in output.logs]
|
||||
assert any(
|
||||
"Check provided permissions for" in msg for msg in log_messages
|
||||
), f"Expected 'Check provided permissions for' in logs but got: {log_messages}"
|
||||
@@ -18,7 +18,7 @@ from airbyte_cdk.test.mock_http.response_builder import (
|
||||
|
||||
from .config import BUSINESS_ACCOUNT_ID, ConfigBuilder
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import get_account_response
|
||||
from .response_builder import SECOND_BUSINESS_ACCOUNT_ID, get_account_response, get_multiple_accounts_response
|
||||
from .utils import config, read_output
|
||||
|
||||
|
||||
@@ -79,3 +79,47 @@ class TestFullRefresh(TestCase):
|
||||
output = self._read(config_=config())
|
||||
# each breakdown should produce a record
|
||||
assert len(output.records) == 3
|
||||
# Verify transformation: breakdown, page_id, business_account_id, and metric fields are added
|
||||
for record in output.records:
|
||||
assert "breakdown" in record.record.data
|
||||
assert "page_id" in record.record.data
|
||||
assert "business_account_id" in record.record.data
|
||||
assert "metric" in record.record.data
|
||||
assert record.record.data["page_id"] is not None
|
||||
assert record.record.data["business_account_id"] is not None
|
||||
|
||||
@HttpMocker()
|
||||
def test_substream_with_multiple_parent_accounts(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test user_lifetime_insights stream against 2+ parent accounts per playbook requirements."""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_multiple_accounts_response(),
|
||||
)
|
||||
# Mock requests for both accounts (each account has 3 breakdowns)
|
||||
for breakdown in ["city", "country", "age,gender"]:
|
||||
# First account
|
||||
http_mocker.get(
|
||||
_get_request().with_custom_param("breakdown", breakdown).build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
# Second account
|
||||
http_mocker.get(
|
||||
RequestBuilder.get_user_lifetime_insights_endpoint(item_id=SECOND_BUSINESS_ACCOUNT_ID)
|
||||
.with_custom_param("metric", "follower_demographics")
|
||||
.with_custom_param("period", "lifetime")
|
||||
.with_custom_param("metric_type", "total_value")
|
||||
.with_limit(100)
|
||||
.with_custom_param("breakdown", breakdown)
|
||||
.build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# 2 accounts × 3 breakdowns = 6 records
|
||||
assert len(output.records) == 6
|
||||
# Verify transformations on all records
|
||||
for record in output.records:
|
||||
assert "breakdown" in record.record.data
|
||||
assert "page_id" in record.record.data
|
||||
assert "business_account_id" in record.record.data
|
||||
assert "metric" in record.record.data
|
||||
@@ -18,7 +18,7 @@ from airbyte_cdk.test.mock_http.response_builder import (
|
||||
|
||||
from .config import BUSINESS_ACCOUNT_ID, ConfigBuilder
|
||||
from .request_builder import RequestBuilder, get_account_request
|
||||
from .response_builder import get_account_response
|
||||
from .response_builder import SECOND_BUSINESS_ACCOUNT_ID, get_account_response, get_multiple_accounts_response
|
||||
from .utils import config, read_output
|
||||
|
||||
|
||||
@@ -80,3 +80,31 @@ class TestFullRefresh(TestCase):
|
||||
|
||||
output = self._read(config_=config())
|
||||
assert len(output.records) == 1
|
||||
# Verify transformation: page_id field is added from partition
|
||||
assert "page_id" in output.records[0].record.data
|
||||
assert output.records[0].record.data["page_id"] is not None
|
||||
|
||||
@HttpMocker()
|
||||
def test_substream_with_multiple_parent_accounts(self, http_mocker: HttpMocker) -> None:
|
||||
"""Test users stream against 2+ parent accounts per playbook requirements."""
|
||||
http_mocker.get(
|
||||
get_account_request().build(),
|
||||
get_multiple_accounts_response(),
|
||||
)
|
||||
# Mock users requests for both accounts
|
||||
http_mocker.get(
|
||||
_get_request().build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
http_mocker.get(
|
||||
RequestBuilder.get_users_endpoint(item_id=SECOND_BUSINESS_ACCOUNT_ID).with_fields(_FIELDS).build(),
|
||||
_get_response().with_record(_record()).build(),
|
||||
)
|
||||
|
||||
output = self._read(config_=config())
|
||||
# Verify we get records from both accounts
|
||||
assert len(output.records) == 2
|
||||
# Verify transformations on all records
|
||||
for record in output.records:
|
||||
assert "page_id" in record.record.data
|
||||
assert record.record.data["page_id"] is not None
|
||||
@@ -30,4 +30,4 @@ def read_output(
|
||||
) -> EntrypointOutput:
|
||||
_catalog = catalog(stream_name, sync_mode)
|
||||
_config = config_builder.build()
|
||||
return read(get_source(config=_config), _config, _catalog, state, expecting_exception)
|
||||
return read(get_source(config=_config, state=state), _config, _catalog, state, expecting_exception)
|
||||
@@ -13,6 +13,8 @@ from responses import matchers
|
||||
|
||||
from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource
|
||||
from airbyte_cdk.sources.streams import Stream
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
|
||||
|
||||
pytest_plugins = ["airbyte_cdk.test.utils.manifest_only_fixtures"]
|
||||
@@ -34,6 +36,17 @@ _YAML_FILE_PATH = _SOURCE_FOLDER_PATH / "manifest.yaml"
|
||||
sys.path.append(str(_SOURCE_FOLDER_PATH)) # to allow loading custom components
|
||||
|
||||
|
||||
def get_source(config, state=None) -> YamlDeclarativeSource:
|
||||
"""
|
||||
Create a YamlDeclarativeSource instance for testing.
|
||||
|
||||
This is the main entry point for running your connector in tests.
|
||||
"""
|
||||
catalog = CatalogBuilder().build()
|
||||
state = StateBuilder().build() if not state else state
|
||||
return YamlDeclarativeSource(path_to_yaml=str(_YAML_FILE_PATH), catalog=catalog, config=config, state=state)
|
||||
|
||||
|
||||
def delete_cache_files(cache_directory):
|
||||
directory_path = Path(cache_directory)
|
||||
if directory_path.exists() and directory_path.is_dir():
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List
|
||||
|
||||
|
||||
class ConfigBuilder:
|
||||
def __init__(self) -> None:
|
||||
self._config: Dict[str, Any] = {
|
||||
"api_token": "any_api_token",
|
||||
"domain": "airbyteio.atlassian.net",
|
||||
"email": "integration-test@airbyte.io",
|
||||
"start_date": "2021-01-01T00:00:00Z",
|
||||
"projects": [],
|
||||
}
|
||||
|
||||
def with_api_token(self, api_token: str) -> "ConfigBuilder":
|
||||
self._config["api_token"] = api_token
|
||||
return self
|
||||
|
||||
def with_domain(self, domain: str) -> "ConfigBuilder":
|
||||
self._config["domain"] = domain
|
||||
return self
|
||||
|
||||
def with_start_date(self, start_datetime: datetime) -> "ConfigBuilder":
|
||||
self._config["start_date"] = start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
return self
|
||||
|
||||
def with_projects(self, projects: List[str]) -> "ConfigBuilder":
|
||||
self._config["projects"] = projects
|
||||
return self
|
||||
|
||||
def build(self) -> Dict[str, Any]:
|
||||
return self._config
|
||||
@@ -1,95 +0,0 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Dict
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import _YAML_FILE_PATH
|
||||
|
||||
from airbyte_cdk.models import ConfiguredAirbyteCatalog, SyncMode
|
||||
from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest
|
||||
from airbyte_cdk.test.mock_http.response_builder import (
|
||||
FieldPath,
|
||||
HttpResponseBuilder,
|
||||
RecordBuilder,
|
||||
create_record_builder,
|
||||
create_response_builder,
|
||||
)
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from integration.config import ConfigBuilder
|
||||
|
||||
|
||||
_STREAM_NAME = "issues"
|
||||
_API_TOKEN = "api_token"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
_NOW = datetime(2024, 1, 1, tzinfo=timezone.utc)
|
||||
|
||||
|
||||
def _create_config() -> ConfigBuilder:
|
||||
return ConfigBuilder().with_api_token(_API_TOKEN).with_domain(_DOMAIN)
|
||||
|
||||
|
||||
def _create_catalog(sync_mode: SyncMode = SyncMode.full_refresh) -> ConfiguredAirbyteCatalog:
|
||||
return CatalogBuilder().with_stream(name="issues", sync_mode=sync_mode).build()
|
||||
|
||||
|
||||
def _response_template() -> Dict[str, Any]:
|
||||
with open(os.path.join(os.path.dirname(__file__), "..", "responses", "issues.json")) as response_file_handler:
|
||||
return json.load(response_file_handler)
|
||||
|
||||
|
||||
def _create_response() -> HttpResponseBuilder:
|
||||
return create_response_builder(
|
||||
response_template=_response_template(),
|
||||
records_path=FieldPath("issues"),
|
||||
)
|
||||
|
||||
|
||||
def _create_record() -> RecordBuilder:
|
||||
return create_record_builder(
|
||||
_response_template(), FieldPath("issues"), record_id_path=FieldPath("id"), record_cursor_path=FieldPath("updated")
|
||||
)
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class IssuesTest(TestCase):
|
||||
@HttpMocker()
|
||||
def test_given_timezone_in_state_when_read_consider_timezone(self, http_mocker: HttpMocker) -> None:
|
||||
config = _create_config().build()
|
||||
datetime_with_timezone = "2023-11-01T00:00:00.000-0800"
|
||||
timestamp_with_timezone = 1698825600000
|
||||
state = (
|
||||
StateBuilder()
|
||||
.with_stream_state(
|
||||
"issues",
|
||||
{
|
||||
"use_global_cursor": False,
|
||||
"state": {"updated": datetime_with_timezone},
|
||||
"lookback_window": 2,
|
||||
"states": [{"partition": {}, "cursor": {"updated": datetime_with_timezone}}],
|
||||
},
|
||||
)
|
||||
.build()
|
||||
)
|
||||
http_mocker.get(
|
||||
HttpRequest(
|
||||
f"https://{_DOMAIN}/rest/api/3/search/jql",
|
||||
{
|
||||
"fields": "*all",
|
||||
"jql": f"updated >= {timestamp_with_timezone} ORDER BY updated asc",
|
||||
"expand": "renderedFields,transitions,changelog",
|
||||
"maxResults": "50",
|
||||
},
|
||||
),
|
||||
_create_response().with_record(_create_record()).with_record(_create_record()).build(),
|
||||
)
|
||||
|
||||
source = YamlDeclarativeSource(config=config, catalog=_create_catalog(), state=state, path_to_yaml=str(_YAML_FILE_PATH))
|
||||
actual_messages = read(source, config=config, catalog=_create_catalog(), state=state)
|
||||
|
||||
assert len(actual_messages.records) == 2
|
||||
@@ -0,0 +1,82 @@
|
||||
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List
|
||||
|
||||
|
||||
class ConfigBuilder:
|
||||
"""
|
||||
Builder for creating Jira connector configurations for tests.
|
||||
|
||||
Example usage:
|
||||
config = (
|
||||
ConfigBuilder()
|
||||
.with_domain("mycompany.atlassian.net")
|
||||
.with_api_token("test_token")
|
||||
.with_projects(["PROJ1", "PROJ2"])
|
||||
.build()
|
||||
)
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._config: Dict[str, Any] = {
|
||||
"api_token": "any_api_token",
|
||||
"domain": "airbyteio.atlassian.net",
|
||||
"email": "integration-test@airbyte.io",
|
||||
"start_date": "2021-01-01T00:00:00Z",
|
||||
"projects": [],
|
||||
}
|
||||
|
||||
def with_api_token(self, api_token: str) -> "ConfigBuilder":
|
||||
"""Set the API token for authentication."""
|
||||
self._config["api_token"] = api_token
|
||||
return self
|
||||
|
||||
def with_domain(self, domain: str) -> "ConfigBuilder":
|
||||
"""Set the Jira domain (e.g., 'mycompany.atlassian.net')."""
|
||||
self._config["domain"] = domain
|
||||
return self
|
||||
|
||||
def with_email(self, email: str) -> "ConfigBuilder":
|
||||
"""Set the email for authentication."""
|
||||
self._config["email"] = email
|
||||
return self
|
||||
|
||||
def with_start_date(self, start_datetime: datetime) -> "ConfigBuilder":
|
||||
"""Set the replication start date."""
|
||||
self._config["start_date"] = start_datetime.strftime("%Y-%m-%dT%H:%M:%SZ")
|
||||
return self
|
||||
|
||||
def with_start_date_str(self, start_date: str) -> "ConfigBuilder":
|
||||
"""Set the replication start date as a string."""
|
||||
self._config["start_date"] = start_date
|
||||
return self
|
||||
|
||||
def with_projects(self, projects: List[str]) -> "ConfigBuilder":
|
||||
"""Set the list of project keys to sync."""
|
||||
self._config["projects"] = projects
|
||||
return self
|
||||
|
||||
def with_lookback_window_minutes(self, minutes: int) -> "ConfigBuilder":
|
||||
"""Set the lookback window in minutes for incremental syncs."""
|
||||
self._config["lookback_window_minutes"] = minutes
|
||||
return self
|
||||
|
||||
def with_enable_experimental_streams(self, enabled: bool) -> "ConfigBuilder":
|
||||
"""Enable or disable experimental streams."""
|
||||
self._config["enable_experimental_streams"] = enabled
|
||||
return self
|
||||
|
||||
def with_issues_stream_expand_with(self, expand_with: List[str]) -> "ConfigBuilder":
|
||||
"""Set the expand options for the issues stream."""
|
||||
self._config["issues_stream_expand_with"] = expand_with
|
||||
return self
|
||||
|
||||
def with_render_fields(self, render_fields: bool) -> "ConfigBuilder":
|
||||
"""Enable or disable rendering of fields."""
|
||||
self._config["render_fields"] = render_fields
|
||||
return self
|
||||
|
||||
def build(self) -> Dict[str, Any]:
|
||||
"""Build and return the configuration dictionary."""
|
||||
return self._config
|
||||
@@ -0,0 +1,379 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
from airbyte_cdk.test.mock_http import HttpRequest
|
||||
from airbyte_cdk.test.mock_http.request import ANY_QUERY_PARAMS
|
||||
|
||||
|
||||
class JiraRequestBuilder:
|
||||
"""
|
||||
Builder for creating HTTP requests for Jira API endpoints.
|
||||
|
||||
This builder helps create clean, reusable request definitions for tests
|
||||
instead of manually constructing HttpRequest objects each time.
|
||||
|
||||
Example usage:
|
||||
request = (
|
||||
JiraRequestBuilder.application_roles_endpoint("domain.atlassian.net")
|
||||
.build()
|
||||
)
|
||||
"""
|
||||
|
||||
API_V3_BASE = "https://{domain}/rest/api/3"
|
||||
AGILE_V1_BASE = "https://{domain}/rest/agile/1.0"
|
||||
|
||||
@classmethod
|
||||
def application_roles_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /applicationrole endpoint."""
|
||||
return cls(domain, "applicationrole", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def avatars_endpoint(cls, domain: str, avatar_type: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /avatar/{type}/system endpoint."""
|
||||
return cls(domain, f"avatar/{avatar_type}/system", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def boards_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /board endpoint (Agile API)."""
|
||||
return cls(domain, "board", api_version="agile")
|
||||
|
||||
@classmethod
|
||||
def dashboards_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /dashboard endpoint."""
|
||||
return cls(domain, "dashboard", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def filters_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /filter/search endpoint."""
|
||||
return cls(domain, "filter/search", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def filter_sharing_endpoint(cls, domain: str, filter_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /filter/{id}/permission endpoint."""
|
||||
return cls(domain, f"filter/{filter_id}/permission", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def groups_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /group/bulk endpoint."""
|
||||
return cls(domain, "group/bulk", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_fields_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /field endpoint."""
|
||||
return cls(domain, "field", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_field_configurations_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /fieldconfiguration endpoint."""
|
||||
return cls(domain, "fieldconfiguration", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_custom_field_contexts_endpoint(cls, domain: str, field_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /field/{fieldId}/context endpoint."""
|
||||
return cls(domain, f"field/{field_id}/context", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_custom_field_options_endpoint(cls, domain: str, field_id: str, context_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /field/{fieldId}/context/{contextId}/option endpoint."""
|
||||
return cls(domain, f"field/{field_id}/context/{context_id}/option", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_link_types_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issueLinkType endpoint."""
|
||||
return cls(domain, "issueLinkType", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_navigator_settings_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /settings/columns endpoint."""
|
||||
return cls(domain, "settings/columns", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_notification_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /notificationscheme endpoint."""
|
||||
return cls(domain, "notificationscheme", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_priorities_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /priority/search endpoint."""
|
||||
return cls(domain, "priority/search", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_resolutions_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /resolution/search endpoint."""
|
||||
return cls(domain, "resolution/search", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_security_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issuesecurityschemes endpoint."""
|
||||
return cls(domain, "issuesecurityschemes", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_types_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issuetype endpoint."""
|
||||
return cls(domain, "issuetype", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_type_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issuetypescheme endpoint."""
|
||||
return cls(domain, "issuetypescheme", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_type_screen_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issuetypescreenscheme endpoint."""
|
||||
return cls(domain, "issuetypescreenscheme", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issues_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /search/jql endpoint."""
|
||||
return cls(domain, "search/jql", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_changelogs_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/changelog endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/changelog", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_comments_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/comment endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/comment", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_properties_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/properties endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/properties", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_property_endpoint(cls, domain: str, issue_id_or_key: str, property_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/properties/{propertyKey} endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/properties/{property_key}", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_remote_links_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/remotelink endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/remotelink", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_transitions_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/transitions endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/transitions", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_votes_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/votes endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/votes", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_watchers_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/watchers endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/watchers", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def issue_worklogs_endpoint(cls, domain: str, issue_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /issue/{issueIdOrKey}/worklog endpoint."""
|
||||
return cls(domain, f"issue/{issue_id_or_key}/worklog", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def jira_settings_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /application-properties endpoint."""
|
||||
return cls(domain, "application-properties", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def labels_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /label endpoint."""
|
||||
return cls(domain, "label", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def permissions_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /permissions endpoint."""
|
||||
return cls(domain, "permissions", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def permission_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /permissionscheme endpoint."""
|
||||
return cls(domain, "permissionscheme", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def projects_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/search endpoint."""
|
||||
return cls(domain, "project/search", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_avatars_endpoint(cls, domain: str, project_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/{projectIdOrKey}/avatars endpoint."""
|
||||
return cls(domain, f"project/{project_id_or_key}/avatars", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_categories_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /projectCategory endpoint."""
|
||||
return cls(domain, "projectCategory", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_components_endpoint(cls, domain: str, project_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/{projectIdOrKey}/component endpoint."""
|
||||
return cls(domain, f"project/{project_id_or_key}/component", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_email_endpoint(cls, domain: str, project_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/{projectId}/email endpoint."""
|
||||
return cls(domain, f"project/{project_id}/email", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_permission_schemes_endpoint(cls, domain: str, project_key_or_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/{projectKeyOrId}/securitylevel endpoint."""
|
||||
return cls(domain, f"project/{project_key_or_id}/securitylevel", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_roles_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /role endpoint."""
|
||||
return cls(domain, "role", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_types_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/type endpoint."""
|
||||
return cls(domain, "project/type", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def project_versions_endpoint(cls, domain: str, project_id_or_key: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /project/{projectIdOrKey}/version endpoint."""
|
||||
return cls(domain, f"project/{project_id_or_key}/version", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def screens_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /screens endpoint."""
|
||||
return cls(domain, "screens", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def screen_tabs_endpoint(cls, domain: str, screen_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /screens/{screenId}/tabs endpoint."""
|
||||
return cls(domain, f"screens/{screen_id}/tabs", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def screen_tab_fields_endpoint(cls, domain: str, screen_id: str, tab_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /screens/{screenId}/tabs/{tabId}/fields endpoint."""
|
||||
return cls(domain, f"screens/{screen_id}/tabs/{tab_id}/fields", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def screen_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /screenscheme endpoint."""
|
||||
return cls(domain, "screenscheme", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def sprints_endpoint(cls, domain: str, board_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /board/{boardId}/sprint endpoint (Agile API)."""
|
||||
return cls(domain, f"board/{board_id}/sprint", api_version="agile")
|
||||
|
||||
@classmethod
|
||||
def sprint_issues_endpoint(cls, domain: str, sprint_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /sprint/{sprintId}/issue endpoint (Agile API)."""
|
||||
return cls(domain, f"sprint/{sprint_id}/issue", api_version="agile")
|
||||
|
||||
@classmethod
|
||||
def board_issues_endpoint(cls, domain: str, board_id: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /board/{boardId}/issue endpoint (Agile API)."""
|
||||
return cls(domain, f"board/{board_id}/issue", api_version="agile")
|
||||
|
||||
@classmethod
|
||||
def time_tracking_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /configuration/timetracking/list endpoint."""
|
||||
return cls(domain, "configuration/timetracking/list", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def users_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /users/search endpoint."""
|
||||
return cls(domain, "users/search", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def users_groups_detailed_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /user endpoint."""
|
||||
return cls(domain, "user", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def workflows_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /workflow/search endpoint."""
|
||||
return cls(domain, "workflow/search", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def workflow_schemes_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /workflowscheme endpoint."""
|
||||
return cls(domain, "workflowscheme", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def workflow_statuses_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /status endpoint."""
|
||||
return cls(domain, "status", api_version="v3")
|
||||
|
||||
@classmethod
|
||||
def workflow_status_categories_endpoint(cls, domain: str) -> "JiraRequestBuilder":
|
||||
"""Create a request builder for the /statuscategory endpoint."""
|
||||
return cls(domain, "statuscategory", api_version="v3")
|
||||
|
||||
def __init__(self, domain: str, resource: str, api_version: str = "v3"):
|
||||
"""
|
||||
Initialize the request builder.
|
||||
|
||||
Args:
|
||||
domain: The Jira domain (e.g., 'mycompany.atlassian.net')
|
||||
resource: The API resource path (e.g., 'applicationrole', 'project/search')
|
||||
api_version: The API version ('v3' for REST API v3, 'agile' for Agile API v1)
|
||||
"""
|
||||
self._domain = domain
|
||||
self._resource = resource
|
||||
self._api_version = api_version
|
||||
self._query_params: dict = {}
|
||||
self._use_any_query_params = False
|
||||
|
||||
def with_max_results(self, max_results: int) -> "JiraRequestBuilder":
|
||||
"""Set the maxResults query parameter for pagination."""
|
||||
self._query_params["maxResults"] = str(max_results)
|
||||
return self
|
||||
|
||||
def with_start_at(self, start_at: int) -> "JiraRequestBuilder":
|
||||
"""Set the startAt query parameter for pagination."""
|
||||
self._query_params["startAt"] = str(start_at)
|
||||
return self
|
||||
|
||||
def with_expand(self, expand: str) -> "JiraRequestBuilder":
|
||||
"""Set the expand query parameter."""
|
||||
self._query_params["expand"] = expand
|
||||
return self
|
||||
|
||||
def with_jql(self, jql: str) -> "JiraRequestBuilder":
|
||||
"""Set the jql query parameter for issue searches."""
|
||||
self._query_params["jql"] = jql
|
||||
return self
|
||||
|
||||
def with_fields(self, fields: str) -> "JiraRequestBuilder":
|
||||
"""Set the fields query parameter."""
|
||||
self._query_params["fields"] = fields
|
||||
return self
|
||||
|
||||
def with_query_param(self, key: str, value: str) -> "JiraRequestBuilder":
|
||||
"""Add a custom query parameter."""
|
||||
self._query_params[key] = value
|
||||
return self
|
||||
|
||||
def with_any_query_params(self) -> "JiraRequestBuilder":
|
||||
"""Use ANY_QUERY_PARAMS matcher for dynamic/unpredictable parameters."""
|
||||
self._use_any_query_params = True
|
||||
return self
|
||||
|
||||
def build(self) -> HttpRequest:
|
||||
"""
|
||||
Build and return the HttpRequest object.
|
||||
|
||||
Returns:
|
||||
HttpRequest configured with the URL and query params
|
||||
"""
|
||||
if self._api_version == "agile":
|
||||
base_url = self.AGILE_V1_BASE.format(domain=self._domain)
|
||||
else:
|
||||
base_url = self.API_V3_BASE.format(domain=self._domain)
|
||||
|
||||
url = f"{base_url}/{self._resource}"
|
||||
|
||||
if self._use_any_query_params:
|
||||
return HttpRequest(url=url, query_params=ANY_QUERY_PARAMS)
|
||||
|
||||
return HttpRequest(
|
||||
url=url,
|
||||
query_params=self._query_params if self._query_params else None,
|
||||
)
|
||||
@@ -0,0 +1,366 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from airbyte_cdk.test.mock_http import HttpResponse
|
||||
|
||||
|
||||
class JiraPaginatedResponseBuilder:
|
||||
"""
|
||||
Builder for creating paginated Jira API responses.
|
||||
|
||||
This builder simplifies creating mock responses for pagination tests by handling
|
||||
the boilerplate JSON structure that Jira API returns.
|
||||
|
||||
Jira uses cursor-based pagination with the following fields:
|
||||
- startAt: The starting index of the returned items
|
||||
- maxResults: The maximum number of items returned per page
|
||||
- total: The total number of items available
|
||||
- isLast: Boolean indicating if this is the last page
|
||||
|
||||
The stop_condition in the manifest is:
|
||||
{{ response.get('isLast') or response.get('startAt') + response.get('maxResults') >= response.get('total') }}
|
||||
|
||||
Example usage:
|
||||
response = (
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([project1, project2])
|
||||
.with_pagination(start_at=0, max_results=50, total=100, is_last=False)
|
||||
.build()
|
||||
)
|
||||
"""
|
||||
|
||||
def __init__(self, records_field: str = "values"):
|
||||
"""
|
||||
Initialize the response builder.
|
||||
|
||||
Args:
|
||||
records_field: The field name containing the records array (e.g., "values", "issues")
|
||||
"""
|
||||
self._records_field = records_field
|
||||
self._records: List[Dict[str, Any]] = []
|
||||
self._start_at: int = 0
|
||||
self._max_results: int = 50
|
||||
self._total: Optional[int] = None
|
||||
self._is_last: Optional[bool] = None
|
||||
self._status_code: int = 200
|
||||
self._extra_fields: Dict[str, Any] = {}
|
||||
|
||||
def with_records(self, records: List[Dict[str, Any]]) -> "JiraPaginatedResponseBuilder":
|
||||
"""
|
||||
Add records to the response.
|
||||
|
||||
Args:
|
||||
records: List of record dictionaries to include in the response
|
||||
|
||||
Returns:
|
||||
Self for method chaining
|
||||
"""
|
||||
self._records = records
|
||||
return self
|
||||
|
||||
def with_pagination(
|
||||
self,
|
||||
start_at: int = 0,
|
||||
max_results: int = 50,
|
||||
total: Optional[int] = None,
|
||||
is_last: Optional[bool] = None,
|
||||
) -> "JiraPaginatedResponseBuilder":
|
||||
"""
|
||||
Set pagination metadata.
|
||||
|
||||
Args:
|
||||
start_at: The starting index of the returned items
|
||||
max_results: The maximum number of items returned per page
|
||||
total: The total number of items available (defaults to len(records) if not set)
|
||||
is_last: Boolean indicating if this is the last page (calculated if not set)
|
||||
|
||||
Returns:
|
||||
Self for method chaining
|
||||
"""
|
||||
self._start_at = start_at
|
||||
self._max_results = max_results
|
||||
self._total = total
|
||||
self._is_last = is_last
|
||||
return self
|
||||
|
||||
def with_status_code(self, status_code: int) -> "JiraPaginatedResponseBuilder":
|
||||
"""
|
||||
Set the HTTP status code.
|
||||
|
||||
Args:
|
||||
status_code: HTTP status code for the response
|
||||
|
||||
Returns:
|
||||
Self for method chaining
|
||||
"""
|
||||
self._status_code = status_code
|
||||
return self
|
||||
|
||||
def with_extra_field(self, key: str, value: Any) -> "JiraPaginatedResponseBuilder":
|
||||
"""
|
||||
Add an extra field to the response body.
|
||||
|
||||
Args:
|
||||
key: Field name
|
||||
value: Field value
|
||||
|
||||
Returns:
|
||||
Self for method chaining
|
||||
"""
|
||||
self._extra_fields[key] = value
|
||||
return self
|
||||
|
||||
def build(self) -> HttpResponse:
|
||||
"""
|
||||
Build the HTTP response with paginated data.
|
||||
|
||||
Returns:
|
||||
HttpResponse object with the paginated response body
|
||||
"""
|
||||
total = self._total if self._total is not None else len(self._records)
|
||||
|
||||
if self._is_last is not None:
|
||||
is_last = self._is_last
|
||||
else:
|
||||
is_last = (self._start_at + self._max_results) >= total
|
||||
|
||||
response_body = {
|
||||
self._records_field: self._records,
|
||||
"startAt": self._start_at,
|
||||
"maxResults": self._max_results,
|
||||
"total": total,
|
||||
"isLast": is_last,
|
||||
}
|
||||
|
||||
response_body.update(self._extra_fields)
|
||||
|
||||
return HttpResponse(body=json.dumps(response_body), status_code=self._status_code)
|
||||
|
||||
@classmethod
|
||||
def single_page(cls, records_field: str, records: List[Dict[str, Any]]) -> HttpResponse:
|
||||
"""
|
||||
Convenience method to create a single-page response.
|
||||
|
||||
Args:
|
||||
records_field: The field name containing the records array
|
||||
records: List of records to include
|
||||
|
||||
Returns:
|
||||
HttpResponse for a single page with isLast=True
|
||||
"""
|
||||
return (
|
||||
cls(records_field).with_records(records).with_pagination(start_at=0, max_results=50, total=len(records), is_last=True).build()
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def empty_page(cls, records_field: str = "values") -> HttpResponse:
|
||||
"""
|
||||
Convenience method to create an empty response.
|
||||
|
||||
Args:
|
||||
records_field: The field name containing the records array
|
||||
|
||||
Returns:
|
||||
HttpResponse for an empty result set
|
||||
"""
|
||||
return cls(records_field).with_records([]).with_pagination(start_at=0, max_results=50, total=0, is_last=True).build()
|
||||
|
||||
|
||||
class JiraAgileResponseBuilder:
|
||||
"""
|
||||
Builder for creating Agile API responses (boards, sprints, etc.).
|
||||
|
||||
The Agile API uses a slightly different pagination structure with 'values' as the records field.
|
||||
"""
|
||||
|
||||
def __init__(self, records_field: str = "values"):
|
||||
"""
|
||||
Initialize the response builder.
|
||||
|
||||
Args:
|
||||
records_field: The field name containing the records array
|
||||
"""
|
||||
self._records_field = records_field
|
||||
self._records: List[Dict[str, Any]] = []
|
||||
self._start_at: int = 0
|
||||
self._max_results: int = 50
|
||||
self._total: Optional[int] = None
|
||||
self._is_last: Optional[bool] = None
|
||||
self._status_code: int = 200
|
||||
|
||||
def with_records(self, records: List[Dict[str, Any]]) -> "JiraAgileResponseBuilder":
|
||||
"""Add records to the response."""
|
||||
self._records = records
|
||||
return self
|
||||
|
||||
def with_pagination(
|
||||
self,
|
||||
start_at: int = 0,
|
||||
max_results: int = 50,
|
||||
total: Optional[int] = None,
|
||||
is_last: Optional[bool] = None,
|
||||
) -> "JiraAgileResponseBuilder":
|
||||
"""Set pagination metadata."""
|
||||
self._start_at = start_at
|
||||
self._max_results = max_results
|
||||
self._total = total
|
||||
self._is_last = is_last
|
||||
return self
|
||||
|
||||
def with_status_code(self, status_code: int) -> "JiraAgileResponseBuilder":
|
||||
"""Set the HTTP status code."""
|
||||
self._status_code = status_code
|
||||
return self
|
||||
|
||||
def build(self) -> HttpResponse:
|
||||
"""Build the HTTP response."""
|
||||
total = self._total if self._total is not None else len(self._records)
|
||||
|
||||
if self._is_last is not None:
|
||||
is_last = self._is_last
|
||||
else:
|
||||
is_last = (self._start_at + self._max_results) >= total
|
||||
|
||||
response_body = {
|
||||
self._records_field: self._records,
|
||||
"startAt": self._start_at,
|
||||
"maxResults": self._max_results,
|
||||
"total": total,
|
||||
"isLast": is_last,
|
||||
}
|
||||
|
||||
return HttpResponse(body=json.dumps(response_body), status_code=self._status_code)
|
||||
|
||||
|
||||
class JiraJqlResponseBuilder:
|
||||
"""
|
||||
Builder for creating JQL search responses (issues stream).
|
||||
|
||||
The JQL API uses 'issues' as the records field and supports nextPageToken pagination.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the response builder."""
|
||||
self._records: List[Dict[str, Any]] = []
|
||||
self._start_at: int = 0
|
||||
self._max_results: int = 50
|
||||
self._total: Optional[int] = None
|
||||
self._is_last: Optional[bool] = None
|
||||
self._next_page_token: Optional[str] = None
|
||||
self._status_code: int = 200
|
||||
|
||||
def with_records(self, records: List[Dict[str, Any]]) -> "JiraJqlResponseBuilder":
|
||||
"""Add records to the response."""
|
||||
self._records = records
|
||||
return self
|
||||
|
||||
def with_pagination(
|
||||
self,
|
||||
start_at: int = 0,
|
||||
max_results: int = 50,
|
||||
total: Optional[int] = None,
|
||||
is_last: Optional[bool] = None,
|
||||
next_page_token: Optional[str] = None,
|
||||
) -> "JiraJqlResponseBuilder":
|
||||
"""Set pagination metadata."""
|
||||
self._start_at = start_at
|
||||
self._max_results = max_results
|
||||
self._total = total
|
||||
self._is_last = is_last
|
||||
self._next_page_token = next_page_token
|
||||
return self
|
||||
|
||||
def with_status_code(self, status_code: int) -> "JiraJqlResponseBuilder":
|
||||
"""Set the HTTP status code."""
|
||||
self._status_code = status_code
|
||||
return self
|
||||
|
||||
def build(self) -> HttpResponse:
|
||||
"""Build the HTTP response."""
|
||||
total = self._total if self._total is not None else len(self._records)
|
||||
|
||||
if self._is_last is not None:
|
||||
is_last = self._is_last
|
||||
else:
|
||||
is_last = self._next_page_token is None
|
||||
|
||||
response_body: Dict[str, Any] = {
|
||||
"issues": self._records,
|
||||
"startAt": self._start_at,
|
||||
"maxResults": self._max_results,
|
||||
"total": total,
|
||||
"isLast": is_last,
|
||||
}
|
||||
|
||||
if self._next_page_token:
|
||||
response_body["nextPageToken"] = self._next_page_token
|
||||
|
||||
return HttpResponse(body=json.dumps(response_body), status_code=self._status_code)
|
||||
|
||||
|
||||
class JiraErrorResponseBuilder:
|
||||
"""
|
||||
Builder for creating Jira error responses.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the error response builder."""
|
||||
self._error_messages: List[str] = []
|
||||
self._errors: Dict[str, str] = {}
|
||||
self._status_code: int = 400
|
||||
|
||||
def with_error_messages(self, messages: List[str]) -> "JiraErrorResponseBuilder":
|
||||
"""Add error messages to the response."""
|
||||
self._error_messages = messages
|
||||
return self
|
||||
|
||||
def with_errors(self, errors: Dict[str, str]) -> "JiraErrorResponseBuilder":
|
||||
"""Add field-specific errors to the response."""
|
||||
self._errors = errors
|
||||
return self
|
||||
|
||||
def with_status_code(self, status_code: int) -> "JiraErrorResponseBuilder":
|
||||
"""Set the HTTP status code."""
|
||||
self._status_code = status_code
|
||||
return self
|
||||
|
||||
def build(self) -> HttpResponse:
|
||||
"""Build the HTTP error response."""
|
||||
response_body: Dict[str, Any] = {}
|
||||
|
||||
if self._error_messages:
|
||||
response_body["errorMessages"] = self._error_messages
|
||||
|
||||
if self._errors:
|
||||
response_body["errors"] = self._errors
|
||||
|
||||
return HttpResponse(body=json.dumps(response_body), status_code=self._status_code)
|
||||
|
||||
|
||||
class JiraSimpleResponseBuilder:
|
||||
"""
|
||||
Builder for creating simple Jira API responses without pagination.
|
||||
|
||||
Used for endpoints that return a single object or a simple array.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the simple response builder."""
|
||||
self._body: Any = None
|
||||
self._status_code: int = 200
|
||||
|
||||
def with_body(self, body: Any) -> "JiraSimpleResponseBuilder":
|
||||
"""Set the response body."""
|
||||
self._body = body
|
||||
return self
|
||||
|
||||
def with_status_code(self, status_code: int) -> "JiraSimpleResponseBuilder":
|
||||
"""Set the HTTP status code."""
|
||||
self._status_code = status_code
|
||||
return self
|
||||
|
||||
def build(self) -> HttpResponse:
|
||||
"""Build the HTTP response."""
|
||||
return HttpResponse(body=json.dumps(self._body), status_code=self._status_code)
|
||||
@@ -0,0 +1,175 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "application_roles"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestApplicationRolesStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'application_roles' stream.
|
||||
|
||||
This is a simple full refresh stream without pagination.
|
||||
It uses selector_base (extracts from root array) and no pagination.
|
||||
"""
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_single_record(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector correctly fetches application roles.
|
||||
|
||||
Given: A configured Jira connector
|
||||
When: Running a full refresh sync for the application_roles stream
|
||||
Then: The connector should make the correct API request and return all records
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.application_roles_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(
|
||||
body=json.dumps(
|
||||
[
|
||||
{
|
||||
"key": "jira-software",
|
||||
"groups": ["jira-software-users", "administrators"],
|
||||
"name": "Jira Software",
|
||||
"defaultGroups": ["jira-software-users"],
|
||||
"selectedByDefault": False,
|
||||
"defined": True,
|
||||
"numberOfSeats": 100,
|
||||
"remainingSeats": 61,
|
||||
"userCount": 14,
|
||||
"userCountDescription": "users",
|
||||
"hasUnlimitedSeats": False,
|
||||
"platform": False,
|
||||
}
|
||||
]
|
||||
),
|
||||
status_code=200,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["key"] == "jira-software"
|
||||
assert record["name"] == "Jira Software"
|
||||
assert record["numberOfSeats"] == 100
|
||||
assert record["userCount"] == 14
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_multiple_records(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector correctly fetches multiple application roles.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.application_roles_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(
|
||||
body=json.dumps(
|
||||
[
|
||||
{
|
||||
"key": "jira-software",
|
||||
"groups": ["jira-software-users"],
|
||||
"name": "Jira Software",
|
||||
"defaultGroups": ["jira-software-users"],
|
||||
"selectedByDefault": False,
|
||||
"defined": True,
|
||||
"numberOfSeats": 100,
|
||||
"remainingSeats": 61,
|
||||
"userCount": 14,
|
||||
"userCountDescription": "users",
|
||||
"hasUnlimitedSeats": False,
|
||||
"platform": False,
|
||||
},
|
||||
{
|
||||
"key": "jira-core",
|
||||
"groups": ["jira-core-users"],
|
||||
"name": "Jira Core",
|
||||
"defaultGroups": ["jira-core-users"],
|
||||
"selectedByDefault": True,
|
||||
"defined": True,
|
||||
"numberOfSeats": 50,
|
||||
"remainingSeats": 30,
|
||||
"userCount": 20,
|
||||
"userCountDescription": "users",
|
||||
"hasUnlimitedSeats": False,
|
||||
"platform": True,
|
||||
},
|
||||
]
|
||||
),
|
||||
status_code=200,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
assert output.records[0].record.data["key"] == "jira-software"
|
||||
assert output.records[1].record.data["key"] == "jira-core"
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.application_roles_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps([]), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_400_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector ignores 400 errors per the default error handler.
|
||||
|
||||
The manifest configures 400 errors with action: IGNORE, which means the connector
|
||||
silently ignores bad request errors and continues the sync with 0 records.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.application_roles_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(
|
||||
body=json.dumps({"errorMessages": ["Bad request"]}),
|
||||
status_code=400,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog, expecting_exception=False)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,161 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "avatars"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestAvatarsStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'avatars' stream.
|
||||
|
||||
This is a full refresh stream without pagination.
|
||||
Uses ListPartitionRouter with slices: issuetype, project, user
|
||||
Endpoint: /rest/api/3/avatar/{slice}/system
|
||||
Extract field: system
|
||||
Primary key: id
|
||||
"""
|
||||
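# A brief sketch of how the ListPartitionRouter described above fans out into
# requests (illustrative only; the exact builder calls live in the tests below):
#
#   for slice in ("issuetype", "project", "user"):
#       GET https://{domain}/rest/api/3/avatar/{slice}/system
#
# Each response nests its records under "system", so the three mocked
# endpoints together yield the union of all avatar records.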
|
||||
@HttpMocker()
|
||||
def test_full_refresh_all_slices(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that a full refresh sync returns avatars from all slices (issuetype, project, user).
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Issue type avatars
|
||||
issuetype_avatars = {
|
||||
"system": [
|
||||
{"id": "10000", "isSystemAvatar": True, "isSelected": False, "isDeletable": False},
|
||||
{"id": "10001", "isSystemAvatar": True, "isSelected": False, "isDeletable": False},
|
||||
]
|
||||
}
|
||||
|
||||
# Project avatars
|
||||
project_avatars = {
|
||||
"system": [
|
||||
{"id": "10100", "isSystemAvatar": True, "isSelected": False, "isDeletable": False},
|
||||
]
|
||||
}
|
||||
|
||||
# User avatars
|
||||
user_avatars = {
|
||||
"system": [
|
||||
{"id": "10200", "isSystemAvatar": True, "isSelected": False, "isDeletable": False},
|
||||
]
|
||||
}
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "issuetype").build(),
|
||||
HttpResponse(body=json.dumps(issuetype_avatars), status_code=200),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "project").build(),
|
||||
HttpResponse(body=json.dumps(project_avatars), status_code=200),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "user").build(),
|
||||
HttpResponse(body=json.dumps(user_avatars), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 4
|
||||
|
||||
avatar_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "10000" in avatar_ids
|
||||
assert "10001" in avatar_ids
|
||||
assert "10100" in avatar_ids
|
||||
assert "10200" in avatar_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_avatar_properties(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that avatar properties are correctly returned.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issuetype_avatars = {
|
||||
"system": [
|
||||
{
|
||||
"id": "10000",
|
||||
"isSystemAvatar": True,
|
||||
"isSelected": True,
|
||||
"isDeletable": False,
|
||||
"fileName": "bug.svg",
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
project_avatars = {"system": []}
|
||||
user_avatars = {"system": []}
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "issuetype").build(),
|
||||
HttpResponse(body=json.dumps(issuetype_avatars), status_code=200),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "project").build(),
|
||||
HttpResponse(body=json.dumps(project_avatars), status_code=200),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "user").build(),
|
||||
HttpResponse(body=json.dumps(user_avatars), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["id"] == "10000"
|
||||
assert record["isSystemAvatar"] is True
|
||||
assert record["isSelected"] is True
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
empty_avatars = {"system": []}
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "issuetype").build(),
|
||||
HttpResponse(body=json.dumps(empty_avatars), status_code=200),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "project").build(),
|
||||
HttpResponse(body=json.dumps(empty_avatars), status_code=200),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.avatars_endpoint(_DOMAIN, "user").build(),
|
||||
HttpResponse(body=json.dumps(empty_avatars), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,199 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraAgileResponseBuilder, JiraPaginatedResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "board_issues"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestBoardIssuesStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'board_issues' stream.
|
||||
|
||||
This is an incremental substream that depends on boards as parent.
|
||||
Endpoint: /rest/agile/1.0/board/{boardId}/issue
|
||||
Extract field: issues
|
||||
Primary key: id
|
||||
Cursor field: updated
|
||||
Transformations: AddFields (boardId, created, updated)
|
||||
Error handler: 500 IGNORE
|
||||
"""
|
||||
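# Illustrative sketch of the AddFields transformation described above (field
# names are taken from the assertions below; the exact manifest syntax is an
# assumption):
#
#   raw API record:       {"id": "10001", "fields": {"created": ..., "updated": ...}}
#   emitted record adds:  "boardId" (from the parent board partition) plus
#                         "created" and "updated" promoted from "fields"
#
# test_board_id_transformation verifies this shape explicitly.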
|
||||
@HttpMocker()
|
||||
def test_full_refresh_with_multiple_boards(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync with issues from multiple boards.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Mock boards endpoint (parent stream)
|
||||
boards = [
|
||||
{"id": 1, "name": "Board 1", "type": "scrum"},
|
||||
{"id": 2, "name": "Board 2", "type": "kanban"},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(boards)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock board issues for board 1
|
||||
board1_issues = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"created": "2024-01-01T10:00:00.000+0000",
|
||||
"updated": "2024-01-15T10:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Mock board issues for board 2
|
||||
board2_issues = [
|
||||
{
|
||||
"id": "10002",
|
||||
"key": "PROJ-2",
|
||||
"fields": {
|
||||
"created": "2024-01-02T10:00:00.000+0000",
|
||||
"updated": "2024-01-16T10:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.board_issues_endpoint(_DOMAIN, "1").with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("issues").with_records(board1_issues).with_pagination(start_at=0, max_results=50, total=1).build(),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.board_issues_endpoint(_DOMAIN, "2").with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("issues").with_records(board2_issues).with_pagination(start_at=0, max_results=50, total=1).build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
|
||||
issue_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "10001" in issue_ids
|
||||
assert "10002" in issue_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_board_id_transformation(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the AddFields transformation correctly adds boardId, created, and updated.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
boards = [
|
||||
{"id": 1, "name": "Board 1", "type": "scrum"},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(boards)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
board_issues = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"created": "2024-01-01T10:00:00.000+0000",
|
||||
"updated": "2024-01-15T10:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.board_issues_endpoint(_DOMAIN, "1").with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("issues").with_records(board_issues).with_pagination(start_at=0, max_results=50, total=1).build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["boardId"] == 1
|
||||
assert record["created"] == "2024-01-01T10:00:00.000+0000"
|
||||
assert record["updated"] == "2024-01-15T10:00:00.000+0000"
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_boards(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty boards gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_board_with_no_issues(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles boards with no issues gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
boards = [
|
||||
{"id": 1, "name": "Board 1", "type": "scrum"},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(boards)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.board_issues_endpoint(_DOMAIN, "1").with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("issues").with_records([]).with_pagination(start_at=0, max_results=50, total=0).build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,209 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraAgileResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "boards"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestBoardsStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'boards' stream.
|
||||
|
||||
This stream uses the Agile API v1 with 'values' as the extract field.
|
||||
Endpoint: /rest/agile/1.0/board
|
||||
Has record_filter: filters by config['projects'] if specified
|
||||
Has transformations: AddFields for projectId and projectKey
|
||||
"""
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_single_page(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly fetches boards with a single page.
|
||||
Also verifies that transformations (AddFields) are applied.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
board_records = [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Scrum Board",
|
||||
"type": "scrum",
|
||||
"self": f"https://{_DOMAIN}/rest/agile/1.0/board/1",
|
||||
"location": {
|
||||
"projectId": 10001,
|
||||
"projectKey": "PROJ1",
|
||||
"displayName": "Project One",
|
||||
"projectName": "Project One",
|
||||
"projectTypeKey": "software",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"name": "Kanban Board",
|
||||
"type": "kanban",
|
||||
"self": f"https://{_DOMAIN}/rest/agile/1.0/board/2",
|
||||
"location": {
|
||||
"projectId": 10002,
|
||||
"projectKey": "PROJ2",
|
||||
"displayName": "Project Two",
|
||||
"projectName": "Project Two",
|
||||
"projectTypeKey": "software",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("values")
|
||||
.with_records(board_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
# Verify basic fields
|
||||
assert output.records[0].record.data["id"] == 1
|
||||
assert output.records[0].record.data["name"] == "Scrum Board"
|
||||
# Verify transformations (AddFields) are applied
|
||||
assert output.records[0].record.data["projectId"] == "10001"
|
||||
assert output.records[0].record.data["projectKey"] == "PROJ1"
|
||||
assert output.records[1].record.data["id"] == 2
|
||||
assert output.records[1].record.data["projectId"] == "10002"
|
||||
assert output.records[1].record.data["projectKey"] == "PROJ2"
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination_multiple_pages(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly handles pagination across multiple pages.
|
||||
|
||||
Pagination stop_condition from manifest:
|
||||
{{ response.get('isLast') or response.get('startAt') + response.get('maxResults') >= response.get('total') }}
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
page1_records = [
|
||||
{"id": 1, "name": "Board 1", "type": "scrum", "location": {"projectId": 10001, "projectKey": "PROJ1"}},
|
||||
{"id": 2, "name": "Board 2", "type": "kanban", "location": {"projectId": 10002, "projectKey": "PROJ2"}},
|
||||
]
|
||||
page2_records = [
|
||||
{"id": 3, "name": "Board 3", "type": "scrum", "location": {"projectId": 10003, "projectKey": "PROJ3"}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
[
|
||||
JiraAgileResponseBuilder("values")
|
||||
.with_records(page1_records)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
JiraAgileResponseBuilder("values")
|
||||
.with_records(page2_records)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
],
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
assert output.records[0].record.data["id"] == 1
|
||||
assert output.records[1].record.data["id"] == 2
|
||||
assert output.records[2].record.data["id"] == 3
|
||||
|
||||
@HttpMocker()
|
||||
def test_project_filter_config(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector filters boards based on the config['projects'] setting.
|
||||
|
||||
The record_filter in manifest:
|
||||
{{ not config.get('projects') or record.get('location', {}).get('projectKey') in config['projects'] }}
|
||||
|
||||
When projects config is set, only boards belonging to matching projects should be returned.
|
||||
"""
|
||||
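# The record_filter quoted above, restated as plain Python for readability
# (a sketch of the Jinja condition, not a helper used by the connector):
#
#   def keep(record, config):
#       projects = config.get("projects")
#       return not projects or record.get("location", {}).get("projectKey") in projects
#
# With config["projects"] == ["PROJ1"], only the first board mocked below survives.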
config = ConfigBuilder().with_domain(_DOMAIN).with_projects(["PROJ1"]).build()
|
||||
|
||||
board_records = [
|
||||
{"id": 1, "name": "Board 1", "type": "scrum", "location": {"projectId": 10001, "projectKey": "PROJ1"}},
|
||||
{"id": 2, "name": "Board 2", "type": "kanban", "location": {"projectId": 10002, "projectKey": "PROJ2"}},
|
||||
{"id": 3, "name": "Board 3", "type": "scrum", "location": {"projectId": 10003, "projectKey": "PROJ3"}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("values")
|
||||
.with_records(board_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=3, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
# Only boards from PROJ1 should be returned due to the filter
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["projectKey"] == "PROJ1"
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraAgileResponseBuilder("values").with_records([]).with_pagination(start_at=0, max_results=50, total=0, is_last=True).build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_400_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector ignores 400 errors per the default error handler.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.boards_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
HttpResponse(
|
||||
body=json.dumps({"errorMessages": ["Bad request"]}),
|
||||
status_code=400,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog, expecting_exception=False)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,177 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraPaginatedResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "dashboards"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestDashboardsStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'dashboards' stream.
|
||||
|
||||
This stream uses the standard paginator with 'dashboards' as the extract field.
|
||||
Endpoint: /rest/api/3/dashboard
|
||||
"""
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_single_page(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly fetches dashboards with a single page.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
dashboard_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"name": "System Dashboard",
|
||||
"description": "Default system dashboard",
|
||||
"isFavourite": True,
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/dashboard/10001",
|
||||
},
|
||||
{
|
||||
"id": "10002",
|
||||
"name": "Project Dashboard",
|
||||
"description": "Project overview dashboard",
|
||||
"isFavourite": False,
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/dashboard/10002",
|
||||
},
|
||||
]
|
||||
|
||||
# First request doesn't include startAt parameter
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.dashboards_endpoint(_DOMAIN).with_max_results(50).build(),
|
||||
JiraPaginatedResponseBuilder("dashboards")
|
||||
.with_records(dashboard_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
assert output.records[0].record.data["id"] == "10001"
|
||||
assert output.records[0].record.data["name"] == "System Dashboard"
|
||||
assert output.records[1].record.data["id"] == "10002"
|
||||
assert output.records[1].record.data["name"] == "Project Dashboard"
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination_multiple_pages(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly handles pagination across multiple pages.
|
||||
|
||||
NOTE: This test validates pagination for the 'dashboards' stream, but many streams
|
||||
use the same DefaultPaginator configuration (startAt/maxResults with CursorPagination),
|
||||
so this provides pagination coverage for: boards, board_issues, dashboards, filters,
|
||||
groups, issue_changelogs, issue_comments, issue_field_configurations,
|
||||
issue_notification_schemes, issue_priorities, issue_resolutions, issue_type_schemes,
|
||||
issue_type_screen_schemes, issue_worklogs, labels, project_components, project_versions,
|
||||
projects, screen_schemes, screens, sprints, workflows, workflow_schemes
|
||||
|
||||
Pagination stop_condition from manifest:
|
||||
{{ response.get('isLast') or response.get('startAt') + response.get('maxResults') >= response.get('total') }}
|
||||
|
||||
To exercise 2 pages:
|
||||
- Page 1: startAt=0, maxResults=2, total=3 -> 0 + 2 >= 3 is false, fetch page 2
|
||||
- Page 2: startAt=2, maxResults=2, total=3, isLast=true -> stops
|
||||
"""
|
||||
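# The stop_condition quoted above, restated as plain Python (a sketch of the
# Jinja expression, not a helper that exists in this suite):
#
#   def is_last_page(resp):
#       return bool(
#           resp.get("isLast")
#           or resp.get("startAt") + resp.get("maxResults") >= resp.get("total")
#       )
#
#   is_last_page({"startAt": 0, "maxResults": 2, "total": 3})                  # False -> fetch page 2
#   is_last_page({"startAt": 2, "maxResults": 2, "total": 3, "isLast": True})  # True  -> stop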
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
page1_records = [
|
||||
{"id": "10001", "name": "Dashboard 1"},
|
||||
{"id": "10002", "name": "Dashboard 2"},
|
||||
]
|
||||
page2_records = [
|
||||
{"id": "10003", "name": "Dashboard 3"},
|
||||
]
|
||||
|
||||
# Page 1 request (first request doesn't include startAt)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.dashboards_endpoint(_DOMAIN).with_max_results(50).build(),
|
||||
JiraPaginatedResponseBuilder("dashboards")
|
||||
.with_records(page1_records)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Page 2 request (subsequent requests include startAt)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.dashboards_endpoint(_DOMAIN).with_max_results(50).with_start_at(2).build(),
|
||||
JiraPaginatedResponseBuilder("dashboards")
|
||||
.with_records(page2_records)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
assert output.records[0].record.data["id"] == "10001"
|
||||
assert output.records[1].record.data["id"] == "10002"
|
||||
assert output.records[2].record.data["id"] == "10003"
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# First request doesn't include startAt parameter
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.dashboards_endpoint(_DOMAIN).with_max_results(50).build(),
|
||||
JiraPaginatedResponseBuilder("dashboards")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_400_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector ignores 400 errors per the default error handler.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# First request doesn't include startAt parameter
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.dashboards_endpoint(_DOMAIN).with_max_results(50).build(),
|
||||
HttpResponse(
|
||||
body=json.dumps({"errorMessages": ["Bad request"]}),
|
||||
status_code=400,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog, expecting_exception=False)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,172 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraPaginatedResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "filter_sharing"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestFilterSharingStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'filter_sharing' stream.
|
||||
|
||||
This is a substream of filters.
|
||||
Endpoint: /rest/api/3/filter/{filter_id}/permission
|
||||
Uses SubstreamPartitionRouter with filters as parent
|
||||
Has transformation: AddFields for filterId
|
||||
"""
|
||||
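# Sketch of how the SubstreamPartitionRouter described above fans out
# (illustrative; the parent ids come from the mocked filters responses below):
#
#   GET /rest/api/3/filter/search            -> parent filters 10001, 10002
#   GET /rest/api/3/filter/10001/permission  -> permissions for filter 10001
#   GET /rest/api/3/filter/10002/permission  -> permissions for filter 10002
#
# The AddFields transformation then stamps each permission record with the
# parent "filterId", which the first test asserts on.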
|
||||
@HttpMocker()
|
||||
def test_full_refresh_with_parent_filters(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly fetches filter sharing permissions from multiple parent filters.
|
||||
|
||||
Per the playbook: "All substreams should be tested against at least two parent records"
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Mock parent filters endpoint
|
||||
filter_records = [
|
||||
{"id": "10001", "name": "Filter 1", "self": f"https://{_DOMAIN}/rest/api/3/filter/10001"},
|
||||
{"id": "10002", "name": "Filter 2", "self": f"https://{_DOMAIN}/rest/api/3/filter/10002"},
|
||||
]
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(filter_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock filter sharing permissions for filter 10001
|
||||
filter1_permissions = [
|
||||
{"id": 1, "type": "user", "user": {"accountId": "user1"}},
|
||||
{"id": 2, "type": "group", "group": {"name": "developers"}},
|
||||
]
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filter_sharing_endpoint(_DOMAIN, "10001").build(),
|
||||
HttpResponse(body=json.dumps(filter1_permissions), status_code=200),
|
||||
)
|
||||
|
||||
# Mock filter sharing permissions for filter 10002
|
||||
filter2_permissions = [
|
||||
{"id": 3, "type": "project", "project": {"id": "10001"}},
|
||||
]
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filter_sharing_endpoint(_DOMAIN, "10002").build(),
|
||||
HttpResponse(body=json.dumps(filter2_permissions), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
# Verify transformation: filterId should be added
|
||||
filter_ids = [r.record.data.get("filterId") for r in output.records]
|
||||
assert "10001" in filter_ids
|
||||
assert "10002" in filter_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_parent_filters(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty parent filters gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_filter_without_sharing_permissions(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles filters without sharing permissions.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Mock parent filters endpoint
|
||||
filter_records = [
|
||||
{"id": "10001", "name": "Filter 1", "self": f"https://{_DOMAIN}/rest/api/3/filter/10001"},
|
||||
]
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(filter_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock empty permissions for filter 10001
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filter_sharing_endpoint(_DOMAIN, "10001").build(),
|
||||
HttpResponse(body=json.dumps([]), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_400_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector ignores 400 errors per the error handler.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Mock parent filters endpoint
|
||||
filter_records = [
|
||||
{"id": "10001", "name": "Filter 1", "self": f"https://{_DOMAIN}/rest/api/3/filter/10001"},
|
||||
]
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(filter_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock 400 error for filter sharing
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filter_sharing_endpoint(_DOMAIN, "10001").build(),
|
||||
HttpResponse(
|
||||
body=json.dumps({"errorMessages": ["Bad request"]}),
|
||||
status_code=400,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog, expecting_exception=False)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,171 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraPaginatedResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "filters"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestFiltersStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'filters' stream.
|
||||
|
||||
This is a full refresh stream with pagination.
|
||||
Endpoint: /rest/api/3/filter/search
|
||||
Uses retriever_use_cache for caching
|
||||
"""
|
||||
|
||||
# Static expand parameter from manifest.yaml for filters stream
|
||||
_FILTERS_EXPAND = "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,isWritable,subscriptions"
|
||||
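# With the static expand parameter above, the mocked first-page request is
# expected to look roughly like the following (domain and maxResults come from
# the tests; the exact parameter ordering is an assumption):
#
#   GET https://airbyteio.atlassian.net/rest/api/3/filter/search
#       ?maxResults=50
#       &expand=description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,isWritable,subscriptions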
|
||||
@HttpMocker()
|
||||
def test_full_refresh_single_page(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly fetches filters in a single page.
|
||||
|
||||
This test validates that the filters stream sends the correct static request parameters:
|
||||
- expand parameter with all filter fields to include in the response
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
filter_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"name": "My Open Issues",
|
||||
"description": "All open issues assigned to me",
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/filter/10001",
|
||||
"jql": "assignee = currentUser() AND resolution = Unresolved",
|
||||
"favourite": True,
|
||||
},
|
||||
{
|
||||
"id": "10002",
|
||||
"name": "All Project Issues",
|
||||
"description": "All issues in the project",
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/filter/10002",
|
||||
"jql": "project = PROJ",
|
||||
"favourite": False,
|
||||
},
|
||||
]
|
||||
|
||||
# Filters endpoint uses static expand parameter from manifest.yaml
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_max_results(50).with_expand(self._FILTERS_EXPAND).build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(filter_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
assert output.records[0].record.data["id"] == "10001"
|
||||
assert output.records[0].record.data["name"] == "My Open Issues"
|
||||
assert output.records[1].record.data["id"] == "10002"
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination_multiple_pages(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly handles pagination across multiple pages.
|
||||
|
||||
Pagination stop_condition: {{ response.get('isLast') or response.get('startAt') + response.get('maxResults') >= response.get('total') }}
|
||||
Page 1: startAt=0, maxResults=2, total=3 -> 0 + 2 >= 3 is False, fetch page 2
|
||||
Page 2: startAt=2, maxResults=2, total=3, isLast=True -> stops
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
page1_records = [
|
||||
{"id": "10001", "name": "Filter 1", "self": f"https://{_DOMAIN}/rest/api/3/filter/10001"},
|
||||
{"id": "10002", "name": "Filter 2", "self": f"https://{_DOMAIN}/rest/api/3/filter/10002"},
|
||||
]
|
||||
page2_records = [
|
||||
{"id": "10003", "name": "Filter 3", "self": f"https://{_DOMAIN}/rest/api/3/filter/10003"},
|
||||
]
|
||||
|
||||
# Use with_any_query_params() here because pagination involves dynamic startAt
|
||||
# parameters that change between pages (startAt=0 for page 1, startAt=2 for page 2)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
[
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page1_records)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page2_records)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
],
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
filter_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "10001" in filter_ids
|
||||
assert "10002" in filter_ids
|
||||
assert "10003" in filter_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_max_results(50).with_expand(self._FILTERS_EXPAND).build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_400_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector ignores 400 errors per the error handler.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.filters_endpoint(_DOMAIN).with_max_results(50).with_expand(self._FILTERS_EXPAND).build(),
|
||||
HttpResponse(
|
||||
body=json.dumps({"errorMessages": ["Bad request"]}),
|
||||
status_code=400,
|
||||
),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog, expecting_exception=False)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,137 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraPaginatedResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "groups"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestGroupsStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'groups' stream.
|
||||
|
||||
This is a full refresh stream.
|
||||
Endpoint: /rest/api/3/group/bulk
|
||||
Extract field: values
|
||||
Primary key: groupId
|
||||
"""
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_single_page(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync with a single page of results.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
group_records = [
|
||||
{
|
||||
"name": "jira-administrators",
|
||||
"groupId": "group-1",
|
||||
},
|
||||
{
|
||||
"name": "jira-software-users",
|
||||
"groupId": "group-2",
|
||||
},
|
||||
{
|
||||
"name": "site-admins",
|
||||
"groupId": "group-3",
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.groups_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(group_records)
|
||||
.with_pagination(start_at=0, max_results=50, total=3, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
|
||||
group_ids = [r.record.data["groupId"] for r in output.records]
|
||||
assert "group-1" in group_ids
|
||||
assert "group-2" in group_ids
|
||||
assert "group-3" in group_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination_multiple_pages(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that pagination works correctly with multiple pages.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Page 1
|
||||
page1_groups = [
|
||||
{"name": "group-a", "groupId": "group-1"},
|
||||
{"name": "group-b", "groupId": "group-2"},
|
||||
]
|
||||
|
||||
# Page 2
|
||||
page2_groups = [
|
||||
{"name": "group-c", "groupId": "group-3"},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.groups_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
[
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page1_groups)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page2_groups)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
],
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
group_ids = [r.record.data["groupId"] for r in output.records]
|
||||
assert "group-1" in group_ids
|
||||
assert "group-2" in group_ids
|
||||
assert "group-3" in group_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.groups_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,486 @@
|
||||
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from unittest import TestCase
|
||||
|
||||
import freezegun
|
||||
from conftest import get_source
|
||||
|
||||
from airbyte_cdk.models import SyncMode
|
||||
from airbyte_cdk.test.catalog_builder import CatalogBuilder
|
||||
from airbyte_cdk.test.entrypoint_wrapper import read
|
||||
from airbyte_cdk.test.mock_http import HttpMocker
|
||||
from airbyte_cdk.test.state_builder import StateBuilder
|
||||
from mock_server.config import ConfigBuilder
|
||||
from mock_server.request_builder import JiraRequestBuilder
|
||||
from mock_server.response_builder import JiraJqlResponseBuilder, JiraPaginatedResponseBuilder
|
||||
|
||||
|
||||
_NOW = datetime.now(timezone.utc)
|
||||
_STREAM_NAME = "issue_changelogs"
|
||||
_DOMAIN = "airbyteio.atlassian.net"
|
||||
|
||||
|
||||
@freezegun.freeze_time(_NOW.isoformat())
|
||||
class TestIssueChangelogsStream(TestCase):
|
||||
"""
|
||||
Tests for the Jira 'issue_changelogs' stream.
|
||||
|
||||
This is an incremental substream of issues using SubstreamPartitionRouter.
|
||||
Endpoint: /rest/api/3/issue/{issueIdOrKey}/changelog
|
||||
Parent stream: issues (via JQL search)
|
||||
Has transformations: AddFields for issueId
|
||||
Has incremental_dependency: true - parent stream's incremental state affects this substream
|
||||
Cursor field: updated
|
||||
"""
|
||||
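# Rough sketch of the request flow implied by incremental_dependency (an
# assumption based on the docstring and the mocks below, not on the manifest):
#
#   1. parent issues are fetched via the JQL search endpoint, filtered by the
#      stream's "updated" cursor when prior state is supplied;
#   2. for each parent issue id:
#          GET /rest/api/3/issue/{issueIdOrKey}/changelog   -> changelog pages
#
# State is kept per partition, e.g.
#   {"partition": {"issue_id": "10001"}, "cursor": {"updated": "..."}}
# which matches the StateBuilder payload used in test_incremental_sync_with_state.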
|
||||
@HttpMocker()
|
||||
def test_full_refresh_with_parent_issues(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the connector correctly fetches changelogs from multiple parent issues.
|
||||
|
||||
Per the playbook: "All substreams should be tested against at least two parent records"
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issues from JQL search
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "10002",
|
||||
"key": "PROJ-2",
|
||||
"fields": {
|
||||
"summary": "Test Issue 2",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-02T00:00:00.000+0000",
|
||||
"updated": "2024-01-16T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Changelogs for issue 10001
|
||||
issue1_changelogs = [
|
||||
{
|
||||
"id": "100001",
|
||||
"author": {
|
||||
"accountId": "user1",
|
||||
"displayName": "User One",
|
||||
"active": True,
|
||||
},
|
||||
"created": "2024-01-10T00:00:00.000+0000",
|
||||
"updated": "2024-01-10T00:00:00.000+0000",
|
||||
"items": [
|
||||
{
|
||||
"field": "status",
|
||||
"fieldtype": "jira",
|
||||
"from": "10000",
|
||||
"fromString": "To Do",
|
||||
"to": "10001",
|
||||
"toString": "In Progress",
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": "100002",
|
||||
"author": {
|
||||
"accountId": "user1",
|
||||
"displayName": "User One",
|
||||
"active": True,
|
||||
},
|
||||
"created": "2024-01-12T00:00:00.000+0000",
|
||||
"updated": "2024-01-12T00:00:00.000+0000",
|
||||
"items": [
|
||||
{
|
||||
"field": "assignee",
|
||||
"fieldtype": "jira",
|
||||
"from": None,
|
||||
"fromString": None,
|
||||
"to": "user2",
|
||||
"toString": "User Two",
|
||||
}
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Changelogs for issue 10002
|
||||
issue2_changelogs = [
|
||||
{
|
||||
"id": "200001",
|
||||
"author": {
|
||||
"accountId": "user2",
|
||||
"displayName": "User Two",
|
||||
"active": True,
|
||||
},
|
||||
"created": "2024-01-14T00:00:00.000+0000",
|
||||
"updated": "2024-01-14T00:00:00.000+0000",
|
||||
"items": [
|
||||
{
|
||||
"field": "priority",
|
||||
"fieldtype": "jira",
|
||||
"from": "3",
|
||||
"fromString": "Medium",
|
||||
"to": "2",
|
||||
"toString": "High",
|
||||
}
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint (JQL search)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=2, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock changelogs endpoint for issue 10001
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_changelogs_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(issue1_changelogs)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock changelogs endpoint for issue 10002
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_changelogs_endpoint(_DOMAIN, "10002").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(issue2_changelogs)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
# Should have 3 changelogs total (2 from issue 10001, 1 from issue 10002)
|
||||
assert len(output.records) == 3
|
||||
|
||||
# Verify changelog IDs
|
||||
changelog_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "100001" in changelog_ids
|
||||
assert "100002" in changelog_ids
|
||||
assert "200001" in changelog_ids
|
||||
|
||||
# Verify issueId transformation is applied
|
||||
for record in output.records:
|
||||
assert "issueId" in record.record.data
|
||||
|
||||
@HttpMocker()
|
||||
def test_incremental_sync_with_state(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test incremental sync with prior state.
|
||||
|
||||
The issue_changelogs stream has incremental_dependency: true, meaning
|
||||
the parent stream's incremental state affects when this substream fetches data.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# State with cursor for the stream
|
||||
state = (
|
||||
StateBuilder()
|
||||
.with_stream_state(
|
||||
_STREAM_NAME,
|
||||
{
|
||||
"use_global_cursor": False,
|
||||
"state": {"updated": "2024-01-10T00:00:00.000+0000"},
|
||||
"lookback_window": 0,
|
||||
"states": [{"partition": {"issue_id": "10001"}, "cursor": {"updated": "2024-01-10T00:00:00.000+0000"}}],
|
||||
},
|
||||
)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Parent issues from JQL search (only issues updated after state cursor)
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# New changelogs since state
|
||||
new_changelogs = [
|
||||
{
|
||||
"id": "100003",
|
||||
"author": {
|
||||
"accountId": "user1",
|
||||
"displayName": "User One",
|
||||
"active": True,
|
||||
},
|
||||
"created": "2024-01-14T00:00:00.000+0000",
|
||||
"updated": "2024-01-14T00:00:00.000+0000",
|
||||
"items": [
|
||||
{
|
||||
"field": "status",
|
||||
"fieldtype": "jira",
|
||||
"from": "10001",
|
||||
"fromString": "In Progress",
|
||||
"to": "10002",
|
||||
"toString": "Done",
|
||||
}
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock changelogs endpoint for issue 10001
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_changelogs_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(new_changelogs)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config, state=state)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.incremental).build()
|
||||
output = read(source, config=config, catalog=catalog, state=state)
|
||||
|
||||
# Should have 1 new changelog
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["id"] == "100003"
|
||||
|
||||
# Verify state message is emitted
|
||||
assert len(output.state_messages) > 0
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination_within_changelogs(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that pagination works correctly within the changelogs substream.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issue
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Changelogs page 1
|
||||
page1_changelogs = [
|
||||
{
|
||||
"id": "100001",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"created": "2024-01-10T00:00:00.000+0000",
|
||||
"updated": "2024-01-10T00:00:00.000+0000",
|
||||
"items": [
|
||||
{
|
||||
"field": "status",
|
||||
"fieldtype": "jira",
|
||||
"from": "10000",
|
||||
"fromString": "To Do",
|
||||
"to": "10001",
|
||||
"toString": "In Progress",
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": "100002",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"created": "2024-01-11T00:00:00.000+0000",
|
||||
"updated": "2024-01-11T00:00:00.000+0000",
|
||||
"items": [
|
||||
{"field": "assignee", "fieldtype": "jira", "from": None, "fromString": None, "to": "user2", "toString": "User Two"}
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Changelogs page 2
|
||||
page2_changelogs = [
|
||||
{
|
||||
"id": "100003",
|
||||
"author": {"accountId": "user2", "displayName": "User Two", "active": True},
|
||||
"created": "2024-01-12T00:00:00.000+0000",
|
||||
"updated": "2024-01-12T00:00:00.000+0000",
|
||||
"items": [{"field": "priority", "fieldtype": "jira", "from": "3", "fromString": "Medium", "to": "2", "toString": "High"}],
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock changelogs endpoint with pagination
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_changelogs_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
[
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page1_changelogs)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page2_changelogs)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
],
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
# Should have 3 changelogs total
|
||||
assert len(output.records) == 3
|
||||
changelog_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "100001" in changelog_ids
|
||||
assert "100002" in changelog_ids
|
||||
assert "100003" in changelog_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_parent_issues_no_changelogs(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles empty parent issues gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# No parent issues
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records([]).with_pagination(start_at=0, max_results=50, total=0, is_last=True).build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_issue_with_no_changelogs(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles issues with no changelogs gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issue
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock changelogs endpoint with empty response
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_changelogs_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_issueId_transformation_applied(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the AddFields transformation correctly adds issueId to each record.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issue
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Changelog without issueId (will be added by transformation)
|
||||
changelogs = [
|
||||
{
|
||||
"id": "100001",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"created": "2024-01-10T00:00:00.000+0000",
|
||||
"updated": "2024-01-10T00:00:00.000+0000",
|
||||
"items": [
|
||||
{
|
||||
"field": "status",
|
||||
"fieldtype": "jira",
|
||||
"from": "10000",
|
||||
"fromString": "To Do",
|
||||
"to": "10001",
|
||||
"toString": "In Progress",
|
||||
}
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock changelogs endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_changelogs_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(changelogs)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
# Verify issueId transformation is applied with correct value
|
||||
assert output.records[0].record.data["issueId"] == "10001"
|
||||
@@ -0,0 +1,455 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

from datetime import datetime, timezone
from unittest import TestCase

import freezegun
from conftest import get_source

from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker
from airbyte_cdk.test.state_builder import StateBuilder
from mock_server.config import ConfigBuilder
from mock_server.request_builder import JiraRequestBuilder
from mock_server.response_builder import JiraJqlResponseBuilder, JiraPaginatedResponseBuilder


_NOW = datetime.now(timezone.utc)
_STREAM_NAME = "issue_comments"
_DOMAIN = "airbyteio.atlassian.net"


@freezegun.freeze_time(_NOW.isoformat())
class TestIssueCommentsStream(TestCase):
    """
    Tests for the Jira 'issue_comments' stream.

    This is a semi-incremental (client-side incremental) substream of issues.
    Endpoint: /rest/api/3/issue/{issueIdOrKey}/comment
    Parent stream: issues (via JQL search)
    Has transformations: AddFields for issueId
    Has incremental_dependency: true
    Extract field: comments
    Cursor field: updated (client-side filtering)
    """
|
||||
|
||||
    @HttpMocker()
    def test_full_refresh_with_parent_issues(self, http_mocker: HttpMocker):
        """
        Test that the connector correctly fetches comments from multiple parent issues.

        Per the playbook: "All substreams should be tested against at least two parent records".
        """
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issues from JQL search
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": "10002",
|
||||
"key": "PROJ-2",
|
||||
"fields": {
|
||||
"summary": "Test Issue 2",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-02T00:00:00.000+0000",
|
||||
"updated": "2024-01-16T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Comments for issue 10001
|
||||
issue1_comments = [
|
||||
{
|
||||
"id": "100001",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100001",
|
||||
"author": {
|
||||
"accountId": "user1",
|
||||
"displayName": "User One",
|
||||
"active": True,
|
||||
},
|
||||
"body": {
|
||||
"type": "doc",
|
||||
"version": 1,
|
||||
"content": [{"type": "paragraph", "content": [{"type": "text", "text": "First comment"}]}],
|
||||
},
|
||||
"created": "2024-01-10T00:00:00.000+0000",
|
||||
"updated": "2024-01-10T00:00:00.000+0000",
|
||||
},
|
||||
{
|
||||
"id": "100002",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100002",
|
||||
"author": {
|
||||
"accountId": "user2",
|
||||
"displayName": "User Two",
|
||||
"active": True,
|
||||
},
|
||||
"body": {
|
||||
"type": "doc",
|
||||
"version": 1,
|
||||
"content": [{"type": "paragraph", "content": [{"type": "text", "text": "Second comment"}]}],
|
||||
},
|
||||
"created": "2024-01-12T00:00:00.000+0000",
|
||||
"updated": "2024-01-12T00:00:00.000+0000",
|
||||
},
|
||||
]
|
||||
|
||||
# Comments for issue 10002
|
||||
issue2_comments = [
|
||||
{
|
||||
"id": "200001",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10002/comment/200001",
|
||||
"author": {
|
||||
"accountId": "user1",
|
||||
"displayName": "User One",
|
||||
"active": True,
|
||||
},
|
||||
"body": {
|
||||
"type": "doc",
|
||||
"version": 1,
|
||||
"content": [{"type": "paragraph", "content": [{"type": "text", "text": "Comment on issue 2"}]}],
|
||||
},
|
||||
"created": "2024-01-14T00:00:00.000+0000",
|
||||
"updated": "2024-01-14T00:00:00.000+0000",
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint (JQL search)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=2, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock comments endpoint for issue 10001
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_comments_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records(issue1_comments)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock comments endpoint for issue 10002
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_comments_endpoint(_DOMAIN, "10002").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records(issue2_comments)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
# Should have 3 comments total (2 from issue 10001, 1 from issue 10002)
|
||||
assert len(output.records) == 3
|
||||
|
||||
# Verify comment IDs
|
||||
comment_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "100001" in comment_ids
|
||||
assert "100002" in comment_ids
|
||||
assert "200001" in comment_ids
|
||||
|
||||
# Verify issueId transformation is applied
|
||||
for record in output.records:
|
||||
assert "issueId" in record.record.data
|
||||
|
||||
    @HttpMocker()
    def test_incremental_sync_with_state(self, http_mocker: HttpMocker):
        """
        Test incremental sync with prior state.

        The issue_comments stream is semi_incremental (client-side filtering).
        """
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# State with cursor for the stream
|
||||
state = (
|
||||
StateBuilder()
|
||||
.with_stream_state(
|
||||
_STREAM_NAME,
|
||||
{
|
||||
"use_global_cursor": False,
|
||||
"state": {"updated": "2024-01-10T00:00:00.000+0000"},
|
||||
"lookback_window": 0,
|
||||
"states": [{"partition": {"issue_id": "10001"}, "cursor": {"updated": "2024-01-10T00:00:00.000+0000"}}],
|
||||
},
|
||||
)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Parent issues from JQL search
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Comments (API returns all, client-side filtering applies)
|
||||
all_comments = [
|
||||
{
|
||||
"id": "100001",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100001",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"body": {"type": "doc", "version": 1, "content": []},
|
||||
"created": "2024-01-08T00:00:00.000+0000",
|
||||
"updated": "2024-01-08T00:00:00.000+0000",
|
||||
},
|
||||
{
|
||||
"id": "100002",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100002",
|
||||
"author": {"accountId": "user2", "displayName": "User Two", "active": True},
|
||||
"body": {"type": "doc", "version": 1, "content": []},
|
||||
"created": "2024-01-14T00:00:00.000+0000",
|
||||
"updated": "2024-01-14T00:00:00.000+0000",
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock comments endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_comments_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records(all_comments)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config, state=state)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.incremental).build()
|
||||
output = read(source, config=config, catalog=catalog, state=state)
|
||||
|
||||
# Client-side filtering should only return comments updated after state cursor
|
||||
# Comment 100002 (updated 2024-01-14) should be returned, comment 100001 (updated 2024-01-08) filtered out
|
||||
assert len(output.records) == 1
|
||||
assert output.records[0].record.data["id"] == "100002"
|
||||
|
||||
# Verify state message is emitted
|
||||
assert len(output.state_messages) > 0
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination_within_comments(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that pagination works correctly within the comments substream.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issue
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Comments page 1
|
||||
page1_comments = [
|
||||
{
|
||||
"id": "100001",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100001",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"body": {"type": "doc", "version": 1, "content": []},
|
||||
"created": "2024-01-10T00:00:00.000+0000",
|
||||
"updated": "2024-01-10T00:00:00.000+0000",
|
||||
},
|
||||
{
|
||||
"id": "100002",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100002",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"body": {"type": "doc", "version": 1, "content": []},
|
||||
"created": "2024-01-11T00:00:00.000+0000",
|
||||
"updated": "2024-01-11T00:00:00.000+0000",
|
||||
},
|
||||
]
|
||||
|
||||
# Comments page 2
|
||||
page2_comments = [
|
||||
{
|
||||
"id": "100003",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100003",
|
||||
"author": {"accountId": "user2", "displayName": "User Two", "active": True},
|
||||
"body": {"type": "doc", "version": 1, "content": []},
|
||||
"created": "2024-01-12T00:00:00.000+0000",
|
||||
"updated": "2024-01-12T00:00:00.000+0000",
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock comments endpoint with pagination
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_comments_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
[
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records(page1_comments)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records(page2_comments)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
],
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
# Should have 3 comments total
|
||||
assert len(output.records) == 3
|
||||
comment_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "100001" in comment_ids
|
||||
assert "100002" in comment_ids
|
||||
assert "100003" in comment_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_parent_issues_no_comments(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles empty parent issues gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# No parent issues
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records([]).with_pagination(start_at=0, max_results=50, total=0, is_last=True).build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_issue_with_no_comments(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles issues with no comments gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issue
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock comments endpoint with empty response
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_comments_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_issueId_transformation_applied(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that the AddFields transformation correctly adds issueId to each record.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Parent issue
|
||||
issue_records = [
|
||||
{
|
||||
"id": "10001",
|
||||
"key": "PROJ-1",
|
||||
"fields": {
|
||||
"summary": "Test Issue 1",
|
||||
"project": {"id": "10001", "key": "PROJ1"},
|
||||
"created": "2024-01-01T00:00:00.000+0000",
|
||||
"updated": "2024-01-15T00:00:00.000+0000",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
# Comment without issueId (will be added by transformation)
|
||||
comments = [
|
||||
{
|
||||
"id": "100001",
|
||||
"self": "https://airbyteio.atlassian.net/rest/api/3/issue/10001/comment/100001",
|
||||
"author": {"accountId": "user1", "displayName": "User One", "active": True},
|
||||
"body": {"type": "doc", "version": 1, "content": []},
|
||||
"created": "2024-01-10T00:00:00.000+0000",
|
||||
"updated": "2024-01-10T00:00:00.000+0000",
|
||||
},
|
||||
]
|
||||
|
||||
# Mock parent issues endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issues_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraJqlResponseBuilder().with_records(issue_records).with_pagination(start_at=0, max_results=50, total=1, is_last=True).build(),
|
||||
)
|
||||
|
||||
# Mock comments endpoint
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_comments_endpoint(_DOMAIN, "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("comments")
|
||||
.with_records(comments)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
# Verify issueId transformation is applied with correct value
|
||||
assert output.records[0].record.data["issueId"] == "10001"
|
||||
@@ -0,0 +1,178 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

import json
from datetime import datetime, timezone
from unittest import TestCase

import freezegun
from conftest import get_source

from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from mock_server.config import ConfigBuilder
from mock_server.request_builder import JiraRequestBuilder
from mock_server.response_builder import JiraPaginatedResponseBuilder


_NOW = datetime.now(timezone.utc)
_STREAM_NAME = "issue_custom_field_contexts"
_DOMAIN = "airbyteio.atlassian.net"


@freezegun.freeze_time(_NOW.isoformat())
class TestIssueCustomFieldContextsStream(TestCase):
    """
    Tests for the Jira 'issue_custom_field_contexts' stream.

    This is a substream that depends on custom issue fields as parent.
    Endpoint: /rest/api/3/field/{fieldId}/context
    Extract field: values
    Primary key: id
    Transformations: AddFields (fieldId, fieldType)
    Error handler: 400/403/404 IGNORE
    """
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_with_multiple_fields(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync with contexts from multiple custom fields.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Mock issue fields endpoint (parent stream) - only custom fields are used
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Story Points", "custom": True, "schema": {"type": "number", "items": None}},
|
||||
{"id": "customfield_10002", "name": "Sprint", "custom": True, "schema": {"type": "array", "items": "string"}},
|
||||
{"id": "summary", "name": "Summary", "custom": False}, # Non-custom field should be filtered out
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
# Mock contexts for field 1
|
||||
field1_contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
]
|
||||
|
||||
# Mock contexts for field 2
|
||||
field2_contexts = [
|
||||
{"id": "10001", "name": "Project Context", "isGlobalContext": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(field1_contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10002").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(field2_contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
|
||||
context_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "10000" in context_ids
|
||||
assert "10001" in context_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_field_id_transformation(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that AddFields transformation correctly adds fieldId and fieldType.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Story Points", "custom": True, "schema": {"type": "number", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["fieldId"] == "customfield_10001"
|
||||
assert record["fieldType"] == "number"
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_400_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that 400 errors are ignored gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Story Points", "custom": True, "schema": {"type": "number", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
HttpResponse(body=json.dumps({"errorMessages": ["Bad request"]}), status_code=400),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_custom_fields(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles no custom fields gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Only non-custom fields
|
||||
issue_fields = [
|
||||
{"id": "summary", "name": "Summary", "custom": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,307 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

import json
from datetime import datetime, timezone
from unittest import TestCase

import freezegun
from conftest import get_source

from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from mock_server.config import ConfigBuilder
from mock_server.request_builder import JiraRequestBuilder
from mock_server.response_builder import JiraPaginatedResponseBuilder


_NOW = datetime.now(timezone.utc)
_STREAM_NAME = "issue_custom_field_options"
_DOMAIN = "airbyteio.atlassian.net"


@freezegun.freeze_time(_NOW.isoformat())
class TestIssueCustomFieldOptionsStream(TestCase):
    """
    Tests for the Jira 'issue_custom_field_options' stream.

    This is a nested substream that depends on issue_custom_field_contexts.
    Endpoint: /rest/api/3/field/{fieldId}/context/{contextId}/option
    Extract field: values
    Primary key: id
    Transformations: AddFields (fieldId, contextId)
    Error handler: 400/403/404 IGNORE
    """
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh_with_multiple_contexts(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync with options from multiple contexts.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
# Mock issue fields endpoint (grandparent stream) - only custom fields with option type
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Priority", "custom": True, "schema": {"type": "option", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
# Mock contexts for field (parent stream)
|
||||
contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
{"id": "10001", "name": "Project Context", "isGlobalContext": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
# Mock options for context 1
|
||||
context1_options = [
|
||||
{"id": "10100", "value": "High", "disabled": False},
|
||||
]
|
||||
|
||||
# Mock options for context 2
|
||||
context2_options = [
|
||||
{"id": "10101", "value": "Low", "disabled": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_options_endpoint(_DOMAIN, "customfield_10001", "10000").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(context1_options)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_options_endpoint(_DOMAIN, "customfield_10001", "10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(context2_options)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
|
||||
option_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "10100" in option_ids
|
||||
assert "10101" in option_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_context_id_transformation(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that AddFields transformation correctly adds fieldId and contextId.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Priority", "custom": True, "schema": {"type": "option", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
options = [
|
||||
{"id": "10100", "value": "High", "disabled": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_options_endpoint(_DOMAIN, "customfield_10001", "10000").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(options)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["fieldId"] == "customfield_10001"
|
||||
assert record["contextId"] == "10000"
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_404_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that 404 errors are ignored gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Priority", "custom": True, "schema": {"type": "option", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_options_endpoint(_DOMAIN, "customfield_10001", "10000").with_any_query_params().build(),
|
||||
HttpResponse(body=json.dumps({"errorMessages": ["Not found"]}), status_code=404),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
    @HttpMocker()
    def test_error_400_ignored(self, http_mocker: HttpMocker):
        """
        Test that 400 errors are ignored gracefully.

        Per manifest.yaml, the error_handler for this stream has:
        http_codes: [400, 403, 404] -> action: IGNORE
        """
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Priority", "custom": True, "schema": {"type": "option", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_options_endpoint(_DOMAIN, "customfield_10001", "10000").with_any_query_params().build(),
|
||||
HttpResponse(body=json.dumps({"errorMessages": ["Bad request"]}), status_code=400),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_error_403_ignored(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that 403 errors are ignored gracefully.
|
||||
|
||||
Per manifest.yaml, the error_handler for this stream has:
|
||||
http_codes: [400, 403, 404] -> action: IGNORE
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Priority", "custom": True, "schema": {"type": "option", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
contexts = [
|
||||
{"id": "10000", "name": "Default Context", "isGlobalContext": True},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(contexts)
|
||||
.with_pagination(start_at=0, max_results=50, total=1, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_options_endpoint(_DOMAIN, "customfield_10001", "10000").with_any_query_params().build(),
|
||||
HttpResponse(body=json.dumps({"errorMessages": ["Forbidden"]}), status_code=403),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_contexts(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles no contexts gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
issue_fields = [
|
||||
{"id": "customfield_10001", "name": "Priority", "custom": True, "schema": {"type": "option", "items": None}},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(issue_fields), status_code=200),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_custom_field_contexts_endpoint(_DOMAIN, "customfield_10001").with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,129 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

from datetime import datetime, timezone
from unittest import TestCase

import freezegun
from conftest import get_source

from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker
from mock_server.config import ConfigBuilder
from mock_server.request_builder import JiraRequestBuilder
from mock_server.response_builder import JiraPaginatedResponseBuilder


_NOW = datetime.now(timezone.utc)
_STREAM_NAME = "issue_field_configurations"
_DOMAIN = "airbyteio.atlassian.net"


@freezegun.freeze_time(_NOW.isoformat())
class TestIssueFieldConfigurationsStream(TestCase):
    """
    Tests for the Jira 'issue_field_configurations' stream.

    Endpoint: /rest/api/3/fieldconfiguration
    Extract field: values
    Primary key: id
    """
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync returns all field configurations.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
field_configs = [
|
||||
{"id": 10000, "name": "Default Field Configuration", "description": "Default", "isDefault": True},
|
||||
{"id": 10001, "name": "Custom Field Configuration", "description": "Custom", "isDefault": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_field_configurations_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(field_configs)
|
||||
.with_pagination(start_at=0, max_results=50, total=2, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 2
|
||||
|
||||
config_ids = [r.record.data["id"] for r in output.records]
|
||||
assert 10000 in config_ids
|
||||
assert 10001 in config_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_pagination(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test pagination with 2 pages of field configurations.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
page1_configs = [
|
||||
{"id": 10000, "name": "Config 1", "isDefault": True},
|
||||
{"id": 10001, "name": "Config 2", "isDefault": False},
|
||||
]
|
||||
|
||||
page2_configs = [
|
||||
{"id": 10002, "name": "Config 3", "isDefault": False},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_field_configurations_endpoint(_DOMAIN).with_query_param("maxResults", "50").build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page1_configs)
|
||||
.with_pagination(start_at=0, max_results=2, total=3, is_last=False)
|
||||
.build(),
|
||||
)
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_field_configurations_endpoint(_DOMAIN)
|
||||
.with_query_param("maxResults", "50")
|
||||
.with_query_param("startAt", "2")
|
||||
.build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records(page2_configs)
|
||||
.with_pagination(start_at=2, max_results=2, total=3, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
|
||||
config_ids = [r.record.data["id"] for r in output.records]
|
||||
assert 10000 in config_ids
|
||||
assert 10001 in config_ids
|
||||
assert 10002 in config_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_response(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles empty response gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_field_configurations_endpoint(_DOMAIN).with_any_query_params().build(),
|
||||
JiraPaginatedResponseBuilder("values")
|
||||
.with_records([])
|
||||
.with_pagination(start_at=0, max_results=50, total=0, is_last=True)
|
||||
.build(),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,143 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

import json
from datetime import datetime, timezone
from unittest import TestCase

import freezegun
from conftest import get_source

from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from mock_server.config import ConfigBuilder
from mock_server.request_builder import JiraRequestBuilder


_NOW = datetime.now(timezone.utc)
_STREAM_NAME = "issue_fields"
_DOMAIN = "airbyteio.atlassian.net"


@freezegun.freeze_time(_NOW.isoformat())
class TestIssueFieldsStream(TestCase):
    """
    Tests for the Jira 'issue_fields' stream.

    This is a full refresh stream without pagination.
    Endpoint: /rest/api/3/field
    Primary key: id
    Uses retriever_no_pagination_use_cache
    """
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync returns all issue fields.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
field_records = [
|
||||
{
|
||||
"id": "summary",
|
||||
"key": "summary",
|
||||
"name": "Summary",
|
||||
"custom": False,
|
||||
"orderable": True,
|
||||
"navigable": True,
|
||||
"searchable": True,
|
||||
"clauseNames": ["summary"],
|
||||
},
|
||||
{
|
||||
"id": "description",
|
||||
"key": "description",
|
||||
"name": "Description",
|
||||
"custom": False,
|
||||
"orderable": True,
|
||||
"navigable": True,
|
||||
"searchable": True,
|
||||
"clauseNames": ["description"],
|
||||
},
|
||||
{
|
||||
"id": "customfield_10001",
|
||||
"key": "customfield_10001",
|
||||
"name": "Story Points",
|
||||
"custom": True,
|
||||
"orderable": True,
|
||||
"navigable": True,
|
||||
"searchable": True,
|
||||
"clauseNames": ["cf[10001]", "Story Points"],
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(field_records), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
|
||||
field_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "summary" in field_ids
|
||||
assert "description" in field_ids
|
||||
assert "customfield_10001" in field_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_custom_field_properties(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that custom field properties are correctly returned.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
field_records = [
|
||||
{
|
||||
"id": "customfield_10001",
|
||||
"key": "customfield_10001",
|
||||
"name": "Story Points",
|
||||
"custom": True,
|
||||
"orderable": True,
|
||||
"navigable": True,
|
||||
"searchable": True,
|
||||
"clauseNames": ["cf[10001]", "Story Points"],
|
||||
"scope": {"type": "PROJECT", "project": {"id": "10000"}},
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps(field_records), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["custom"] is True
|
||||
assert record["name"] == "Story Points"
|
||||
assert "scope" in record
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_fields_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps([]), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)
|
||||
@@ -0,0 +1,130 @@
# Copyright (c) 2024 Airbyte, Inc., all rights reserved.

import json
from datetime import datetime, timezone
from unittest import TestCase

import freezegun
from conftest import get_source

from airbyte_cdk.models import SyncMode
from airbyte_cdk.test.catalog_builder import CatalogBuilder
from airbyte_cdk.test.entrypoint_wrapper import read
from airbyte_cdk.test.mock_http import HttpMocker, HttpResponse
from mock_server.config import ConfigBuilder
from mock_server.request_builder import JiraRequestBuilder


_NOW = datetime.now(timezone.utc)
_STREAM_NAME = "issue_link_types"
_DOMAIN = "airbyteio.atlassian.net"


@freezegun.freeze_time(_NOW.isoformat())
class TestIssueLinkTypesStream(TestCase):
    """
    Tests for the Jira 'issue_link_types' stream.

    This is a full refresh stream without pagination.
    Endpoint: /rest/api/3/issueLinkType
    Extract field: issueLinkTypes
    Primary key: id
    """
|
||||
|
||||
@HttpMocker()
|
||||
def test_full_refresh(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test full refresh sync returns all issue link types.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
link_type_records = [
|
||||
{
|
||||
"id": "10000",
|
||||
"name": "Blocks",
|
||||
"inward": "is blocked by",
|
||||
"outward": "blocks",
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/issueLinkType/10000",
|
||||
},
|
||||
{
|
||||
"id": "10001",
|
||||
"name": "Cloners",
|
||||
"inward": "is cloned by",
|
||||
"outward": "clones",
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/issueLinkType/10001",
|
||||
},
|
||||
{
|
||||
"id": "10002",
|
||||
"name": "Duplicate",
|
||||
"inward": "is duplicated by",
|
||||
"outward": "duplicates",
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/issueLinkType/10002",
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_link_types_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps({"issueLinkTypes": link_type_records}), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 3
|
||||
|
||||
link_type_ids = [r.record.data["id"] for r in output.records]
|
||||
assert "10000" in link_type_ids
|
||||
assert "10001" in link_type_ids
|
||||
assert "10002" in link_type_ids
|
||||
|
||||
@HttpMocker()
|
||||
def test_link_type_properties(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that link type properties are correctly returned.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
link_type_records = [
|
||||
{
|
||||
"id": "10000",
|
||||
"name": "Blocks",
|
||||
"inward": "is blocked by",
|
||||
"outward": "blocks",
|
||||
"self": f"https://{_DOMAIN}/rest/api/3/issueLinkType/10000",
|
||||
},
|
||||
]
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_link_types_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps({"issueLinkTypes": link_type_records}), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 1
|
||||
record = output.records[0].record.data
|
||||
assert record["name"] == "Blocks"
|
||||
assert record["inward"] == "is blocked by"
|
||||
assert record["outward"] == "blocks"
|
||||
|
||||
@HttpMocker()
|
||||
def test_empty_results(self, http_mocker: HttpMocker):
|
||||
"""
|
||||
Test that connector handles empty results gracefully.
|
||||
"""
|
||||
config = ConfigBuilder().with_domain(_DOMAIN).build()
|
||||
|
||||
http_mocker.get(
|
||||
JiraRequestBuilder.issue_link_types_endpoint(_DOMAIN).build(),
|
||||
HttpResponse(body=json.dumps({"issueLinkTypes": []}), status_code=200),
|
||||
)
|
||||
|
||||
source = get_source(config=config)
|
||||
catalog = CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build()
|
||||
output = read(source, config=config, catalog=catalog)
|
||||
|
||||
assert len(output.records) == 0
|
||||
assert not any(log.log.level == "ERROR" for log in output.logs)